/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}
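
// Illustrative sketch (app-side, not part of the layer): the AHB paths above are triggered when an application
// chains VkExternalFormatANDROID into VkImageCreateInfo. Here, external_format is assumed to have been queried
// earlier via vkGetAndroidHardwareBufferPropertiesANDROID:
//
//     VkExternalFormatANDROID ext_fmt = {VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, nullptr, external_format};
//     VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, &ext_fmt /* ...remaining fields... */};
//     vkCreateImage(device, &ici, nullptr, &image);
//
// A non-zero externalFormat is what makes RecordCreateImageANDROID mark the IMAGE_STATE with has_ahb_format.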

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage,
                                                       VkResult result) {
    if (VK_SUCCESS != result) return;
    std::unique_ptr<IMAGE_STATE> is_node(new IMAGE_STATE(*pImage, pCreateInfo));
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->external_format_android) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
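
// Context note (sketch, not authoritative): the chassis-generated vkCreateImage entry point calls down the
// dispatch chain first and only then invokes this PostCallRecord hook with the driver's VkResult, so state is
// recorded only for creations that actually succeeded. Roughly:
//
//     VkResult CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, ...) {  // hypothetical wrapper
//         VkResult result = DispatchCreateImage(device, pCreateInfo, pAllocator, pImage);
//         for (auto *intercept : layer_data->object_dispatch)
//             intercept->PostCallRecordCreateImage(device, pCreateInfo, pAllocator, pImage, result);
//         return result;
//     }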

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage,
                                                       VkImageLayout dstImageLayout, uint32_t regionCount,
                                                       const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage,
                                                       VkImageLayout dstImageLayout, uint32_t regionCount,
                                                       const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO: This doesn't create a deep copy of pQueueFamilyIndices, so fix that if/when we want that data to be valid
    std::unique_ptr<BUFFER_STATE> buffer_state(new BUFFER_STATE(*pBuffer, pCreateInfo));

    // Fetch the memory requirements now, in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    bufferViewMap[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo));
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageState(pCreateInfo->image);
    imageViewMap[*pView] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(image_state, *pView, pCreateInfo));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->create_from_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->create_from_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}
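
// Worked example (illustrative): if imageA and imageB were both created with VK_IMAGE_CREATE_ALIAS_BIT and bound
// to the same VkDeviceMemory, then when AddAliasingImage(imageB_state) runs, imageA is found in the memory's
// bound_images set, IsCompatibleAliasing() is checked, and on success the two states cross-link:
//     imageA_state->aliasing_images == {imageB}   and   imageB_state->aliasing_images == {imageA}
// The inserted.second check keeps the links symmetric without double-inserting on repeated bind calls.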

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear: because bound_images contains both sides of every cross-reference, a single pass
    // over it clears all of the aliasing references; no second, reverse pass is needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}
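
// Illustrative note: with bound_images = {A, B, C} all aliased to each other, clearing the aliasing_images set of
// each of A, B, and C removes every cross-reference in one pass; there is no need to also erase A from B's set
// individually, since B's whole set is cleared when B is visited.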

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    auto *mem_info = new DEVICE_MEMORY_STATE(object, mem, pAllocateInfo);
    memObjMap[mem] = unique_ptr<DEVICE_MEMORY_STATE>(mem_info);

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}
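
// Illustrative sketch (app-side, not part of the layer): the dedicated flag above gets set when an application
// chains VkMemoryDedicatedAllocateInfo into vkAllocateMemory; mem_type_index and reqs are assumed to have been
// chosen/queried elsewhere:
//
//     VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
//     dedicated_info.image = image;  // dedicated to this image; buffer stays VK_NULL_HANDLE
//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &dedicated_info};
//     alloc_info.allocationSize = reqs.size;
//     alloc_info.memoryTypeIndex = mem_type_index;
//     vkAllocateMemory(device, &alloc_info, nullptr, &mem);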

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    auto inserted = cb_node->object_bindings.emplace(sampler_state->sampler, kVulkanObjectTypeSampler);
    if (inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        sampler_state->cb_bindings.insert(cb_node);
    }
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        auto image_inserted = cb_node->object_bindings.emplace(image_state->image, kVulkanObjectTypeImage);
        if (image_inserted.second) {
            // Only need to continue if this is a new item (the rest of the work would have been done previously)
            image_state->cb_bindings.insert(cb_node);
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                    if (mem_inserted.second) {
                        // Only need to complete the cross-reference if this is a new item
                        pMemInfo->cb_bindings.insert(cb_node);
                    }
                }
            }
        }
    }
}
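
// The bindings recorded above form a cross-referenced web (sketch):
//     cb_node->object_bindings -> image handle          image_state->cb_bindings -> cb_node
//     cb_node->memObjs         -> bound VkDeviceMemory  pMemInfo->cb_bindings    -> cb_node
// This lets a destroyed image or freed memory invalidate exactly the command buffers that recorded it, and lets
// ResetCommandBufferState() (below) unlink a command buffer from everything it referenced.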

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    auto inserted = cb_node->object_bindings.emplace(view_state->image_view, kVulkanObjectTypeImageView);
    if (inserted.second) {
        // Only need to continue if this is a new item
        view_state->cb_bindings.insert(cb_node);
        auto image_state = GetImageState(view_state->create_info.image);
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    auto buffer_inserted = cb_node->object_bindings.emplace(buffer_state->buffer, kVulkanObjectTypeBuffer);
    if (buffer_inserted.second) {
        // Only need to continue if this is a new item
        buffer_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto inserted = cb_node->memObjs.insert(mem_binding);
                if (inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    auto inserted = cb_node->object_bindings.emplace(view_state->buffer_view, kVulkanObjectTypeBufferView);
    if (inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        view_state->cb_bindings.insert(cb_node);
        auto buffer_state = GetBufferState(view_state->create_info.buffer);
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    auto as_inserted = cb_node->object_bindings.emplace(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV);
    if (as_inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        as_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                if (mem_inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}

// For every mem obj bound to particular CB, free bindings related to that CB
void ValidationStateTracker::ClearCmdBufAndMemReferences(CMD_BUFFER_STATE *cb_node) {
    if (cb_node) {
        if (cb_node->memObjs.size() > 0) {
            for (auto mem : cb_node->memObjs) {
                DEVICE_MEMORY_STATE *pInfo = GetDevMemState(mem);
                if (pInfo) {
                    pInfo->cb_bindings.erase(cb_node);
                }
            }
            cb_node->memObjs.clear();
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}
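
// Illustrative call flow (assumed, based on the non-sparse bind record hooks elsewhere in the layer): a
// vkBindImageMemory(device, image, mem, 256) call would reach this function roughly as
//     SetMemBinding(mem, GetImageState(image), 256, VulkanTypedHandle(image, kVulkanObjectTypeImage));
// after which binding.offset == 256, binding.size == the prefetched requirements.size, and the
// DEVICE_MEMORY_STATE for mem lists the image in its obj_bindings.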

// For the NULL mem case, clear any previous binding; otherwise:
//   make sure the given object is in its object map,
//   if a previous binding existed, update the binding,
//   add a reference from the object's info to the memory info, and
//   add a reference off of the object's binding info.
// Returns skip, which is currently always false.
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = false;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

const RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) const {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second.get();
}

RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second.get();
}

std::shared_ptr<RENDER_PASS_STATE> ValidationStateTracker::GetRenderPassStateSharedPtr(VkRenderPass renderpass) {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second;
}

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool need_update =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count) ||
                    // Update if previous bindingReqMap doesn't include new bindingReqMap
                    !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                   state.per_set[setIndex].validated_set_binding_req_map.end(), set_binding_pair.second.begin(),
                                   set_binding_pair.second.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    descriptor_set->UpdateDrawState(this, cb_state, binding_req_map);

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
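
// Illustrative consequence of the need_update logic above: for a "many descriptors" set that is bound and drawn
// repeatedly without modification, only the first draw pays for the full descriptor walk in UpdateDrawState();
// subsequent draws compare validated_set, the change counts, and the binding requirement map, find nothing new,
// and skip the update entirely. Any vkUpdateDescriptorSets() call bumps GetChangeCount() and re-triggers the walk.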

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
void ValidationStateTracker::AddCommandBufferBinding(std::unordered_set<CMD_BUFFER_STATE *> *cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return;
    }
    cb_bindings->insert(cb_node);
    cb_node->object_bindings.insert(obj);
}

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->queryToStateMap.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->eventToStageMap.clear();
        pCB->cb_vertex_buffer_binding_info.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        ClearCmdBufAndMemReferences(pCB);
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
    if (descriptor_indexing_features) {
        state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
    }

    const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
    if (eight_bit_storage_features) {
        state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
    }

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
    if (float16_int8_features) {
        state_tracker->enabled_features.float16_int8 = *float16_int8_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *uniform_buffer_standard_layout_features =
        lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
    if (uniform_buffer_standard_layout_features) {
        state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
    }

    const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
    if (scalar_block_layout_features) {
        state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
    }

    const auto *buffer_address = lvl_find_in_chain<VkPhysicalDeviceBufferAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_address) {
        state_tracker->enabled_features.buffer_address = *buffer_address;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *float_controls_features = lvl_find_in_chain<VkPhysicalDeviceFloatControlsPropertiesKHR>(pCreateInfo->pNext);
    if (float_controls_features) {
        state_tracker->enabled_features.float_controls = *float_controls_features;
    }

    const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
    if (host_query_reset_features) {
        state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *imageless_framebuffer_features =
        lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
    if (imageless_framebuffer_features) {
        state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *subgroup_extended_types_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(pCreateInfo->pNext);
    if (subgroup_extended_types_features) {
        state_tracker->enabled_features.subgroup_extended_types_features = *subgroup_extended_types_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(
            gpu, &numCooperativeMatrixProperties, state_tracker->cooperative_matrix_properties.data());
    }
    if (state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}
1191
1192void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1193 if (!device) return;
1194
1195 pipelineMap.clear();
1196 renderPassMap.clear();
1197
1198 // Reset all command buffers before destroying them, to unlink object_bindings.
1199 for (auto &commandBuffer : commandBufferMap) {
1200 ResetCommandBufferState(commandBuffer.first);
1201 }
1202 commandBufferMap.clear();
1203
1204 // This will also delete all sets in the pool & remove them from setMap
1205 DeleteDescriptorSetPools();
1206 // All sets should be removed
1207 assert(setMap.empty());
1208 descriptorSetLayoutMap.clear();
1209 imageViewMap.clear();
1210 imageMap.clear();
1211 bufferViewMap.clear();
1212 bufferMap.clear();
1213 // Queues persist until device is destroyed
1214 queueMap.clear();
1215}
1216
1217// Loop through bound objects and increment their in_use counts.
1218void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1219 for (auto obj : cb_node->object_bindings) {
1220 auto base_obj = GetStateStructPtrFromObject(obj);
1221 if (base_obj) {
1222 base_obj->in_use.fetch_add(1);
1223 }
1224 }
1225}
1226
1227// Track which resources are in-flight by atomically incrementing their "in_use" count
1228void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1229 cb_node->submitCount++;
1230 cb_node->in_use.fetch_add(1);
1231
1232 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
1233 IncrementBoundObjects(cb_node);
1234 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1235 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1236 // should then be flagged prior to calling this function
1237 for (auto event : cb_node->writeEventsBeforeWait) {
1238 auto event_state = GetEventState(event);
1239 if (event_state) event_state->write_in_use++;
1240 }
1241}
1242
1243// Decrement in-use count for objects bound to command buffer
1244void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
1245 BASE_NODE *base_obj = nullptr;
1246 for (auto obj : cb_node->object_bindings) {
1247 base_obj = GetStateStructPtrFromObject(obj);
1248 if (base_obj) {
1249 base_obj->in_use.fetch_sub(1);
1250 }
1251 }
1252}
1253
1254void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq, bool switch_finished_queries) {
1255 std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
1256
1257 // Roll this queue forward, one submission at a time.
1258 while (pQueue->seq < seq) {
1259 auto &submission = pQueue->submissions.front();
1260
1261 for (auto &wait : submission.waitSemaphores) {
1262 auto pSemaphore = GetSemaphoreState(wait.semaphore);
1263 if (pSemaphore) {
1264 pSemaphore->in_use.fetch_sub(1);
1265 }
1266 auto &lastSeq = otherQueueSeqs[wait.queue];
1267 lastSeq = std::max(lastSeq, wait.seq);
1268 }
1269
1270 for (auto &semaphore : submission.signalSemaphores) {
1271 auto pSemaphore = GetSemaphoreState(semaphore);
1272 if (pSemaphore) {
1273 pSemaphore->in_use.fetch_sub(1);
1274 }
1275 }
1276
1277 for (auto &semaphore : submission.externalSemaphores) {
1278 auto pSemaphore = GetSemaphoreState(semaphore);
1279 if (pSemaphore) {
1280 pSemaphore->in_use.fetch_sub(1);
1281 }
1282 }
1283
1284 for (auto cb : submission.cbs) {
1285 auto cb_node = GetCBState(cb);
1286 if (!cb_node) {
1287 continue;
1288 }
1289 // First perform decrement on general case bound objects
1290 DecrementBoundResources(cb_node);
1291 for (auto event : cb_node->writeEventsBeforeWait) {
1292 auto eventNode = eventMap.find(event);
1293 if (eventNode != eventMap.end()) {
1294 eventNode->second.write_in_use--;
1295 }
1296 }
1297 for (auto queryStatePair : cb_node->queryToStateMap) {
1298 const QueryState newState =
1299 ((queryStatePair.second == QUERYSTATE_ENDED && switch_finished_queries) ? QUERYSTATE_AVAILABLE
1300 : queryStatePair.second);
1301 pQueue->queryToStateMap[queryStatePair.first] = newState;
1302 queryToStateMap[queryStatePair.first] = newState;
1303 }
1304 for (auto eventStagePair : cb_node->eventToStageMap) {
1305 eventMap[eventStagePair.first].stageMask = eventStagePair.second;
1306 }
1307
1308 cb_node->in_use.fetch_sub(1);
1309 }
1310
1311 auto pFence = GetFenceState(submission.fence);
1312 if (pFence && pFence->scope == kSyncScopeInternal) {
1313 pFence->state = FENCE_RETIRED;
1314 }
1315
1316 pQueue->submissions.pop_front();
1317 pQueue->seq++;
1318 }
1319
1320 // Roll other queues forward to the highest seq we saw a wait for
1321 for (auto qs : otherQueueSeqs) {
1322 RetireWorkOnQueue(GetQueueState(qs.first), qs.second, switch_finished_queries);
1323 }
1324}
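
// Illustrative walk-through of the loop above (not executed): if pQueue->seq == 5 with three
// submissions pending, RetireWorkOnQueue(pQueue, 7, ...) pops and processes exactly the first
// two submissions, leaving seq == 7 and one submission still in flight. Any waits those
// submissions performed on other queues' semaphores are then retired recursively at the end.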
1325
1326 // Submit a fence to a queue, delimiting previous fences and previously untracked
1327 // work with this fence.
1328static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1329 pFence->state = FENCE_INFLIGHT;
1330 pFence->signaler.first = pQueue->queue;
1331 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1332}
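
// Illustrative arithmetic (not executed): with pQueue->seq == 10, two submissions already
// pending, and submitCount == 3, signaler.second becomes 10 + 2 + 3 == 15, i.e. the fence is
// treated as signaled once the queue's sequence number reaches the last of the new submissions.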
1333
1334void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1335 VkFence fence, VkResult result) {
1336 uint64_t early_retire_seq = 0;
1337 auto pQueue = GetQueueState(queue);
1338 auto pFence = GetFenceState(fence);
1339
1340 if (pFence) {
1341 if (pFence->scope == kSyncScopeInternal) {
1342 // Mark fence in use
1343 SubmitFence(pQueue, pFence, std::max(1u, submitCount));
1344 if (!submitCount) {
1345 // If no submissions, but just dropping a fence on the end of the queue,
1346 // record an empty submission with just the fence, so we can determine
1347 // its completion.
1348 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
1349 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
1350 }
1351 } else {
1352             // Retire work up to this fence early, since we will not see the wait that corresponds to this signal
1353 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1354 }
1355 }
1356
1357 // Now process each individual submit
1358 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1359 std::vector<VkCommandBuffer> cbs;
1360 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1361 vector<SEMAPHORE_WAIT> semaphore_waits;
1362 vector<VkSemaphore> semaphore_signals;
1363 vector<VkSemaphore> semaphore_externals;
1364 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
1365 VkSemaphore semaphore = submit->pWaitSemaphores[i];
1366 auto pSemaphore = GetSemaphoreState(semaphore);
1367 if (pSemaphore) {
1368 if (pSemaphore->scope == kSyncScopeInternal) {
1369 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1370 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1371 pSemaphore->in_use.fetch_add(1);
1372 }
1373 pSemaphore->signaler.first = VK_NULL_HANDLE;
1374 pSemaphore->signaled = false;
1375 } else {
1376 semaphore_externals.push_back(semaphore);
1377 pSemaphore->in_use.fetch_add(1);
1378 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1379 pSemaphore->scope = kSyncScopeInternal;
1380 }
1381 }
1382 }
1383 }
1384 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
1385 VkSemaphore semaphore = submit->pSignalSemaphores[i];
1386 auto pSemaphore = GetSemaphoreState(semaphore);
1387 if (pSemaphore) {
1388 if (pSemaphore->scope == kSyncScopeInternal) {
1389 pSemaphore->signaler.first = queue;
1390 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1391 pSemaphore->signaled = true;
1392 pSemaphore->in_use.fetch_add(1);
1393 semaphore_signals.push_back(semaphore);
1394 } else {
1395                     // Retire work up to this submit early, since we will not see the wait that corresponds to this signal
1396 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1397 }
1398 }
1399 }
1400 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
1401 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
1402 if (cb_node) {
1403 cbs.push_back(submit->pCommandBuffers[i]);
1404 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
1405 cbs.push_back(secondaryCmdBuffer->commandBuffer);
1406 IncrementResources(secondaryCmdBuffer);
1407 }
1408 IncrementResources(cb_node);
1409 }
1410 }
1411 pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
1412 submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
1413 }
1414
1415 if (early_retire_seq) {
1416 RetireWorkOnQueue(pQueue, early_retire_seq, true);
1417 }
1418}
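
// Illustrative application-side batch (assumes queue, cb0, cb1, and fence are valid handles);
// per the submit_idx == submitCount - 1 selection above, only the second submission is
// recorded with the fence:
//     VkSubmitInfo submits[2] = {};
//     submits[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
//     submits[0].commandBufferCount = 1;
//     submits[0].pCommandBuffers = &cb0;
//     submits[1] = submits[0];
//     submits[1].pCommandBuffers = &cb1;
//     vkQueueSubmit(queue, 2, submits, fence);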
1419
1420void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1421 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1422 VkResult result) {
1423 if (VK_SUCCESS == result) {
1424 AddMemObjInfo(device, *pMemory, pAllocateInfo);
1425 }
1426 return;
1427}
1428
1429void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1430 if (!mem) return;
1431 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
1432 const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
1433
1434 // Clear mem binding for any bound objects
1435 for (const auto &obj : mem_info->obj_bindings) {
1436 BINDABLE *bindable_state = nullptr;
1437 switch (obj.type) {
1438 case kVulkanObjectTypeImage:
1439 bindable_state = GetImageState(obj.Cast<VkImage>());
1440 break;
1441 case kVulkanObjectTypeBuffer:
1442 bindable_state = GetBufferState(obj.Cast<VkBuffer>());
1443 break;
1444 case kVulkanObjectTypeAccelerationStructureNV:
1445 bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
1446 break;
1447
1448 default:
1449 // Should only have acceleration structure, buffer, or image objects bound to memory
1450 assert(0);
1451 }
1452
1453 if (bindable_state) {
1454 bindable_state->binding.mem = MEMORY_UNBOUND;
1455 bindable_state->UpdateBoundMemorySet();
1456 }
1457 }
1458 // Any bound cmd buffers are now invalid
1459 InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
1460 RemoveAliasingImages(mem_info->bound_images);
1461 memObjMap.erase(mem);
1462}
1463
1464void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1465 VkFence fence, VkResult result) {
1466 if (result != VK_SUCCESS) return;
1467 uint64_t early_retire_seq = 0;
1468 auto pFence = GetFenceState(fence);
1469 auto pQueue = GetQueueState(queue);
1470
1471 if (pFence) {
1472 if (pFence->scope == kSyncScopeInternal) {
1473 SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
1474 if (!bindInfoCount) {
1475 // No work to do, just dropping a fence in the queue by itself.
1476 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
1477 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
1478 }
1479 } else {
1480             // Retire work up to this fence early, since we will not see the wait that corresponds to this signal
1481 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1482 }
1483 }
1484
1485 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
1486 const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
1487 // Track objects tied to memory
1488 for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
1489 for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
1490 auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
1491 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1492 VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
1493 }
1494 }
1495 for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
1496 for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
1497 auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
1498 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1499 VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
1500 }
1501 }
1502 for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
1503 for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
1504 auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
1505 // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
1506 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
1507 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
1508 VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
1509 }
1510 }
1511
1512 std::vector<SEMAPHORE_WAIT> semaphore_waits;
1513 std::vector<VkSemaphore> semaphore_signals;
1514 std::vector<VkSemaphore> semaphore_externals;
1515 for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
1516 VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
1517 auto pSemaphore = GetSemaphoreState(semaphore);
1518 if (pSemaphore) {
1519 if (pSemaphore->scope == kSyncScopeInternal) {
1520 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1521 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1522 pSemaphore->in_use.fetch_add(1);
1523 }
1524 pSemaphore->signaler.first = VK_NULL_HANDLE;
1525 pSemaphore->signaled = false;
1526 } else {
1527 semaphore_externals.push_back(semaphore);
1528 pSemaphore->in_use.fetch_add(1);
1529 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1530 pSemaphore->scope = kSyncScopeInternal;
1531 }
1532 }
1533 }
1534 }
1535 for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
1536 VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
1537 auto pSemaphore = GetSemaphoreState(semaphore);
1538 if (pSemaphore) {
1539 if (pSemaphore->scope == kSyncScopeInternal) {
1540 pSemaphore->signaler.first = queue;
1541 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1542 pSemaphore->signaled = true;
1543 pSemaphore->in_use.fetch_add(1);
1544 semaphore_signals.push_back(semaphore);
1545 } else {
1546                     // Retire work up to this submit early, since we will not see the wait that corresponds to this signal
1547 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1548 }
1549 }
1550 }
1551
1552 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
1553 bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
1554 }
1555
1556 if (early_retire_seq) {
1557 RetireWorkOnQueue(pQueue, early_retire_seq, true);
1558 }
1559}
1560
1561void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1562 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1563 VkResult result) {
1564 if (VK_SUCCESS != result) return;
1565 std::unique_ptr<SEMAPHORE_STATE> semaphore_state(new SEMAPHORE_STATE{});
1566 semaphore_state->signaler.first = VK_NULL_HANDLE;
1567 semaphore_state->signaler.second = 0;
1568 semaphore_state->signaled = false;
1569 semaphore_state->scope = kSyncScopeInternal;
1570 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
1571}
1572
1573void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
1574 VkSemaphoreImportFlagsKHR flags) {
1575 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1576 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
1577         if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || (flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR)) &&
1578 sema_node->scope == kSyncScopeInternal) {
1579 sema_node->scope = kSyncScopeExternalTemporary;
1580 } else {
1581 sema_node->scope = kSyncScopeExternalPermanent;
1582 }
1583 }
1584}
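
// Illustrative import (assumes VK_KHR_external_semaphore_fd is enabled and sem/payload_fd are
// valid handles); a sync-fd import is treated as temporary by the logic above, so the semaphore
// moves to kSyncScopeExternalTemporary until the temporary payload is consumed by a wait:
//     VkImportSemaphoreFdInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR};
//     import_info.semaphore = sem;
//     import_info.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
//     import_info.fd = payload_fd;
//     vkImportSemaphoreFdKHR(device, &import_info);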
1585
1586void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1587 auto mem_info = GetDevMemState(mem);
1588 if (mem_info) {
1589 mem_info->mapped_range.offset = offset;
1590 mem_info->mapped_range.size = size;
1591 mem_info->p_driver_data = *ppData;
1592 }
1593}
1594
1595void ValidationStateTracker::RetireFence(VkFence fence) {
1596 auto pFence = GetFenceState(fence);
1597 if (pFence && pFence->scope == kSyncScopeInternal) {
1598 if (pFence->signaler.first != VK_NULL_HANDLE) {
1599             // Fence signaler is a queue -- use this as proof that prior operations on that queue have completed.
1600 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second, true);
1601 } else {
1602             // Fence signaler is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
1603             // the fence as retired.
1604 pFence->state = FENCE_RETIRED;
1605 }
1606 }
1607}
1608
1609void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1610 VkBool32 waitAll, uint64_t timeout, VkResult result) {
1611 if (VK_SUCCESS != result) return;
1612
1613 // When we know that all fences are complete we can clean/remove their CBs
1614 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
1615 for (uint32_t i = 0; i < fenceCount; i++) {
1616 RetireFence(pFences[i]);
1617 }
1618 }
1619     // NOTE: The alternate case, not handled here, is when only some of the fences have
1620     // completed. In that case, for the app to determine which fences completed, it will have
1621     // to call vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
1622}
1623
1624void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
1625 if (VK_SUCCESS != result) return;
1626 RetireFence(fence);
1627}
1628
1629void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
1630 // Add queue to tracking set only if it is new
1631 auto queue_is_new = queues.emplace(queue);
1632     if (queue_is_new.second) {
1633 QUEUE_STATE *queue_state = &queueMap[queue];
1634 queue_state->queue = queue;
1635 queue_state->queueFamilyIndex = queue_family_index;
1636 queue_state->seq = 0;
1637 }
1638}
1639
1640void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
1641 VkQueue *pQueue) {
1642 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
1643}
1644
1645void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
1646 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
1647}
1648
1649void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
1650 if (VK_SUCCESS != result) return;
1651 QUEUE_STATE *queue_state = GetQueueState(queue);
1652 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size(), true);
1653}
1654
1655void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
1656 if (VK_SUCCESS != result) return;
1657 for (auto &queue : queueMap) {
1658 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size(), true);
1659 }
1660}
1661
1662void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
1663 if (!fence) return;
1664 fenceMap.erase(fence);
1665}
1666
1667void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
1668 const VkAllocationCallbacks *pAllocator) {
1669 if (!semaphore) return;
1670 semaphoreMap.erase(semaphore);
1671}
1672
1673void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
1674 if (!event) return;
1675 EVENT_STATE *event_state = GetEventState(event);
1676 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
1677 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
1678 eventMap.erase(event);
1679}
1680
1681void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
1682 const VkAllocationCallbacks *pAllocator) {
1683 if (!queryPool) return;
1684 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
1685 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
1686 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
1687 queryPoolMap.erase(queryPool);
1688}
1689
1690 // Object with the given handle is being bound to memory w/ the given mem_info struct.
1691 // The state tracker only records which handles are bound to the memory object, inserting the
1692 // handle into the appropriate bound-object set (images, buffers, or acceleration structures).
1693 // Detailed range tracking and the linear vs. non-linear overlap checks are performed by the
1694 // validation objects layered above this state tracker.
1695 // Note: the memoryOffset, memRequirements, and is_linear parameters are currently unused in
1696 // this function.
1697void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
1698 VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
1699 if (typed_handle.type == kVulkanObjectTypeImage) {
1700 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
1701 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1702 mem_info->bound_buffers.insert(typed_handle.handle);
1703 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1704 mem_info->bound_acceleration_structures.insert(typed_handle.handle);
1705 } else {
1706 // Unsupported object type
1707 assert(false);
1708 }
1709}
1710
1711void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1712 VkMemoryRequirements mem_reqs, bool is_linear) {
1713 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
1714}
1715
1716void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1717 const VkMemoryRequirements &mem_reqs) {
1718 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
1719}
1720
1721void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
1722 VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
1723 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
1724}
1725
1726 // This function removes the handle from the appropriate bound-object set on the given memory object.
1727static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
1728 if (typed_handle.type == kVulkanObjectTypeImage) {
1729 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
1730 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1731 mem_info->bound_buffers.erase(typed_handle.handle);
1732 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1733 mem_info->bound_acceleration_structures.erase(typed_handle.handle);
1734 } else {
1735 // Unsupported object type
1736 assert(false);
1737 }
1738}
1739
1740void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
1741 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
1742}
1743
1744void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
1745 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
1746}
1747
1748void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
1749 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
1750}
1751
1752void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
1753 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1754 if (buffer_state) {
1755 // Track bound memory range information
1756 auto mem_info = GetDevMemState(mem);
1757 if (mem_info) {
1758 InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
1759 }
1760 // Track objects tied to memory
1761 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
1762 }
1763}
1764
1765void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
1766 VkDeviceSize memoryOffset, VkResult result) {
1767 if (VK_SUCCESS != result) return;
1768 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
1769}
1770
1771void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
1772 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1773 for (uint32_t i = 0; i < bindInfoCount; i++) {
1774 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1775 }
1776}
1777
1778void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
1779 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1780 for (uint32_t i = 0; i < bindInfoCount; i++) {
1781 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1782 }
1783}
1784
1785void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
1786 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1787 if (buffer_state) {
1788 buffer_state->requirements = *pMemoryRequirements;
1789 buffer_state->memory_requirements_checked = true;
1790 }
1791}
1792
1793void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
1794 VkMemoryRequirements *pMemoryRequirements) {
1795 RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
1796}
1797
1798void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
1799 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1800 VkMemoryRequirements2KHR *pMemoryRequirements) {
1801 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1802}
1803
1804void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
1805 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1806 VkMemoryRequirements2KHR *pMemoryRequirements) {
1807 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1808}
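
// Illustrative application-side flow (assumes device, buffer, and memory are valid and the
// allocation satisfies the reported requirements); the query is recorded by
// RecordGetBufferMemoryRequirementsState, and the bind by UpdateBindBufferMemoryState above:
//     VkMemoryRequirements reqs;
//     vkGetBufferMemoryRequirements(device, buffer, &reqs);
//     vkBindBufferMemory(device, buffer, memory, 0 /* memoryOffset */);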
1809
1810void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
1811 IMAGE_STATE *image_state = GetImageState(image);
1812 if (image_state) {
1813 image_state->requirements = *pMemoryRequirements;
1814 image_state->memory_requirements_checked = true;
1815 }
1816}
1817
1818void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
1819 VkMemoryRequirements *pMemoryRequirements) {
1820 RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
1821}
1822
1823void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
1824 VkMemoryRequirements2 *pMemoryRequirements) {
1825 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1826}
1827
1828void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
1829 const VkImageMemoryRequirementsInfo2 *pInfo,
1830 VkMemoryRequirements2 *pMemoryRequirements) {
1831 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1832}
1833
1834static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
1835 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
1836 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
1837 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
1838 image_state->sparse_metadata_required = true;
1839 }
1840}
1841
1842void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
1843 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
1844 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
1845 auto image_state = GetImageState(image);
1846 image_state->get_sparse_reqs_called = true;
1847 if (!pSparseMemoryRequirements) return;
1848 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1849 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
1850 }
1851}
1852
1853void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
1854 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1855 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1856 auto image_state = GetImageState(pInfo->image);
1857 image_state->get_sparse_reqs_called = true;
1858 if (!pSparseMemoryRequirements) return;
1859 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1860 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1861 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1862 }
1863}
1864
1865void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
1866 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1867 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1868 auto image_state = GetImageState(pInfo->image);
1869 image_state->get_sparse_reqs_called = true;
1870 if (!pSparseMemoryRequirements) return;
1871 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1872 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1873 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1874 }
1875}
1876
1877void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1878 const VkAllocationCallbacks *pAllocator) {
1879 if (!shaderModule) return;
1880 shaderModuleMap.erase(shaderModule);
1881}
1882
1883void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
1884 const VkAllocationCallbacks *pAllocator) {
1885 if (!pipeline) return;
1886 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
1887 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
1888 // Any bound cmd buffers are now invalid
1889 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
1890 pipelineMap.erase(pipeline);
1891}
1892
1893void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1894 const VkAllocationCallbacks *pAllocator) {
1895 if (!pipelineLayout) return;
1896 pipelineLayoutMap.erase(pipelineLayout);
1897}
1898
1899void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
1900 const VkAllocationCallbacks *pAllocator) {
1901 if (!sampler) return;
1902 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
1903 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
1904 // Any bound cmd buffers are now invalid
1905 if (sampler_state) {
1906 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
1907 }
1908 samplerMap.erase(sampler);
1909}
1910
1911void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1912 const VkAllocationCallbacks *pAllocator) {
1913 if (!descriptorSetLayout) return;
1914 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
1915 if (layout_it != descriptorSetLayoutMap.end()) {
1916 layout_it->second.get()->MarkDestroyed();
1917 descriptorSetLayoutMap.erase(layout_it);
1918 }
1919}
1920
1921void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1922 const VkAllocationCallbacks *pAllocator) {
1923 if (!descriptorPool) return;
1924 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
1925 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
1926 if (desc_pool_state) {
1927 // Any bound cmd buffers are now invalid
1928 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
1929 // Free sets that were in this pool
1930 for (auto ds : desc_pool_state->sets) {
1931 FreeDescriptorSet(ds);
1932 }
1933 descriptorPoolMap.erase(descriptorPool);
1934 }
1935}
1936
1937// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
1938void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
1939 const VkCommandBuffer *command_buffers) {
1940 for (uint32_t i = 0; i < command_buffer_count; i++) {
1941 auto cb_state = GetCBState(command_buffers[i]);
1942 // Remove references to command buffer's state and delete
1943 if (cb_state) {
1944 // reset prior to delete, removing various references to it.
1945 // TODO: fix this, it's insane.
1946 ResetCommandBufferState(cb_state->commandBuffer);
1947 // Remove the cb_state's references from COMMAND_POOL_STATEs
1948 pool_state->commandBuffers.erase(command_buffers[i]);
1949 // Remove the cb debug labels
1950 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
1951 // Remove CBState from CB map
1952 commandBufferMap.erase(cb_state->commandBuffer);
1953 }
1954 }
1955}
1956
1957void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
1958 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
1959 auto pPool = GetCommandPoolState(commandPool);
1960 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
1961}
1962
1963void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1964 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
1965 VkResult result) {
1966 if (VK_SUCCESS != result) return;
1967 std::unique_ptr<COMMAND_POOL_STATE> cmd_pool_state(new COMMAND_POOL_STATE{});
1968 cmd_pool_state->createFlags = pCreateInfo->flags;
1969 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
1970 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
1971}
1972
1973void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1974 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
1975 VkResult result) {
1976 if (VK_SUCCESS != result) return;
1977 std::unique_ptr<QUERY_POOL_STATE> query_pool_state(new QUERY_POOL_STATE{});
1978 query_pool_state->createInfo = *pCreateInfo;
1979 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
1980
1981 QueryObject query_obj{*pQueryPool, 0u};
1982 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
1983 query_obj.query = i;
1984 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
1985 }
1986}
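
// Illustrative creation (assumes device is valid): a pool with queryCount == 4 seeds
// queryToStateMap with entries {pool, 0}..{pool, 3}, each QUERYSTATE_UNKNOWN, which later
// query commands advance through the query state machine:
//     VkQueryPoolCreateInfo qpci = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO};
//     qpci.queryType = VK_QUERY_TYPE_OCCLUSION;
//     qpci.queryCount = 4;
//     VkQueryPool pool;
//     vkCreateQueryPool(device, &qpci, nullptr, &pool);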
1987
1988void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
1989 const VkAllocationCallbacks *pAllocator) {
1990 if (!commandPool) return;
1991 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
1992 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
1993 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
1994 if (cp_state) {
1995 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
1996 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
1997 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
1998 commandPoolMap.erase(commandPool);
1999 }
2000}
2001
2002void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2003 VkCommandPoolResetFlags flags, VkResult result) {
2004 if (VK_SUCCESS != result) return;
2005 // Reset all of the CBs allocated from this pool
2006 auto command_pool_state = GetCommandPoolState(commandPool);
2007 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2008 ResetCommandBufferState(cmdBuffer);
2009 }
2010}
2011
2012void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2013 VkResult result) {
2014 for (uint32_t i = 0; i < fenceCount; ++i) {
2015 auto pFence = GetFenceState(pFences[i]);
2016 if (pFence) {
2017 if (pFence->scope == kSyncScopeInternal) {
2018 pFence->state = FENCE_UNSIGNALED;
2019 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2020 pFence->scope = kSyncScopeInternal;
2021 }
2022 }
2023 }
2024}
2025
2026// For given cb_nodes, invalidate them and track object causing invalidation
2027void ValidationStateTracker::InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> const &cb_nodes,
2028 const VulkanTypedHandle &obj) {
2029 for (auto cb_node : cb_nodes) {
2030 if (cb_node->state == CB_RECORDING) {
2031 cb_node->state = CB_INVALID_INCOMPLETE;
2032 } else if (cb_node->state == CB_RECORDED) {
2033 cb_node->state = CB_INVALID_COMPLETE;
2034 }
2035 cb_node->broken_bindings.push_back(obj);
2036
2037 // if secondary, then propagate the invalidation to the primaries that will call us.
2038 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2039 InvalidateCommandBuffers(cb_node->linkedCommandBuffers, obj);
2040 }
2041 }
2042}
2043
2044void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2045 const VkAllocationCallbacks *pAllocator) {
2046 if (!framebuffer) return;
2047 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2048 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2049 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
2050 frameBufferMap.erase(framebuffer);
2051}
2052
2053void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2054 const VkAllocationCallbacks *pAllocator) {
2055 if (!renderPass) return;
2056 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2057 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2058 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
2059 renderPassMap.erase(renderPass);
2060}
2061
2062void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2063 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2064 if (VK_SUCCESS != result) return;
2065 std::unique_ptr<FENCE_STATE> fence_state(new FENCE_STATE{});
2066 fence_state->fence = *pFence;
2067 fence_state->createInfo = *pCreateInfo;
2068 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2069 fenceMap[*pFence] = std::move(fence_state);
2070}
2071
2072bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2073 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2074 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2075 void *cgpl_state_data) {
2076 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2077 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2078 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2079 cgpl_state->pipe_state.reserve(count);
2080 for (uint32_t i = 0; i < count; i++) {
2081 cgpl_state->pipe_state.push_back(std::unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2082 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i],
2083 GetRenderPassStateSharedPtr(pCreateInfos[i].renderPass));
2084 (cgpl_state->pipe_state)[i]->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2085 }
2086 return false;
2087}
2088
2089void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2090 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2091 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2092 VkResult result, void *cgpl_state_data) {
2093 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2094 // This API may create pipelines regardless of the return value
2095 for (uint32_t i = 0; i < count; i++) {
2096 if (pPipelines[i] != VK_NULL_HANDLE) {
2097 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2098 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2099 }
2100 }
2101 cgpl_state->pipe_state.clear();
2102}
2103
2104bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2105 const VkComputePipelineCreateInfo *pCreateInfos,
2106 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2107 void *ccpl_state_data) {
2108 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2109 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2110 ccpl_state->pipe_state.reserve(count);
2111 for (uint32_t i = 0; i < count; i++) {
2112 // Create and initialize internal tracking data structure
2113 ccpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2114 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
2115 ccpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2116 }
2117 return false;
2118}
2119
2120void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2121 const VkComputePipelineCreateInfo *pCreateInfos,
2122 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2123 VkResult result, void *ccpl_state_data) {
2124 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2125
2126 // This API may create pipelines regardless of the return value
2127 for (uint32_t i = 0; i < count; i++) {
2128 if (pPipelines[i] != VK_NULL_HANDLE) {
2129 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2130 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2131 }
2132 }
2133 ccpl_state->pipe_state.clear();
2134}
2135
2136bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2137 uint32_t count,
2138 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2139 const VkAllocationCallbacks *pAllocator,
2140 VkPipeline *pPipelines, void *crtpl_state_data) {
2141 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2142 crtpl_state->pipe_state.reserve(count);
2143 for (uint32_t i = 0; i < count; i++) {
2144 // Create and initialize internal tracking data structure
2145 crtpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2146 crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
2147 crtpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2148 }
2149 return false;
2150}
2151
2152void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2153 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2154 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2155 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2156 // This API may create pipelines regardless of the return value
2157 for (uint32_t i = 0; i < count; i++) {
2158 if (pPipelines[i] != VK_NULL_HANDLE) {
2159 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2160 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2161 }
2162 }
2163 crtpl_state->pipe_state.clear();
2164}
2165
2166void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2167 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2168 VkResult result) {
    if (VK_SUCCESS != result) return;
    samplerMap[*pSampler] = unique_ptr<SAMPLER_STATE>(new SAMPLER_STATE(pSampler, pCreateInfo));
2170}
2171
2172void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2173 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2174 const VkAllocationCallbacks *pAllocator,
2175 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2176 if (VK_SUCCESS != result) return;
2177 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2178}
2179
2180 // Provides a repeatable sort order; not very useful for "memory in range" searches
2181struct PushConstantRangeCompare {
2182 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2183 if (lhs->offset == rhs->offset) {
2184 if (lhs->size == rhs->size) {
2185 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2186 return lhs->stageFlags < rhs->stageFlags;
2187 }
2188 // If the offsets are the same then sorting by the end of range is useful for validation
2189 return lhs->size < rhs->size;
2190 }
2191 return lhs->offset < rhs->offset;
2192 }
2193};
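
// Illustrative ordering under PushConstantRangeCompare: {offset 0, size 4} sorts before
// {offset 0, size 8}, which sorts before {offset 4, size 4}; ranges that differ only in
// stageFlags still compare unequal, so distinct ranges never alias in the sorted set below.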
2194
2195static PushConstantRangesDict push_constant_ranges_dict;
2196
2197PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2198 if (!info->pPushConstantRanges) {
2199 // Hand back the empty entry (creating as needed)...
2200 return push_constant_ranges_dict.look_up(PushConstantRanges());
2201 }
2202
2203 // Sort the input ranges to ensure equivalent ranges map to the same id
2204 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2205 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2206 sorted.insert(info->pPushConstantRanges + i);
2207 }
2208
2209     PushConstantRanges ranges;  // start empty; constructing with a size would prepend value-initialized entries
2210 for (const auto range : sorted) {
2211 ranges.emplace_back(*range);
2212 }
2213 return push_constant_ranges_dict.look_up(std::move(ranges));
2214}
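
// Consequence of the sort above (illustrative): two VkPipelineLayoutCreateInfo structures that
// list identical push constant ranges in different orders yield the same PushConstantRangesId,
// so later compatibility checks reduce to a cheap id comparison.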
2215
2216 // Dictionary of the canonical form of a pipeline layout's list of descriptor set layouts
2217static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2218
2219// Dictionary of canonical form of the "compatible for set" records
2220static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2221
2222static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2223 const PipelineLayoutSetLayoutsId set_layouts_id) {
2224 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2225}
2226
2227void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2228 const VkAllocationCallbacks *pAllocator,
2229 VkPipelineLayout *pPipelineLayout, VkResult result) {
2230 if (VK_SUCCESS != result) return;
2231
2232 std::unique_ptr<PIPELINE_LAYOUT_STATE> pipeline_layout_state(new PIPELINE_LAYOUT_STATE{});
2233 pipeline_layout_state->layout = *pPipelineLayout;
2234 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2235 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2236 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2237 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayout(this, pCreateInfo->pSetLayouts[i]);
2238 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2239 }
2240
2241 // Get canonical form IDs for the "compatible for set" contents
2242 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2243 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2244 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2245
2246     // Create a table of "compatible for set N" canonical forms for trivial-accept validation
2247 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2248 pipeline_layout_state->compat_for_set.emplace_back(
2249 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2250 }
2251 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2252}
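
// Note on the table built above: under the spec's pipeline layout compatibility rules (two
// layouts are compatible for set N when their push constant ranges and descriptor set layouts
// 0..N are identically defined), each compat_for_set[i] folds the set index, the canonical
// push constant ranges, and the canonical set-layout list into a single comparable id.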
2253
2254void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2255 const VkAllocationCallbacks *pAllocator,
2256 VkDescriptorPool *pDescriptorPool, VkResult result) {
2257 if (VK_SUCCESS != result) return;
2258 descriptorPoolMap[*pDescriptorPool] =
2259 std::unique_ptr<DESCRIPTOR_POOL_STATE>(new DESCRIPTOR_POOL_STATE(*pDescriptorPool, pCreateInfo));
2260}
2261
2262void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2263 VkDescriptorPoolResetFlags flags, VkResult result) {
2264 if (VK_SUCCESS != result) return;
2265 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
2266 // TODO: validate flags
2267     // For every set in this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
2268 for (auto ds : pPool->sets) {
2269 FreeDescriptorSet(ds);
2270 }
2271 pPool->sets.clear();
2272 // Reset available count for each type and available sets for this pool
2273 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
2274 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
2275 }
2276 pPool->availableSets = pPool->maxSets;
2277}
2278
2279bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2280 const VkDescriptorSetAllocateInfo *pAllocateInfo,
2281 VkDescriptorSet *pDescriptorSets, void *ads_state_data) {
2282 // Always update common data
2283 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2284 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2285 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2286
2287 return false;
2288}
2289
2290 // Allocation state was good and the call down the chain was made, so update state based on the allocated descriptor sets
2291void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2292 VkDescriptorSet *pDescriptorSets, VkResult result,
2293 void *ads_state_data) {
2294 if (VK_SUCCESS != result) return;
2295 // All the updates are contained in a single cvdescriptorset function
2296 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2297 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2298 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2299}
2300
2301void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2302 const VkDescriptorSet *pDescriptorSets) {
2303 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2304 // Update available descriptor sets in pool
2305 pool_state->availableSets += count;
2306
2307     // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and setMap
2308 for (uint32_t i = 0; i < count; ++i) {
2309 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2310 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2311 uint32_t type_index = 0, descriptor_count = 0;
2312 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2313 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2314 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2315 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2316 }
2317 FreeDescriptorSet(descriptor_set);
2318 pool_state->sets.erase(descriptor_set);
2319 }
2320 }
2321}
2322
2323void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2324 const VkWriteDescriptorSet *pDescriptorWrites,
2325 uint32_t descriptorCopyCount,
2326 const VkCopyDescriptorSet *pDescriptorCopies) {
2327 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2328 pDescriptorCopies);
2329}
2330
2331void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2332 VkCommandBuffer *pCommandBuffer, VkResult result) {
2333 if (VK_SUCCESS != result) return;
2334 auto pPool = GetCommandPoolState(pCreateInfo->commandPool);
2335 if (pPool) {
2336 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2337 // Add command buffer to its commandPool map
2338 pPool->commandBuffers.insert(pCommandBuffer[i]);
2339 std::unique_ptr<CMD_BUFFER_STATE> pCB(new CMD_BUFFER_STATE{});
2340 pCB->createInfo = *pCreateInfo;
2341 pCB->device = device;
2342 // Add command buffer to map
2343 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
2344 ResetCommandBufferState(pCommandBuffer[i]);
2345 }
2346 }
2347}
2348
2349// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
2350void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
2351 AddCommandBufferBinding(&fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer),
2352 cb_state);
2353
2354 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
2355 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
2356 auto view_state = GetAttachmentImageViewState(fb_state, attachment);
2357 if (view_state) {
2358 AddCommandBufferBindingImageView(cb_state, view_state);
2359 }
2360 }
2361}
2362
2363void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2364 const VkCommandBufferBeginInfo *pBeginInfo) {
2365 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2366 if (!cb_state) return;
2367     // This implicitly resets the command buffer, so make sure any in-flight fence is done and then clear memory references
2368 ClearCmdBufAndMemReferences(cb_state);
2369 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
2370 // Secondary Command Buffer
2371 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
2372 if (pInfo) {
2373 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
2374 assert(pInfo->renderPass);
2375 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
2376 if (framebuffer) {
2377 // Connect this framebuffer and its children to this cmdBuffer
2378 AddFramebufferBinding(cb_state, framebuffer);
2379 }
2380 }
2381 }
2382 }
2383 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2384 ResetCommandBufferState(commandBuffer);
2385 }
2386 // Set updated state here in case implicit reset occurs above
2387 cb_state->state = CB_RECORDING;
2388 cb_state->beginInfo = *pBeginInfo;
2389 if (cb_state->beginInfo.pInheritanceInfo) {
2390 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2391 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2392         // If we are a secondary command buffer and inheriting, update the items we should inherit.
2393 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2394 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
2395 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
2396 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
2397 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
2398 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
2399 }
2400 }
2401
2402 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
2403 if (chained_device_group_struct) {
2404 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2405 } else {
2406 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2407 }
2408}
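
// Illustrative secondary-command-buffer begin (assumes secondary_cb, render_pass, and fb are
// valid handles); this drives the inheritance path above, which records the active render
// pass, subpass, and framebuffer and registers the framebuffer binding:
//     VkCommandBufferInheritanceInfo inherit = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO};
//     inherit.renderPass = render_pass;
//     inherit.subpass = 0;
//     inherit.framebuffer = fb;
//     VkCommandBufferBeginInfo begin = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
//     begin.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
//     begin.pInheritanceInfo = &inherit;
//     vkBeginCommandBuffer(secondary_cb, &begin);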
2409
2410void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2411 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2412 if (!cb_state) return;
2413     // Cached validation applies only to a particular recording of a particular command buffer.
2414 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
2415 descriptor_set->ClearCachedValidation(cb_state);
2416 }
2417 cb_state->validated_descriptor_sets.clear();
2418 if (VK_SUCCESS == result) {
2419 cb_state->state = CB_RECORDED;
2420 }
2421}
2422
2423void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2424 VkResult result) {
2425 if (VK_SUCCESS == result) {
2426 ResetCommandBufferState(commandBuffer);
2427 }
2428}
2429
2430CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2431 // initially assume everything is static state
2432 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2433
2434 if (ds) {
2435 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
2436 switch (ds->pDynamicStates[i]) {
2437 case VK_DYNAMIC_STATE_LINE_WIDTH:
2438 flags &= ~CBSTATUS_LINE_WIDTH_SET;
2439 break;
2440 case VK_DYNAMIC_STATE_DEPTH_BIAS:
2441 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
2442 break;
2443 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
2444 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
2445 break;
2446 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
2447 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
2448 break;
2449 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
2450 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
2451 break;
2452 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
2453 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
2454 break;
2455 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
2456 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
2457 break;
2458 case VK_DYNAMIC_STATE_SCISSOR:
2459 flags &= ~CBSTATUS_SCISSOR_SET;
2460 break;
2461 case VK_DYNAMIC_STATE_VIEWPORT:
2462 flags &= ~CBSTATUS_VIEWPORT_SET;
2463 break;
2464 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
2465 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2466 break;
2467 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
2468 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
2469 break;
2470 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
2471 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
2472 break;
2473 default:
2474 break;
2475 }
2476 }
2477 }
2478
2479 return flags;
2480}
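
// Illustrative example: a pipeline whose pDynamicState lists only VK_DYNAMIC_STATE_VIEWPORT
// yields CBSTATUS_ALL_STATE_SET & ~CBSTATUS_VIEWPORT_SET, i.e. everything except the viewport
// is baked into the pipeline, and the viewport must be set with vkCmdSetViewport before drawing.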
2481
2482// Validation cache:
2483// CV is the bottommost implementor of this extension. Don't pass calls down.
2484 // Utility function to set collective state for a pipeline
2485void SetPipelineState(PIPELINE_STATE *pPipe) {
2486 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2487 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2488 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2489 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2490 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2491 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2492 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2493 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2494 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2495 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2496 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2497 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2498 pPipe->blendConstantsEnabled = true;
2499 }
2500 }
2501 }
2502 }
2503}
2504
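// Hedged sketch of the factor test above (ExampleUsesBlendConstants is a hypothetical
// helper, not used by the layer): the four CONSTANT_* blend factors occupy a contiguous
// range of the VkBlendFactor enum, which is why SetPipelineState can detect them with
// simple >=/<= comparisons instead of a four-way switch.
static inline bool ExampleUsesBlendConstants(VkBlendFactor factor) {
    // True for CONSTANT_COLOR, ONE_MINUS_CONSTANT_COLOR, CONSTANT_ALPHA,
    // and ONE_MINUS_CONSTANT_ALPHA.
    return (factor >= VK_BLEND_FACTOR_CONSTANT_COLOR) && (factor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA);
}
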
2505void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2506 VkPipeline pipeline) {
2507 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2508 assert(cb_state);
2509
2510 auto pipe_state = GetPipelineState(pipeline);
2511 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
2512 cb_state->status &= ~cb_state->static_status;
2513 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
2514 cb_state->status |= cb_state->static_status;
2515 }
2516 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout.layout);
2517 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
2518 SetPipelineState(pipe_state);
2519 AddCommandBufferBinding(&pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
2520}
2521
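// Worked example of the status/static_status arithmetic above (values hypothetical):
// suppose the previously bound pipeline provided line width statically and the new
// pipeline makes it dynamic. Then:
//   status &= ~static_status;                   // drop bits the old pipeline supplied statically
//   static_status = MakeStaticStateMask(...);   // now everything except LINE_WIDTH
//   status |= static_status;                    // bits the new pipeline supplies are trivially "set"
// CBSTATUS_LINE_WIDTH_SET therefore stays clear until vkCmdSetLineWidth is recorded.
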
2522void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2523 uint32_t viewportCount, const VkViewport *pViewports) {
2524 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2525 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
2526 cb_state->status |= CBSTATUS_VIEWPORT_SET;
2527}
2528
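// Worked example of the mask update above: firstViewport = 1, viewportCount = 2 yields
// ((1u << 2) - 1u) << 1 == 0b110, i.e. viewports 1 and 2 are marked as set. A hedged
// standalone sketch of the same computation (ExampleViewportMask is a hypothetical name):
static inline uint32_t ExampleViewportMask(uint32_t first_viewport, uint32_t viewport_count) {
    // Low 'viewport_count' bits, shifted up so the run of ones starts at 'first_viewport'.
    return ((1u << viewport_count) - 1u) << first_viewport;
}
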
2529void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
2530 uint32_t exclusiveScissorCount,
2531 const VkRect2D *pExclusiveScissors) {
2532 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2533 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
2534 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
2535 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2536}
2537
2538void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
2539 VkImageLayout imageLayout) {
2540 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2541
2542 if (imageView != VK_NULL_HANDLE) {
2543 auto view_state = GetImageViewState(imageView);
2544 AddCommandBufferBindingImageView(cb_state, view_state);
2545 }
2546}
2547
2548void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2549 uint32_t viewportCount,
2550 const VkShadingRatePaletteNV *pShadingRatePalettes) {
2551 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2552 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
2553 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
2554 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
2555}
2556
2557void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
2558 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
2559 const VkAllocationCallbacks *pAllocator,
2560 VkAccelerationStructureNV *pAccelerationStructure,
2561 VkResult result) {
2562 if (VK_SUCCESS != result) return;
2563 std::unique_ptr<ACCELERATION_STRUCTURE_STATE> as_state(new ACCELERATION_STRUCTURE_STATE(*pAccelerationStructure, pCreateInfo));
2564
2565    // Query the requirements in case the application doesn't, to avoid a query at bind/validation time
2566 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
2567 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2568 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
2569 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
2570 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
2571
2572 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
2573 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2574 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
2575 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2576 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
2577 &as_state->build_scratch_memory_requirements);
2578
2579 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
2580 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2581 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
2582 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2583 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
2584 &as_state->update_scratch_memory_requirements);
2585
2586 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
2587}
2588
2589void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
2590 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
2591 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
2592 if (as_state != nullptr) {
2593 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
2594 as_state->memory_requirements = *pMemoryRequirements;
2595 as_state->memory_requirements_checked = true;
2596 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
2597 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
2598 as_state->build_scratch_memory_requirements_checked = true;
2599 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
2600 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
2601 as_state->update_scratch_memory_requirements_checked = true;
2602 }
2603 }
2604}
2605
2606void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
2607 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
2608 if (VK_SUCCESS != result) return;
2609 for (uint32_t i = 0; i < bindInfoCount; i++) {
2610 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
2611
2612 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
2613 if (as_state) {
2614 // Track bound memory range information
2615 auto mem_info = GetDevMemState(info.memory);
2616 if (mem_info) {
2617 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
2618 as_state->requirements);
2619 }
2620 // Track objects tied to memory
2621 SetMemBinding(info.memory, as_state, info.memoryOffset,
2622 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
2623
2624 // GPU validation of top level acceleration structure building needs acceleration structure handles.
2625 if (enabled.gpu_validation) {
2626                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);  // 8-byte (uint64_t) handle
2627 }
2628 }
2629 }
2630}
2631
2632void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
2633 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
2634 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
2635 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2636 if (cb_state == nullptr) {
2637 return;
2638 }
2639
2640 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2641 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2642 if (dst_as_state != nullptr) {
2643 dst_as_state->built = true;
2644 dst_as_state->build_info.initialize(pInfo);
2645 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2646 }
2647 if (src_as_state != nullptr) {
2648 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2649 }
2650 cb_state->hasBuildAccelerationStructureCmd = true;
2651}
2652
2653void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
2654 VkAccelerationStructureNV dst,
2655 VkAccelerationStructureNV src,
2656 VkCopyAccelerationStructureModeNV mode) {
2657 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2658 if (cb_state) {
2659 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2660 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2661 if (dst_as_state != nullptr && src_as_state != nullptr) {
2662 dst_as_state->built = true;
2663 dst_as_state->build_info = src_as_state->build_info;
2664 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2665 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2666 }
2667 }
2668}
2669
2670void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
2671 VkAccelerationStructureNV accelerationStructure,
2672 const VkAllocationCallbacks *pAllocator) {
2673 if (!accelerationStructure) return;
2674 auto *as_state = GetAccelerationStructureState(accelerationStructure);
2675 if (as_state) {
2676 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
2677 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
2678 for (auto mem_binding : as_state->GetBoundMemory()) {
2679 auto mem_info = GetDevMemState(mem_binding);
2680 if (mem_info) {
2681 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
2682 }
2683 }
2684 ClearMemoryObjectBindings(obj_struct);
2685 accelerationStructureMap.erase(accelerationStructure);
2686 }
2687}
2688
2689void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
2690 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2691 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
2692}
2693
2694void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
2695 uint16_t lineStipplePattern) {
2696 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2697 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
2698}
2699
2700void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
2701 float depthBiasClamp, float depthBiasSlopeFactor) {
2702 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2703 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
2704}
2705
2706void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
2707 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2708 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
2709}
2710
2711void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
2712 float maxDepthBounds) {
2713 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2714 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
2715}
2716
2717void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2718 uint32_t compareMask) {
2719 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2720 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
2721}
2722
2723void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2724 uint32_t writeMask) {
2725 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2726 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
2727}
2728
2729void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2730 uint32_t reference) {
2731 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2732 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
2733}
2734
2735// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
2736// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
2737// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
2738void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
2739 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
2740 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
2741 cvdescriptorset::DescriptorSet *push_descriptor_set,
2742 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
2743 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
2744 // Defensive
2745 assert(pipeline_layout);
2746 if (!pipeline_layout) return;
2747
2748 uint32_t required_size = first_set + set_count;
2749 const uint32_t last_binding_index = required_size - 1;
2750 assert(last_binding_index < pipeline_layout->compat_for_set.size());
2751
2752 // Some useful shorthand
2753 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
2754 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
2755 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
2756
2757 // We need this three times in this function, but nowhere else
2758 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
2759 if (ds && ds->IsPushDescriptor()) {
2760 assert(ds == last_bound.push_descriptor_set.get());
2761 last_bound.push_descriptor_set = nullptr;
2762 return true;
2763 }
2764 return false;
2765 };
2766
2767 // Clean up the "disturbed" before and after the range to be set
2768 if (required_size < current_size) {
2769 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
2770            // We're disturbing entries past the last one; we'll shrink below, but first check for and clean up the push_descriptor
2771 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
2772 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
2773 }
2774 } else {
2775 // We're not disturbing past last, so leave the upper binding data alone.
2776 required_size = current_size;
2777 }
2778 }
2779
2780 // We resize if we need more set entries or if those past "last" are disturbed
2781 if (required_size != current_size) {
2782 last_bound.per_set.resize(required_size);
2783 }
2784
2785 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
2786 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
2787 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
2788 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
2789 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
2790 last_bound.per_set[set_idx].dynamicOffsets.clear();
2791 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
2792 }
2793 }
2794
2795 // Now update the bound sets with the input sets
2796 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
2797 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
2798 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
2799 cvdescriptorset::DescriptorSet *descriptor_set =
2800 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
2801
2802 // Record binding (or push)
2803 if (descriptor_set != last_bound.push_descriptor_set.get()) {
2804            // Only clean up the push descriptors if they aren't the currently used set.
2805 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
2806 }
2807 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
2808 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
2809
2810 if (descriptor_set) {
2811 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
2812 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
2813 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
2814 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
2815 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
2816 input_dynamic_offsets = end_offset;
2817 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
2818 } else {
2819 last_bound.per_set[set_idx].dynamicOffsets.clear();
2820 }
2821 if (!descriptor_set->IsPushDescriptor()) {
2822 // Can't cache validation of push_descriptors
2823 cb_state->validated_descriptor_sets.insert(descriptor_set);
2824 }
2825 }
2826 }
2827}
2828
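// Worked example of the "disturbed" rules above (hypothetical scenario): sets 0..3 are
// bound, then set 1 is rebound with a layout whose compat_id differs at index 1.
//   - set 0 survives: its compat_id still matches the new layout's prefix
//   - set 1 is overwritten by the incoming descriptor set
//   - sets 2..3 are dropped (per_set shrinks, push descriptors cleaned up), since
//     pipeline layout compatibility only holds up to the first incompatible set index.
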
2829// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
2830void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
2831 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
2832 uint32_t firstSet, uint32_t setCount,
2833 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
2834 const uint32_t *pDynamicOffsets) {
2835 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2836 auto pipeline_layout = GetPipelineLayout(layout);
2837
2838 // Resize binding arrays
2839 uint32_t last_set_index = firstSet + setCount - 1;
2840 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
2841 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
2842 }
2843
2844 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
2845 dynamicOffsetCount, pDynamicOffsets);
2846 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
2847 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
2848}
2849
2850void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
2851 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
2852 const VkWriteDescriptorSet *pDescriptorWrites) {
2853 const auto &pipeline_layout = GetPipelineLayout(layout);
2854 // Short circuit invalid updates
2855 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
2856 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
2857 return;
2858
2859 // We need a descriptor set to update the bindings with, compatible with the passed layout
2860 const auto dsl = pipeline_layout->set_layouts[set];
2861 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
2862 auto &push_descriptor_set = last_bound.push_descriptor_set;
2863    // If we are disturbing the current push_descriptor_set, clear it
2864 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
2865 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, 0, dsl, 0, this));
2866 }
2867
2868 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
2869 nullptr);
2870 last_bound.pipeline_layout = layout;
2871
2872 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
2873 push_descriptor_set->PerformPushDescriptorsUpdate(descriptorWriteCount, pDescriptorWrites);
2874}
2875
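// Hedged application-side sketch of the call recorded above (cmd, layout, and
// buffer_info are assumptions; set 0 is illustrative). The application allocates no
// VkDescriptorSet; the write lands in command-buffer state via the emulated set:
//
//   VkWriteDescriptorSet write = {};
//   write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//   write.dstBinding = 0;
//   write.descriptorCount = 1;
//   write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//   write.pBufferInfo = &buffer_info;
//   vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout,
//                             /*set=*/0, /*writeCount=*/1, &write);
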
2876void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
2877 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
2878 uint32_t set, uint32_t descriptorWriteCount,
2879 const VkWriteDescriptorSet *pDescriptorWrites) {
2880 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2881 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
2882}
2883
2884void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2885 VkIndexType indexType) {
2886 auto buffer_state = GetBufferState(buffer);
2887 auto cb_state = GetCBState(commandBuffer);
2888
2889 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
2890 cb_state->index_buffer_binding.buffer = buffer;
2891 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
2892 cb_state->index_buffer_binding.offset = offset;
2893 cb_state->index_buffer_binding.index_type = indexType;
2894    // Add binding for this index buffer to this command buffer
2895 AddCommandBufferBindingBuffer(cb_state, buffer_state);
2896}
2897
2898void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
2899 uint32_t bindingCount, const VkBuffer *pBuffers,
2900 const VkDeviceSize *pOffsets) {
2901 auto cb_state = GetCBState(commandBuffer);
2902
2903 uint32_t end = firstBinding + bindingCount;
2904 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
2905 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
2906 }
2907
2908 for (uint32_t i = 0; i < bindingCount; ++i) {
2909 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
2910 vertex_buffer_binding.buffer = pBuffers[i];
2911 vertex_buffer_binding.offset = pOffsets[i];
2912        // Add binding for this vertex buffer to this command buffer
2913 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
2914 }
2915}
2916
2917void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
2918 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
2919 auto cb_state = GetCBState(commandBuffer);
2920 auto dst_buffer_state = GetBufferState(dstBuffer);
2921
2922 // Update bindings between buffer and cmd buffer
2923 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
2924}
2925
2926bool ValidationStateTracker::SetEventStageMask(VkQueue queue, VkCommandBuffer commandBuffer, VkEvent event,
2927 VkPipelineStageFlags stageMask) {
2928 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
2929 if (pCB) {
2930 pCB->eventToStageMap[event] = stageMask;
2931 }
2932 auto queue_data = queueMap.find(queue);
2933 if (queue_data != queueMap.end()) {
2934 queue_data->second.eventToStageMap[event] = stageMask;
2935 }
2936 return false;
2937}
2938
2939void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2940 VkPipelineStageFlags stageMask) {
2941 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2942 auto event_state = GetEventState(event);
2943 if (event_state) {
2944 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
2945 event_state->cb_bindings.insert(cb_state);
2946 }
2947 cb_state->events.push_back(event);
2948 if (!cb_state->waitedEvents.count(event)) {
2949 cb_state->writeEventsBeforeWait.push_back(event);
2950 }
2951 cb_state->eventUpdates.emplace_back([=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, stageMask); });
2952}
2953
2954void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2955 VkPipelineStageFlags stageMask) {
2956 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2957 auto event_state = GetEventState(event);
2958 if (event_state) {
2959 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
2960 event_state->cb_bindings.insert(cb_state);
2961 }
2962 cb_state->events.push_back(event);
2963 if (!cb_state->waitedEvents.count(event)) {
2964 cb_state->writeEventsBeforeWait.push_back(event);
2965 }
2966
2967 cb_state->eventUpdates.emplace_back(
2968 [=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, VkPipelineStageFlags(0)); });
2969}
2970
2971void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2972 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
2973 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2974 uint32_t bufferMemoryBarrierCount,
2975 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2976 uint32_t imageMemoryBarrierCount,
2977 const VkImageMemoryBarrier *pImageMemoryBarriers) {
2978 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2979 for (uint32_t i = 0; i < eventCount; ++i) {
2980 auto event_state = GetEventState(pEvents[i]);
2981 if (event_state) {
2982 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent), cb_state);
2983 event_state->cb_bindings.insert(cb_state);
2984 }
2985 cb_state->waitedEvents.insert(pEvents[i]);
2986 cb_state->events.push_back(pEvents[i]);
2987 }
2988}
2989
2990bool ValidationStateTracker::SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, QueryState value) {
2991 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
2992 if (pCB) {
2993 pCB->queryToStateMap[object] = value;
2994 }
2995 auto queue_data = queueMap.find(queue);
2996 if (queue_data != queueMap.end()) {
2997 queue_data->second.queryToStateMap[object] = value;
2998 }
2999 return false;
3000}
3001
3002bool ValidationStateTracker::SetQueryStateMulti(VkQueue queue, VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3003 uint32_t firstQuery, uint32_t queryCount, QueryState value) {
3004 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
3005 auto queue_data = queueMap.find(queue);
3006
3007 for (uint32_t i = 0; i < queryCount; i++) {
3008 QueryObject object = {queryPool, firstQuery + i};
3009 if (pCB) {
3010 pCB->queryToStateMap[object] = value;
3011 }
3012 if (queue_data != queueMap.end()) {
3013 queue_data->second.queryToStateMap[object] = value;
3014 }
3015 }
3016 return false;
3017}
3018
3019void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
3020 cb_state->activeQueries.insert(query_obj);
3021 cb_state->startedQueries.insert(query_obj);
3022 cb_state->queryUpdates.emplace_back([this, cb_state, query_obj](VkQueue q) {
3023 SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_RUNNING);
3024 return false;
3025 });
3026 AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
3027 VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
3028}
3029
3030void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3031 VkFlags flags) {
3032 QueryObject query = {queryPool, slot};
3033 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3034 RecordCmdBeginQuery(cb_state, query);
3035}
3036
3037void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
3038 cb_state->activeQueries.erase(query_obj);
3039 cb_state->queryUpdates.emplace_back(
3040 [this, cb_state, query_obj](VkQueue q) { return SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_ENDED); });
3041 AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
3042 VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
3043}
3044
3045void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
3046 QueryObject query_obj = {queryPool, slot};
3047 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3048 RecordCmdEndQuery(cb_state, query_obj);
3049}
3050
3051void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3052 uint32_t firstQuery, uint32_t queryCount) {
3053 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3054
3055 cb_state->queryUpdates.emplace_back([this, commandBuffer, queryPool, firstQuery, queryCount](VkQueue q) {
3056 return SetQueryStateMulti(q, commandBuffer, queryPool, firstQuery, queryCount, QUERYSTATE_RESET);
3057 });
3058 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3059 cb_state);
3060}
3061
3062void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3063 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3064 VkDeviceSize dstOffset, VkDeviceSize stride,
3065 VkQueryResultFlags flags) {
3066 auto cb_state = GetCBState(commandBuffer);
3067 auto dst_buff_state = GetBufferState(dstBuffer);
3068 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
3069 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3070 cb_state);
3071}
3072
3073void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3074 VkQueryPool queryPool, uint32_t slot) {
3075 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3076 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3077 cb_state);
3078 QueryObject query = {queryPool, slot};
3079 cb_state->queryUpdates.emplace_back(
3080 [this, commandBuffer, query](VkQueue q) { return SetQueryState(q, commandBuffer, query, QUERYSTATE_ENDED); });
3081}
3082
3083void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3084 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3085 VkResult result) {
3086 if (VK_SUCCESS != result) return;
3087 // Shadow create info and store in map
3088 std::unique_ptr<FRAMEBUFFER_STATE> fb_state(
3089 new FRAMEBUFFER_STATE(*pFramebuffer, pCreateInfo, GetRenderPassStateSharedPtr(pCreateInfo->renderPass)));
3090
3091 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
3092 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
3093 VkImageView view = pCreateInfo->pAttachments[i];
3094 auto view_state = GetImageViewState(view);
3095 if (!view_state) {
3096 continue;
3097 }
3098 }
3099 }
3100 frameBufferMap[*pFramebuffer] = std::move(fb_state);
3101}
3102
3103void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3104 RENDER_PASS_STATE *render_pass) {
3105 auto &subpass_to_node = render_pass->subpassToNode;
3106 subpass_to_node.resize(pCreateInfo->subpassCount);
3107 auto &self_dependencies = render_pass->self_dependencies;
3108 self_dependencies.resize(pCreateInfo->subpassCount);
3109
3110 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
3111 subpass_to_node[i].pass = i;
3112 self_dependencies[i].clear();
3113 }
3114 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
3115 const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
3116 if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
3117 if (dependency.srcSubpass == dependency.dstSubpass) {
3118 self_dependencies[dependency.srcSubpass].push_back(i);
3119 } else {
3120 subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
3121 subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
3122 }
3123 }
3124 }
3125}
3126
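// Worked example for the DAG construction above (hypothetical render pass with three
// subpasses and dependencies {0->1, 1->2, 1->1}): the loop records
//   subpass_to_node[0].next = {1}    subpass_to_node[1].prev = {0}
//   subpass_to_node[1].next = {2}    subpass_to_node[2].prev = {1}
//   self_dependencies[1]   = {2}     // index of the 1->1 entry in pDependencies
// Dependencies involving VK_SUBPASS_EXTERNAL are intentionally left out of the graph.
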
3127static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
3128 if (index == VK_ATTACHMENT_UNUSED) return;
3129
3130 if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
3131}
3132
3133void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
3134 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
3135 VkRenderPass *pRenderPass) {
3136 render_pass->renderPass = *pRenderPass;
3137 auto create_info = render_pass->createInfo.ptr();
3138
3139    RecordRenderPassDAG(rp_version, create_info, render_pass.get());  // forward the caller's create version rather than hard-coding v1
3140
3141 for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
3142 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
3143 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
3144 MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
3145
3146 // resolve attachments are considered to be written
3147 if (subpass.pResolveAttachments) {
3148 MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
3149 }
3150 }
3151 if (subpass.pDepthStencilAttachment) {
3152 MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
3153 }
3154 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
3155 MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
3156 }
3157 }
3158
3159    // render_pass is a non-const lvalue reference; still must move so that move assignment is invoked and the caller's pointer is released.
3160 renderPassMap[*pRenderPass] = std::move(render_pass);
3161}
3162
3163// Style note:
3164// Passing the shared_ptr by reference and moving from it exceeds the recommended usage of references in the google style
3165// guide, but it intentionally forces the caller to give up ownership. This is clearer than passing a pointer to shared_ptr
3166// and avoids the atomic increment/decrement of shared_ptr copy construction or assignment.
3167void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3168 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3169 VkResult result) {
3170 if (VK_SUCCESS != result) return;
3171 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3172 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
3173}
3174
3175void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3176 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3177 VkResult result) {
3178 if (VK_SUCCESS != result) return;
3179 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3180 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
3181}
3182
3183void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3184 const VkRenderPassBeginInfo *pRenderPassBegin,
3185 const VkSubpassContents contents) {
3186 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3187 auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
3188 auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
3189
3190 if (render_pass_state) {
3191 cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
3192 cb_state->activeRenderPass = render_pass_state;
3193 // This is a shallow copy as that is all that is needed for now
3194 cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
3195 cb_state->activeSubpass = 0;
3196 cb_state->activeSubpassContents = contents;
3197 cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
3198 // Connect this framebuffer and its children to this cmdBuffer
3199 AddFramebufferBinding(cb_state, framebuffer);
3200 // Connect this RP to cmdBuffer
3201 AddCommandBufferBinding(&render_pass_state->cb_bindings,
3202 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass), cb_state);
3203
3204 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
3205 if (chained_device_group_struct) {
3206 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3207 } else {
3208 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3209 }
3210 }
3211}
3212
3213void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3214 const VkRenderPassBeginInfo *pRenderPassBegin,
3215 VkSubpassContents contents) {
3216 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3217}
3218
3219void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3220 const VkRenderPassBeginInfo *pRenderPassBegin,
3221 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
3222 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3223}
3224
3225void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3226 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3227 cb_state->activeSubpass++;
3228 cb_state->activeSubpassContents = contents;
3229}
3230
3231void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3232 RecordCmdNextSubpass(commandBuffer, contents);
3233}
3234
3235void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
3236 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
3237 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3238 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3239}
3240
3241void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
3242 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3243 cb_state->activeRenderPass = nullptr;
3244 cb_state->activeSubpass = 0;
3245 cb_state->activeFramebuffer = VK_NULL_HANDLE;
3246}
3247
3248void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
3249 RecordCmdEndRenderPassState(commandBuffer);
3250}
3251
3252void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
3253 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3254 RecordCmdEndRenderPassState(commandBuffer);
3255}
3256
3257void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
3258 const VkCommandBuffer *pCommandBuffers) {
3259 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3260
3261 CMD_BUFFER_STATE *sub_cb_state = NULL;
3262 for (uint32_t i = 0; i < commandBuffersCount; i++) {
3263 sub_cb_state = GetCBState(pCommandBuffers[i]);
3264 assert(sub_cb_state);
3265 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
3266 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
3267 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
3268 // from the validation step to the recording step
3269 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
3270 }
3271 }
3272
3273        // Propagate initial layout and current layout state to the primary cmd buffer
3274 // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
3275        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
3276 // for those other classes.
3277 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
3278 const auto image = sub_layout_map_entry.first;
3279 const auto *image_state = GetImageState(image);
3280 if (!image_state) continue; // Can't set layouts of a dead image
3281
3282 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
3283 const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
3284 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
3285 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
3286 }
3287
3288 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
3289 cb_state->linkedCommandBuffers.insert(sub_cb_state);
3290 sub_cb_state->linkedCommandBuffers.insert(cb_state);
3291 for (auto &function : sub_cb_state->queryUpdates) {
3292 cb_state->queryUpdates.push_back(function);
3293 }
3294 for (auto &function : sub_cb_state->queue_submit_functions) {
3295 cb_state->queue_submit_functions.push_back(function);
3296 }
3297 }
3298}
3299
3300void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
3301 VkFlags flags, void **ppData, VkResult result) {
3302 if (VK_SUCCESS != result) return;
3303 RecordMappedMemory(mem, offset, size, ppData);
3304}
3305
3306void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
3307 auto mem_info = GetDevMemState(mem);
3308 if (mem_info) {
3309 mem_info->mapped_range = MemRange();
3310 mem_info->p_driver_data = nullptr;
3311 }
3312}
3313
3314void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
3315 IMAGE_STATE *image_state = GetImageState(bindInfo.image);
3316 if (image_state) {
3317 const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
3318 if (swapchain_info) {
3319 auto swapchain = GetSwapchainState(swapchain_info->swapchain);
3320 if (swapchain) {
3321 swapchain->bound_images.insert(image_state->image);
3322 image_state->bind_swapchain = swapchain_info->swapchain;
3323 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
3324 }
3325 } else {
3326 // Track bound memory range information
3327 auto mem_info = GetDevMemState(bindInfo.memory);
3328 if (mem_info) {
3329 InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
3330 image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
3331 }
3332
3333 // Track objects tied to memory
3334 SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
3335 VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
3336 }
3337 if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
3338 AddAliasingImage(image_state);
3339 }
3340 }
3341}
3342
3343void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
3344 VkDeviceSize memoryOffset, VkResult result) {
3345 if (VK_SUCCESS != result) return;
3346 VkBindImageMemoryInfo bindInfo = {};
3347 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
3348 bindInfo.image = image;
3349 bindInfo.memory = mem;
3350 bindInfo.memoryOffset = memoryOffset;
3351 UpdateBindImageMemoryState(bindInfo);
3352}
3353
3354void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
3355 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3356 if (VK_SUCCESS != result) return;
3357 for (uint32_t i = 0; i < bindInfoCount; i++) {
3358 UpdateBindImageMemoryState(pBindInfos[i]);
3359 }
3360}
3361
3362void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
3363 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3364 if (VK_SUCCESS != result) return;
3365 for (uint32_t i = 0; i < bindInfoCount; i++) {
3366 UpdateBindImageMemoryState(pBindInfos[i]);
3367 }
3368}
3369
3370void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
3371 auto event_state = GetEventState(event);
3372 if (event_state) {
3373 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
3374 }
3375    // An event set from the host is visible to all queues immediately, so update stageMask for any queue that has seen this event
3376 // TODO : For correctness this needs separate fix to verify that app doesn't make incorrect assumptions about the
3377 // ordering of this command in relation to vkCmd[Set|Reset]Events (see GH297)
3378 for (auto queue_data : queueMap) {
3379 auto event_entry = queue_data.second.eventToStageMap.find(event);
3380 if (event_entry != queue_data.second.eventToStageMap.end()) {
3381 event_entry->second |= VK_PIPELINE_STAGE_HOST_BIT;
3382 }
3383 }
3384}
3385
3386void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
3387 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
3388 VkResult result) {
3389 if (VK_SUCCESS != result) return;
3390 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
3391 pImportSemaphoreFdInfo->flags);
3392}
3393
3394void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
3395 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
3396 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
3397 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
3398 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
3399 semaphore_state->scope = kSyncScopeExternalPermanent;
3400 }
3401}
3402
3403#ifdef VK_USE_PLATFORM_WIN32_KHR
3404void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
3405 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
3406 if (VK_SUCCESS != result) return;
3407 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
3408 pImportSemaphoreWin32HandleInfo->flags);
3409}
3410
3411void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
3412 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
3413 HANDLE *pHandle, VkResult result) {
3414 if (VK_SUCCESS != result) return;
3415 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
3416}
3417
3418void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
3419 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
3420 if (VK_SUCCESS != result) return;
3421 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
3422 pImportFenceWin32HandleInfo->flags);
3423}
3424
3425void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
3426 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
3427 HANDLE *pHandle, VkResult result) {
3428 if (VK_SUCCESS != result) return;
3429 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
3430}
3431#endif
3432
3433void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
3434 VkResult result) {
3435 if (VK_SUCCESS != result) return;
3436 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
3437}
3438
3439void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
3440 VkFenceImportFlagsKHR flags) {
3441 FENCE_STATE *fence_node = GetFenceState(fence);
3442 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
3443 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
3444 fence_node->scope == kSyncScopeInternal) {
3445 fence_node->scope = kSyncScopeExternalTemporary;
3446 } else {
3447 fence_node->scope = kSyncScopeExternalPermanent;
3448 }
3449 }
3450}
3451
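// Summary of the scope transitions implemented above (derived from the code, not quoted
// from the spec): a SYNC_FD handle type or the TEMPORARY flag on an internally scoped
// fence yields kSyncScopeExternalTemporary; any other import is permanent. Hedged
// application-side sketch of a temporary import (device, fence, and fd are assumptions):
//
//   VkImportFenceFdInfoKHR info = {};
//   info.sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR;
//   info.fence = fence;
//   info.flags = VK_FENCE_IMPORT_TEMPORARY_BIT_KHR;
//   info.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
//   info.fd = fd;
//   vkImportFenceFdKHR(device, &info);
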
3452void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
3453 VkResult result) {
3454 if (VK_SUCCESS != result) return;
3455 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
3456}
3457
3458void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
3459 FENCE_STATE *fence_state = GetFenceState(fence);
3460 if (fence_state) {
3461 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
3462 // Export with reference transference becomes external
3463 fence_state->scope = kSyncScopeExternalPermanent;
3464 } else if (fence_state->scope == kSyncScopeInternal) {
3465 // Export with copy transference has a side effect of resetting the fence
3466 fence_state->state = FENCE_UNSIGNALED;
3467 }
3468 }
3469}
3470
3471void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
3472 VkResult result) {
3473 if (VK_SUCCESS != result) return;
3474 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
3475}
3476
3477void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
3478 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
3479 if (VK_SUCCESS != result) return;
3480 eventMap[*pEvent].write_in_use = 0;
3481 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
3482}
3483
3484void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
3485 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
3486 SWAPCHAIN_NODE *old_swapchain_state) {
3487 if (VK_SUCCESS == result) {
3488 auto swapchain_state = unique_ptr<SWAPCHAIN_NODE>(new SWAPCHAIN_NODE(pCreateInfo, *pSwapchain));
3489 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
3490 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
3491 swapchain_state->shared_presentable = true;
3492 }
3493 surface_state->swapchain = swapchain_state.get();
3494 swapchainMap[*pSwapchain] = std::move(swapchain_state);
3495 } else {
3496 surface_state->swapchain = nullptr;
3497 }
3498 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
3499 if (old_swapchain_state) {
3500 old_swapchain_state->retired = true;
3501 }
3502 return;
3503}
3504
3505void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
3506 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
3507 VkResult result) {
3508 auto surface_state = GetSurfaceState(pCreateInfo->surface);
3509 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
3510 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
3511}
3512
3513void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
3514 const VkAllocationCallbacks *pAllocator) {
3515 if (!swapchain) return;
3516 auto swapchain_data = GetSwapchainState(swapchain);
3517 if (swapchain_data) {
3518 for (const auto &swapchain_image : swapchain_data->images) {
3519 ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image, kVulkanObjectTypeImage));
3520 imageMap.erase(swapchain_image);
3521 }
3522
3523 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
3524 if (surface_state) {
3525 if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
3526 }
3527 RemoveAliasingImages(swapchain_data->bound_images);
3528 swapchainMap.erase(swapchain);
3529 }
3530}
3531
3532void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
3533 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
3534 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
3535 auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
3536 if (pSemaphore) {
3537 pSemaphore->signaler.first = VK_NULL_HANDLE;
3538 pSemaphore->signaled = false;
3539 }
3540 }
3541
3542 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
3543        // Note: this is imperfect, in that we can get confused about what did or didn't succeed, but if the app does that, it's
3544 // confused itself just as much.
3545 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
3546 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
3547 // Mark the image as having been released to the WSI
3548 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
3549 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
3550 auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]];
3551 auto image_state = GetImageState(image);
3552 if (image_state) {
3553 image_state->acquired = false;
3554 }
3555 }
3556 }
3557 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
3558 // its semaphore waits) /never/ participate in any completion proof.
3559}
3560
3561void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
3562 const VkSwapchainCreateInfoKHR *pCreateInfos,
3563 const VkAllocationCallbacks *pAllocator,
3564 VkSwapchainKHR *pSwapchains, VkResult result) {
3565 if (pCreateInfos) {
3566 for (uint32_t i = 0; i < swapchainCount; i++) {
3567 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
3568 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
3569 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
3570 }
3571 }
3572}
3573
3574void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
3575 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
3576 auto pFence = GetFenceState(fence);
3577 if (pFence && pFence->scope == kSyncScopeInternal) {
3578 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
3579 // import
3580 pFence->state = FENCE_INFLIGHT;
3581 pFence->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
3582 }
3583
3584 auto pSemaphore = GetSemaphoreState(semaphore);
3585 if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
3586 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
3587 // temporary import
3588 pSemaphore->signaled = true;
3589 pSemaphore->signaler.first = VK_NULL_HANDLE;
3590 }
3591
3592 // Mark the image as acquired.
3593 auto swapchain_data = GetSwapchainState(swapchain);
3594 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
3595 auto image = swapchain_data->images[*pImageIndex];
3596 auto image_state = GetImageState(image);
3597 if (image_state) {
3598 image_state->acquired = true;
3599 image_state->shared_presentable = swapchain_data->shared_presentable;
3600 }
3601 }
3602}
3603
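// Hedged application-side sketch of the acquire recorded above (acquire_semaphore and
// image_index are assumptions): acquisition is not a queue operation, which is why the
// semaphore is marked signaled with a VK_NULL_HANDLE signaler.
//
//   uint32_t image_index = 0;
//   VkResult res = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
//                                        acquire_semaphore, VK_NULL_HANDLE, &image_index);
//   // On VK_SUCCESS or VK_SUBOPTIMAL_KHR the tracker marks images[image_index] as
//   // acquired and, for shared-presentable swapchains, flags it accordingly.
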
3604void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
3605 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
3606 VkResult result) {
3607 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
3608 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
3609}
3610
3611void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
3612 uint32_t *pImageIndex, VkResult result) {
3613 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
3614 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
3615 pAcquireInfo->fence, pImageIndex);
3616}
3617
3618void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3619 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
3620    if ((NULL != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
3621 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
3622 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
3623 phys_device_state.phys_device = pPhysicalDevices[i];
3624 // Init actual features for each physical device
3625 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
3626 }
3627 }
3628}
3629
// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    surface_map.erase(surface);
}

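// All of the vkCreate*SurfaceKHR entry points below funnel into this helper, which simply creates a
// SURFACE_STATE tracking object keyed by the newly created handle.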
void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::unique_ptr<SURFACE_STATE>(new SURFACE_STATE{*pSurface});
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

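// Surface capability queries are cached on the physical-device state so that later swapchain creation can be
// validated against the most recently returned limits (image counts, extents, usage flags, etc.).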
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

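// VkSurfaceCapabilities2EXT is not layout-compatible with VkSurfaceCapabilitiesKHR (it is a pNext-extensible
// structure and adds supportedSurfaceCounters), so the shared fields are copied member by member rather than
// with a single struct assignment.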
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

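// Presentation support is cached per (physical device, queue family) pair on the surface state, which lets
// swapchain-creation validation check that the surface is presentable from the queue actually being used.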
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

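// Device-group enumeration discovers physical devices just as vkEnumeratePhysicalDevices does, so each member
// device receives the same state initialization, including an up-front query of its supported features.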
void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    desc_template_map.erase(descriptorUpdateTemplate);
}

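// The application's create info is only required to remain valid for the duration of the call, so a deep copy
// (safe_VkDescriptorUpdateTemplateCreateInfo) is stored with the template state for later decoding of updates.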
void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, &local_create_info));
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

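// Push-descriptor template updates arrive as an opaque pData blob. It is decoded into ordinary
// VkWriteDescriptorSet updates against the set layout pulled from the pipeline layout, then recorded through the
// same path as vkCmdPushDescriptorSetKHR.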
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->IsDestroyed()) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

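// Shared recorder for the KHR and 2KHR display-plane property queries. Only the returned count and whether
// details were requested matter here, so the properties pointer is taken as void* and never dereferenced.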
void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

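// Extra sampler Ycbcr conversion bookkeeping is only needed for Android hardware buffer external formats, so the
// ANDROID record/destroy helpers are invoked only when that device extension is enabled.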
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

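// Host-side query reset. The requested range is clamped to the pool's actual query count, and any queries that
// have previously been recorded in the map are moved back to QUERYSTATE_RESET; never-seen queries are left alone.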
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.host_query_reset_features.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        auto query_it = queryToStateMap.find(query_obj);
        if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
    }
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}

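// Note on variable descriptor counts: when VkDescriptorSetVariableDescriptorCountAllocateInfoEXT is chained, its
// per-set counts are honored only if its descriptorSetCount matches the allocation's; otherwise 0 is assumed.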
// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= it->second;
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        std::unique_ptr<cvdescriptorset::DescriptorSet> new_ds(new cvdescriptorset::DescriptorSet(
            descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i], variable_count, this));
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

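// Each draw snapshots the command buffer's current vertex buffer bindings so submit-time validation can
// reconstruct which buffers were bound for every individual draw.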
static inline void UpdateResourceTrackingOnDraw(CMD_BUFFER_STATE *pCB) {
    pCB->cb_vertex_buffer_binding_info.push_back(pCB->current_vertex_buffer_binding_info);
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    UpdateResourceTrackingOnDraw(cb_state);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

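// Shader module code is only parsed when it begins with the SPIR-V magic number; anything else (presumably
// GLSL supplied via VK_NV_glsl_shader) gets an empty placeholder state without valid SPIR-V. The SPIR-V
// environment is selected from the API version so Vulkan 1.1-only capabilities validate correctly.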
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = (api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0;
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    std::unique_ptr<SHADER_MODULE_STATE> new_shader_module(
        is_spirv ? new SHADER_MODULE_STATE(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
                 : new SHADER_MODULE_STATE());
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module || !module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
    }
}

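// Push constant contents are only considered valid while the bound layout's push-constant ranges stay the same.
// When a layout with different ranges is bound, the shadowed data is discarded and resized (zero-filled) to
// cover the largest offset + size among the new ranges.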
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, push_constant_range.offset + push_constant_range.size);
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}