/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file.

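// Record Android hardware buffer (AHB) properties on the new image state: whether the image is
// imported from an AHB, and whether it uses an externally defined (non-Vulkan) format, so later
// validation can special-case AHB-backed images.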
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

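// On successful image creation, build the IMAGE_STATE tracking entry. Memory requirements are
// pre-fetched here (except for Android external-format images) so they are available even if the
// app never queries them itself.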
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    std::unique_ptr<IMAGE_STATE> is_node(new IMAGE_STATE(*pImage, pCreateInfo));
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->external_format_android) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    std::unique_ptr<BUFFER_STATE> buffer_state(new BUFFER_STATE(*pBuffer, pCreateInfo));

    // Get a set of requirements in the case the app does not
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    bufferViewMap[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo));
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageState(pCreateInfo->image);
    imageViewMap[*pView] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(image_state, *pView, pCreateInfo));
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

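// For images created with VK_IMAGE_CREATE_ALIAS_BIT, cross-reference this image with every
// compatible image bound to the same memory (or swapchain), so validation can treat all aliases
// of a binding consistently.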
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->create_from_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->create_from_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // A one-way clear is sufficient here: bound_images contains both sides of every cross-reference,
    // so clearing each image's aliasing_images set in this loop removes both directions of each link.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

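// The const and non-const accessors below share GetObjectMemBindingImpl; only the State/Result
// template arguments differ, so the per-handle-type switch is written once.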
const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    auto *mem_info = new DEVICE_MEMORY_STATE(object, mem, pAllocateInfo);
    memObjMap[mem] = unique_ptr<DEVICE_MEMORY_STATE>(mem_info);

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    auto inserted = cb_node->object_bindings.emplace(sampler_state->sampler, kVulkanObjectTypeSampler);
    if (inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        sampler_state->cb_bindings.insert(cb_node);
    }
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        auto image_inserted = cb_node->object_bindings.emplace(image_state->image, kVulkanObjectTypeImage);
        if (image_inserted.second) {
            // Only need to continue if this is a new item (the rest of the work would have been done previously)
            image_state->cb_bindings.insert(cb_node);
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                    if (mem_inserted.second) {
                        // Only need to complete the cross-reference if this is a new item
                        pMemInfo->cb_bindings.insert(cb_node);
                    }
                }
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    auto inserted = cb_node->object_bindings.emplace(view_state->image_view, kVulkanObjectTypeImageView);
    if (inserted.second) {
        // Only need to continue if this is a new item
        view_state->cb_bindings.insert(cb_node);
        auto image_state = GetImageState(view_state->create_info.image);
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    auto buffer_inserted = cb_node->object_bindings.emplace(buffer_state->buffer, kVulkanObjectTypeBuffer);
    if (buffer_inserted.second) {
        // Only need to continue if this is a new item
        buffer_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto inserted = cb_node->memObjs.insert(mem_binding);
                if (inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    auto inserted = cb_node->object_bindings.emplace(view_state->buffer_view, kVulkanObjectTypeBufferView);
    if (inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        view_state->cb_bindings.insert(cb_node);
        auto buffer_state = GetBufferState(view_state->create_info.buffer);
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    auto as_inserted = cb_node->object_bindings.emplace(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV);
    if (as_inserted.second) {
        // Only need to complete the cross-reference if this is a new item
        as_state->cb_bindings.insert(cb_node);
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                auto mem_inserted = cb_node->memObjs.insert(mem_binding);
                if (mem_inserted.second) {
                    // Only need to complete the cross-reference if this is a new item
                    pMemInfo->cb_bindings.insert(cb_node);
                }
            }
        }
    }
}

// For every mem obj bound to particular CB, free bindings related to that CB
void ValidationStateTracker::ClearCmdBufAndMemReferences(CMD_BUFFER_STATE *cb_node) {
    if (cb_node) {
        if (cb_node->memObjs.size() > 0) {
            for (auto mem : cb_node->memObjs) {
                DEVICE_MEMORY_STATE *pInfo = GetDevMemState(mem);
                if (pInfo) {
                    pInfo->cb_bindings.erase(cb_node);
                }
            }
            cb_node->memObjs.clear();
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}

// For the NULL-memory case, clear any previous binding. Otherwise:
//  - Make sure the given object is in its object map
//  - If a previous binding existed, update the binding
//  - Add a reference from the objectInfo to the memoryInfo
//  - Add a reference off of the object's binding info
// Returns 'skip' (currently always VK_FALSE)
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

const RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) const {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second.get();
}

RENDER_PASS_STATE *ValidationStateTracker::GetRenderPassState(VkRenderPass renderpass) {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second.get();
}

std::shared_ptr<RENDER_PASS_STATE> ValidationStateTracker::GetRenderPassStateSharedPtr(VkRenderPass renderpass) {
    auto it = renderPassMap.find(renderpass);
    if (it == renderPassMap.end()) {
        return nullptr;
    }
    return it->second;
}

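// Called at draw/dispatch record time: refresh the bookkeeping for the pipeline's active
// descriptor sets, binding their resources to the command buffer, and skip the re-binding work
// for a "many descriptors" set that provably hasn't changed since it was last validated.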
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool need_update =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count) ||
                    // Update if previous bindingReqMap doesn't include new bindingReqMap
                    !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                   state.per_set[setIndex].validated_set_binding_req_map.end(), set_binding_pair.second.begin(),
                                   set_binding_pair.second.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    descriptor_set->UpdateDrawState(this, cb_state, binding_req_map);

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
void ValidationStateTracker::AddCommandBufferBinding(std::unordered_set<CMD_BUFFER_STATE *> *cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return;
    }
    cb_bindings->insert(cb_node);
    cb_node->object_bindings.insert(obj);
}

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->queryToStateMap.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->eventToStageMap.clear();
        pCB->cb_vertex_buffer_binding_info.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        ClearCmdBufAndMemReferences(pCB);
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

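    // Harvest the extension feature structs that were enabled via the pNext chain. Each struct
    // found is copied into enabled_features so later validation can consult it without re-walking
    // the chain. For example, an app that chains VkPhysicalDevice8BitStorageFeaturesKHR into
    // VkDeviceCreateInfo::pNext ends up with enabled_features.eight_bit_storage populated here.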
    const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
    if (descriptor_indexing_features) {
        state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
    }

    const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
    if (eight_bit_storage_features) {
        state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
    }

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
    if (float16_int8_features) {
        state_tracker->enabled_features.float16_int8 = *float16_int8_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *uniform_buffer_standard_layout_features =
        lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
    if (uniform_buffer_standard_layout_features) {
        state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
    }

    const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
    if (scalar_block_layout_features) {
        state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
    }

    const auto *buffer_address = lvl_find_in_chain<VkPhysicalDeviceBufferAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_address) {
        state_tracker->enabled_features.buffer_address = *buffer_address;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *float_controls_features = lvl_find_in_chain<VkPhysicalDeviceFloatControlsPropertiesKHR>(pCreateInfo->pNext);
    if (float_controls_features) {
        state_tracker->enabled_features.float_controls = *float_controls_features;
    }

    const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
    if (host_query_reset_features) {
        state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *imageless_framebuffer_features =
        lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
    if (imageless_framebuffer_features) {
        state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *subgroup_extended_types_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(pCreateInfo->pNext);
    if (subgroup_extended_types_features) {
        state_tracker->enabled_features.subgroup_extended_types_features = *subgroup_extended_types_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);

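    // Fetch, once at device-creation time, the extension-specific physical-device
    // properties/limits that later validation will need.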
    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}

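// Device teardown: clear all device-level state maps. Command buffers are reset before being
// destroyed so their object_bindings are unlinked from the objects they reference.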
void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    pipelineMap.clear();
    renderPassMap.clear();

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO : We should be able to remove the NULL look-up checks from the code below as long as
    // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
    // should then be flagged prior to calling this function
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

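// Retire all work on pQueue up through sequence number seq: release in-use counts on semaphores,
// command buffers, and their bound objects, propagate query and event state, mark fences retired,
// then recursively roll any other queues forward to the highest wait seen against them.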
1254void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq, bool switch_finished_queries) {
1255 std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
1256
1257 // Roll this queue forward, one submission at a time.
1258 while (pQueue->seq < seq) {
1259 auto &submission = pQueue->submissions.front();
1260
1261 for (auto &wait : submission.waitSemaphores) {
1262 auto pSemaphore = GetSemaphoreState(wait.semaphore);
1263 if (pSemaphore) {
1264 pSemaphore->in_use.fetch_sub(1);
1265 }
1266 auto &lastSeq = otherQueueSeqs[wait.queue];
1267 lastSeq = std::max(lastSeq, wait.seq);
1268 }
1269
1270 for (auto &semaphore : submission.signalSemaphores) {
1271 auto pSemaphore = GetSemaphoreState(semaphore);
1272 if (pSemaphore) {
1273 pSemaphore->in_use.fetch_sub(1);
1274 }
1275 }
1276
1277 for (auto &semaphore : submission.externalSemaphores) {
1278 auto pSemaphore = GetSemaphoreState(semaphore);
1279 if (pSemaphore) {
1280 pSemaphore->in_use.fetch_sub(1);
1281 }
1282 }
1283
1284 for (auto cb : submission.cbs) {
1285 auto cb_node = GetCBState(cb);
1286 if (!cb_node) {
1287 continue;
1288 }
1289 // First perform decrement on general case bound objects
1290 DecrementBoundResources(cb_node);
1291 for (auto event : cb_node->writeEventsBeforeWait) {
1292 auto eventNode = eventMap.find(event);
1293 if (eventNode != eventMap.end()) {
1294 eventNode->second.write_in_use--;
1295 }
1296 }
1297 for (auto queryStatePair : cb_node->queryToStateMap) {
1298 const QueryState newState =
1299 ((queryStatePair.second == QUERYSTATE_ENDED && switch_finished_queries) ? QUERYSTATE_AVAILABLE
1300 : queryStatePair.second);
1301 pQueue->queryToStateMap[queryStatePair.first] = newState;
1302 queryToStateMap[queryStatePair.first] = newState;
1303 }
1304 for (auto eventStagePair : cb_node->eventToStageMap) {
1305 eventMap[eventStagePair.first].stageMask = eventStagePair.second;
1306 }
1307
1308 cb_node->in_use.fetch_sub(1);
1309 }
1310
1311 auto pFence = GetFenceState(submission.fence);
1312 if (pFence && pFence->scope == kSyncScopeInternal) {
1313 pFence->state = FENCE_RETIRED;
1314 }
1315
1316 pQueue->submissions.pop_front();
1317 pQueue->seq++;
1318 }
1319
1320 // Roll other queues forward to the highest seq we saw a wait for
1321 for (auto qs : otherQueueSeqs) {
1322 RetireWorkOnQueue(GetQueueState(qs.first), qs.second, switch_finished_queries);
1323 }
1324}
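// Worked example of the seq arithmetic above (hypothetical numbers): a queue with seq == 5 and
// three pending submissions owns sequence numbers 6, 7, and 8. RetireWorkOnQueue(pQueue, 7, ...)
// pops and retires the first two submissions and leaves pQueue->seq == 7, with one submission
// still in flight; any cross-queue semaphore waits seen along the way are retired via the
// recursive call over otherQueueSeqs.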
1325
1326 // Submit a fence to a queue, using it to delimit previously submitted fences and any
1327 // previously untracked work.
1328static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1329 pFence->state = FENCE_INFLIGHT;
1330 pFence->signaler.first = pQueue->queue;
1331 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1332}
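// Example (hypothetical values): with pQueue->seq == 5 and two submissions already pending,
// SubmitFence(pQueue, pFence, 1) predicts the fence signals at seq 5 + 2 + 1 == 8, i.e. once
// the submission carrying it completes. RetireFence() later uses this signaler pair to roll
// the owning queue forward to exactly that point.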
1333
1334void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1335 VkFence fence, VkResult result) {
1336 uint64_t early_retire_seq = 0;
1337 auto pQueue = GetQueueState(queue);
1338 auto pFence = GetFenceState(fence);
1339
1340 if (pFence) {
1341 if (pFence->scope == kSyncScopeInternal) {
1342 // Mark fence in use
1343 SubmitFence(pQueue, pFence, std::max(1u, submitCount));
1344 if (!submitCount) {
1345 // If no submissions, but just dropping a fence on the end of the queue,
1346 // record an empty submission with just the fence, so we can determine
1347 // its completion.
1348 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
1349 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
1350 }
1351 } else {
1352 // Retire work up until this fence early, we will not see the wait that corresponds to this signal
1353 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1354 }
1355 }
1356
1357 // Now process each individual submit
1358 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1359 std::vector<VkCommandBuffer> cbs;
1360 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1361 vector<SEMAPHORE_WAIT> semaphore_waits;
1362 vector<VkSemaphore> semaphore_signals;
1363 vector<VkSemaphore> semaphore_externals;
1364 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
1365 VkSemaphore semaphore = submit->pWaitSemaphores[i];
1366 auto pSemaphore = GetSemaphoreState(semaphore);
1367 if (pSemaphore) {
1368 if (pSemaphore->scope == kSyncScopeInternal) {
1369 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1370 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1371 pSemaphore->in_use.fetch_add(1);
1372 }
1373 pSemaphore->signaler.first = VK_NULL_HANDLE;
1374 pSemaphore->signaled = false;
1375 } else {
1376 semaphore_externals.push_back(semaphore);
1377 pSemaphore->in_use.fetch_add(1);
1378 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1379 pSemaphore->scope = kSyncScopeInternal;
1380 }
1381 }
1382 }
1383 }
1384 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
1385 VkSemaphore semaphore = submit->pSignalSemaphores[i];
1386 auto pSemaphore = GetSemaphoreState(semaphore);
1387 if (pSemaphore) {
1388 if (pSemaphore->scope == kSyncScopeInternal) {
1389 pSemaphore->signaler.first = queue;
1390 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1391 pSemaphore->signaled = true;
1392 pSemaphore->in_use.fetch_add(1);
1393 semaphore_signals.push_back(semaphore);
1394 } else {
1395 // Retire work up until this submit early, we will not see the wait that corresponds to this signal
1396 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1397 }
1398 }
1399 }
1400 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
1401 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
1402 if (cb_node) {
1403 cbs.push_back(submit->pCommandBuffers[i]);
1404 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
1405 cbs.push_back(secondaryCmdBuffer->commandBuffer);
1406 IncrementResources(secondaryCmdBuffer);
1407 }
1408 IncrementResources(cb_node);
1409 }
1410 }
1411 pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
1412 submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
1413 }
1414
1415 if (early_retire_seq) {
1416 RetireWorkOnQueue(pQueue, early_retire_seq, true);
1417 }
1418}
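// Illustrative app-side sketch of the bookkeeping above (hypothetical 'queue', 'cb', 'semA',
// 'semB', and 'stage' values; not part of the layer):
//   VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO};
//   si.waitSemaphoreCount = 1;   si.pWaitSemaphores = &semA;    // semA: signaler cleared; in_use +1 if a
//   si.pWaitDstStageMask = &stage;                              //   prior queue signal is still pending
//   si.signalSemaphoreCount = 1; si.pSignalSemaphores = &semB;  // semB: signaler = {queue, seq+pending+1}
//   si.commandBufferCount = 1;   si.pCommandBuffers = &cb;      // cb + linked secondaries: IncrementResources()
//   vkQueueSubmit(queue, 1, &si, VK_NULL_HANDLE);               // recorded as one QUEUE_STATE submission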
1419
1420void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1421 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1422 VkResult result) {
1423 if (VK_SUCCESS == result) {
1424 AddMemObjInfo(device, *pMemory, pAllocateInfo);
1425 }
1426 return;
1427}
1428
1429void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1430 if (!mem) return;
1431 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
1432 const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
1433
1434 // Clear mem binding for any bound objects
1435 for (const auto &obj : mem_info->obj_bindings) {
1436 BINDABLE *bindable_state = nullptr;
1437 switch (obj.type) {
1438 case kVulkanObjectTypeImage:
1439 bindable_state = GetImageState(obj.Cast<VkImage>());
1440 break;
1441 case kVulkanObjectTypeBuffer:
1442 bindable_state = GetBufferState(obj.Cast<VkBuffer>());
1443 break;
1444 case kVulkanObjectTypeAccelerationStructureNV:
1445 bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
1446 break;
1447
1448 default:
1449 // Should only have acceleration structure, buffer, or image objects bound to memory
1450 assert(0);
1451 }
1452
1453 if (bindable_state) {
1454 bindable_state->binding.mem = MEMORY_UNBOUND;
1455 bindable_state->UpdateBoundMemorySet();
1456 }
1457 }
1458 // Any bound cmd buffers are now invalid
1459 InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
1460 RemoveAliasingImages(mem_info->bound_images);
1461 memObjMap.erase(mem);
1462}
1463
1464void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1465 VkFence fence, VkResult result) {
1466 if (result != VK_SUCCESS) return;
1467 uint64_t early_retire_seq = 0;
1468 auto pFence = GetFenceState(fence);
1469 auto pQueue = GetQueueState(queue);
1470
1471 if (pFence) {
1472 if (pFence->scope == kSyncScopeInternal) {
1473 SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
1474 if (!bindInfoCount) {
1475 // No work to do, just dropping a fence in the queue by itself.
1476 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
1477 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence);
1478 }
1479 } else {
1480 // Retire work up until this fence early, we will not see the wait that corresponds to this signal
1481 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1482 }
1483 }
1484
1485 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
1486 const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
1487 // Track objects tied to memory
1488 for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
1489 for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
1490 auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
1491 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1492 VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
1493 }
1494 }
1495 for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
1496 for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
1497 auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
1498 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1499 VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
1500 }
1501 }
1502 for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
1503 for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
1504 auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
1505 // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
1506 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
1507 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
1508 VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
1509 }
1510 }
1511
1512 std::vector<SEMAPHORE_WAIT> semaphore_waits;
1513 std::vector<VkSemaphore> semaphore_signals;
1514 std::vector<VkSemaphore> semaphore_externals;
1515 for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
1516 VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
1517 auto pSemaphore = GetSemaphoreState(semaphore);
1518 if (pSemaphore) {
1519 if (pSemaphore->scope == kSyncScopeInternal) {
1520 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1521 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1522 pSemaphore->in_use.fetch_add(1);
1523 }
1524 pSemaphore->signaler.first = VK_NULL_HANDLE;
1525 pSemaphore->signaled = false;
1526 } else {
1527 semaphore_externals.push_back(semaphore);
1528 pSemaphore->in_use.fetch_add(1);
1529 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1530 pSemaphore->scope = kSyncScopeInternal;
1531 }
1532 }
1533 }
1534 }
1535 for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
1536 VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
1537 auto pSemaphore = GetSemaphoreState(semaphore);
1538 if (pSemaphore) {
1539 if (pSemaphore->scope == kSyncScopeInternal) {
1540 pSemaphore->signaler.first = queue;
1541 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1542 pSemaphore->signaled = true;
1543 pSemaphore->in_use.fetch_add(1);
1544 semaphore_signals.push_back(semaphore);
1545 } else {
1546 // Retire work up until this submit early, we will not see the wait that corresponds to this signal
1547 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1548 }
1549 }
1550 }
1551
1552 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
1553 bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE);
1554 }
1555
1556 if (early_retire_seq) {
1557 RetireWorkOnQueue(pQueue, early_retire_seq, true);
1558 }
1559}
1560
1561void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1562 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1563 VkResult result) {
1564 if (VK_SUCCESS != result) return;
1565 std::unique_ptr<SEMAPHORE_STATE> semaphore_state(new SEMAPHORE_STATE{});
1566 semaphore_state->signaler.first = VK_NULL_HANDLE;
1567 semaphore_state->signaler.second = 0;
1568 semaphore_state->signaled = false;
1569 semaphore_state->scope = kSyncScopeInternal;
1570 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
1571}
1572
1573void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
1574 VkSemaphoreImportFlagsKHR flags) {
1575 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1576 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
1577 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
1578 sema_node->scope == kSyncScopeInternal) {
1579 sema_node->scope = kSyncScopeExternalTemporary;
1580 } else {
1581 sema_node->scope = kSyncScopeExternalPermanent;
1582 }
1583 }
1584}
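// Note on the scope transitions above: a sync-fd import, or any import with
// VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR, moves an internal semaphore to
// kSyncScopeExternalTemporary; the first wait that consumes the imported payload flips it back
// to kSyncScopeInternal (see the wait-semaphore handling in PostCallRecordQueueSubmit). All
// other imports are permanent, and kSyncScopeExternalPermanent is never downgraded here.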
1585
1586void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1587 auto mem_info = GetDevMemState(mem);
1588 if (mem_info) {
1589 mem_info->mapped_range.offset = offset;
1590 mem_info->mapped_range.size = size;
1591 mem_info->p_driver_data = *ppData;
1592 }
1593}
1594
1595void ValidationStateTracker::RetireFence(VkFence fence) {
1596 auto pFence = GetFenceState(fence);
1597 if (pFence && pFence->scope == kSyncScopeInternal) {
1598 if (pFence->signaler.first != VK_NULL_HANDLE) {
1599 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
1600 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second, true);
1601 } else {
1602 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
1603 // the fence as retired.
1604 pFence->state = FENCE_RETIRED;
1605 }
1606 }
1607}
1608
1609void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1610 VkBool32 waitAll, uint64_t timeout, VkResult result) {
1611 if (VK_SUCCESS != result) return;
1612
1613 // When we know that all fences are complete we can clean/remove their CBs
1614 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
1615 for (uint32_t i = 0; i < fenceCount; i++) {
1616 RetireFence(pFences[i]);
1617 }
1618 }
1619 // NOTE: The alternate case, where only some of the fences have completed, is not handled
1620 // here. To determine which fences completed, the app must call vkGetFenceStatus(), at
1621 // which point we'll clean up and remove the CBs of each fence found complete.
1622}
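// Illustrative end-to-end fence path tying the pieces together (hypothetical handles; not part
// of the layer):
//   vkQueueSubmit(queue, 1, &si, fence);                       // SubmitFence: FENCE_INFLIGHT, signaler set
//   vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX);   // RetireFence -> RetireWorkOnQueue(signaler)
//   vkResetFences(device, 1, &fence);                          // PostCallRecordResetFences: FENCE_UNSIGNALED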
1623
1624void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
1625 if (VK_SUCCESS != result) return;
1626 RetireFence(fence);
1627}
1628
1629void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
1630 // Add queue to tracking set only if it is new
1631 auto queue_is_new = queues.emplace(queue);
1632 if (queue_is_new.second) {
1633 QUEUE_STATE *queue_state = &queueMap[queue];
1634 queue_state->queue = queue;
1635 queue_state->queueFamilyIndex = queue_family_index;
1636 queue_state->seq = 0;
1637 }
1638}
1639
1640void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
1641 VkQueue *pQueue) {
1642 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
1643}
1644
1645void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
1646 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
1647}
1648
1649void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
1650 if (VK_SUCCESS != result) return;
1651 QUEUE_STATE *queue_state = GetQueueState(queue);
1652 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size(), true);
1653}
1654
1655void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
1656 if (VK_SUCCESS != result) return;
1657 for (auto &queue : queueMap) {
1658 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size(), true);
1659 }
1660}
1661
1662void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
1663 if (!fence) return;
1664 fenceMap.erase(fence);
1665}
1666
1667void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
1668 const VkAllocationCallbacks *pAllocator) {
1669 if (!semaphore) return;
1670 semaphoreMap.erase(semaphore);
1671}
1672
1673void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
1674 if (!event) return;
1675 EVENT_STATE *event_state = GetEventState(event);
1676 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
1677 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
1678 eventMap.erase(event);
1679}
1680
1681void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
1682 const VkAllocationCallbacks *pAllocator) {
1683 if (!queryPool) return;
1684 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
1685 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
1686 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
1687 queryPoolMap.erase(queryPool);
1688}
1689
1690 // Object with given handle is being bound to memory with the given mem_info struct.
1691 // Record the binding by inserting the handle into the mem_info set matching its type:
1692 // bound_images, bound_buffers, or bound_acceleration_structures.
1693 // The memoryOffset, memRequirements, and is_linear parameters are accepted for interface
1694 // parity with the range-validation path, which scans previously bound ranges and flags
1695 // incorrect overlap of linear and non-linear ranges; the state tracker records only the
1696 // binding itself.
1697void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
1698 VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
1699 if (typed_handle.type == kVulkanObjectTypeImage) {
1700 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
1701 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1702 mem_info->bound_buffers.insert(typed_handle.handle);
1703 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1704 mem_info->bound_acceleration_structures.insert(typed_handle.handle);
1705 } else {
1706 // Unsupported object type
1707 assert(false);
1708 }
1709}
1710
1711void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1712 VkMemoryRequirements mem_reqs, bool is_linear) {
1713 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
1714}
1715
1716void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1717 const VkMemoryRequirements &mem_reqs) {
1718 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
1719}
1720
1721void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
1722 VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
1723 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
1724}
1725
1726 // Remove the handle from whichever mem_info bound-object set matches its type.
1727static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
1728 if (typed_handle.type == kVulkanObjectTypeImage) {
1729 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
1730 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1731 mem_info->bound_buffers.erase(typed_handle.handle);
1732 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1733 mem_info->bound_acceleration_structures.erase(typed_handle.handle);
1734 } else {
1735 // Unsupported object type
1736 assert(false);
1737 }
1738}
1739
1740void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
1741 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
1742}
1743
1744void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
1745 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
1746}
1747
1748void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
1749 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
1750}
1751
1752void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
1753 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1754 if (buffer_state) {
1755 // Track bound memory range information
1756 auto mem_info = GetDevMemState(mem);
1757 if (mem_info) {
1758 InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
1759 }
1760 // Track objects tied to memory
1761 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
1762 }
1763}
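// Typical app-side order feeding the state above (hypothetical handles; not part of the layer):
//   vkGetBufferMemoryRequirements(device, buffer, &reqs);  // caches buffer_state->requirements below
//   vkBindBufferMemory(device, buffer, mem, offset);       // InsertBufferMemoryRange + SetMemBinding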
1764
1765void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
1766 VkDeviceSize memoryOffset, VkResult result) {
1767 if (VK_SUCCESS != result) return;
1768 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
1769}
1770
1771void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
1772 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1773 for (uint32_t i = 0; i < bindInfoCount; i++) {
1774 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1775 }
1776}
1777
1778void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
1779 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1780 for (uint32_t i = 0; i < bindInfoCount; i++) {
1781 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1782 }
1783}
1784
1785void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
1786 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1787 if (buffer_state) {
1788 buffer_state->requirements = *pMemoryRequirements;
1789 buffer_state->memory_requirements_checked = true;
1790 }
1791}
1792
1793void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
1794 VkMemoryRequirements *pMemoryRequirements) {
1795 RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
1796}
1797
1798void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
1799 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1800 VkMemoryRequirements2KHR *pMemoryRequirements) {
1801 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1802}
1803
1804void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
1805 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1806 VkMemoryRequirements2KHR *pMemoryRequirements) {
1807 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1808}
1809
1810void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
1811 IMAGE_STATE *image_state = GetImageState(image);
1812 if (image_state) {
1813 image_state->requirements = *pMemoryRequirements;
1814 image_state->memory_requirements_checked = true;
1815 }
1816}
1817
1818void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
1819 VkMemoryRequirements *pMemoryRequirements) {
1820 RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
1821}
1822
1823void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
1824 VkMemoryRequirements2 *pMemoryRequirements) {
1825 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1826}
1827
1828void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
1829 const VkImageMemoryRequirementsInfo2 *pInfo,
1830 VkMemoryRequirements2 *pMemoryRequirements) {
1831 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1832}
1833
1834static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
1835 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
1836 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
1837 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
1838 image_state->sparse_metadata_required = true;
1839 }
1840}
1841
1842void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
1843 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
1844 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
1845 auto image_state = GetImageState(image);
1846 image_state->get_sparse_reqs_called = true;
1847 if (!pSparseMemoryRequirements) return;
1848 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1849 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
1850 }
1851}
1852
1853void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
1854 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1855 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1856 auto image_state = GetImageState(pInfo->image);
1857 image_state->get_sparse_reqs_called = true;
1858 if (!pSparseMemoryRequirements) return;
1859 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1860 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1861 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1862 }
1863}
1864
1865void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
1866 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1867 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1868 auto image_state = GetImageState(pInfo->image);
1869 image_state->get_sparse_reqs_called = true;
1870 if (!pSparseMemoryRequirements) return;
1871 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1872 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1873 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1874 }
1875}
1876
1877void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1878 const VkAllocationCallbacks *pAllocator) {
1879 if (!shaderModule) return;
1880 shaderModuleMap.erase(shaderModule);
1881}
1882
1883void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
1884 const VkAllocationCallbacks *pAllocator) {
1885 if (!pipeline) return;
1886 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
1887 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
1888 // Any bound cmd buffers are now invalid
1889 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
1890 pipelineMap.erase(pipeline);
1891}
1892
1893void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1894 const VkAllocationCallbacks *pAllocator) {
1895 if (!pipelineLayout) return;
1896 pipelineLayoutMap.erase(pipelineLayout);
1897}
1898
1899void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
1900 const VkAllocationCallbacks *pAllocator) {
1901 if (!sampler) return;
1902 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
1903 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
1904 // Any bound cmd buffers are now invalid
1905 if (sampler_state) {
1906 InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
1907 }
1908 samplerMap.erase(sampler);
1909}
1910
1911void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1912 const VkAllocationCallbacks *pAllocator) {
1913 if (!descriptorSetLayout) return;
1914 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
1915 if (layout_it != descriptorSetLayoutMap.end()) {
1916 layout_it->second.get()->MarkDestroyed();
1917 descriptorSetLayoutMap.erase(layout_it);
1918 }
1919}
1920
1921void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1922 const VkAllocationCallbacks *pAllocator) {
1923 if (!descriptorPool) return;
1924 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
1925 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
1926 if (desc_pool_state) {
1927 // Any bound cmd buffers are now invalid
1928 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
1929 // Free sets that were in this pool
1930 for (auto ds : desc_pool_state->sets) {
1931 FreeDescriptorSet(ds);
1932 }
1933 descriptorPoolMap.erase(descriptorPool);
1934 }
1935}
1936
1937// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
1938void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
1939 const VkCommandBuffer *command_buffers) {
1940 for (uint32_t i = 0; i < command_buffer_count; i++) {
1941 auto cb_state = GetCBState(command_buffers[i]);
1942 // Remove references to command buffer's state and delete
1943 if (cb_state) {
1944 // reset prior to delete, removing various references to it.
1945 // TODO: fix this, it's insane.
1946 ResetCommandBufferState(cb_state->commandBuffer);
1947 // Remove the cb_state's references from COMMAND_POOL_STATEs
1948 pool_state->commandBuffers.erase(command_buffers[i]);
1949 // Remove the cb debug labels
1950 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
1951 // Remove CBState from CB map
1952 commandBufferMap.erase(cb_state->commandBuffer);
1953 }
1954 }
1955}
1956
1957void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
1958 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
1959 auto pPool = GetCommandPoolState(commandPool);
1960 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
1961}
1962
1963void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1964 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
1965 VkResult result) {
1966 if (VK_SUCCESS != result) return;
1967 std::unique_ptr<COMMAND_POOL_STATE> cmd_pool_state(new COMMAND_POOL_STATE{});
1968 cmd_pool_state->createFlags = pCreateInfo->flags;
1969 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
1970 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
1971}
1972
1973void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1974 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
1975 VkResult result) {
1976 if (VK_SUCCESS != result) return;
1977 std::unique_ptr<QUERY_POOL_STATE> query_pool_state(new QUERY_POOL_STATE{});
1978 query_pool_state->createInfo = *pCreateInfo;
1979 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
1980
1981 QueryObject query_obj{*pQueryPool, 0u};
1982 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
1983 query_obj.query = i;
1984 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
1985 }
1986}
1987
1988void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
1989 const VkAllocationCallbacks *pAllocator) {
1990 if (!commandPool) return;
1991 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
1992 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
1993 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
1994 if (cp_state) {
1995 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
1996 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
1997 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
1998 commandPoolMap.erase(commandPool);
1999 }
2000}
2001
2002void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2003 VkCommandPoolResetFlags flags, VkResult result) {
2004 if (VK_SUCCESS != result) return;
2005 // Reset all of the CBs allocated from this pool
2006 auto command_pool_state = GetCommandPoolState(commandPool);
2007 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2008 ResetCommandBufferState(cmdBuffer);
2009 }
2010}
2011
2012void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2013 VkResult result) {
2014 for (uint32_t i = 0; i < fenceCount; ++i) {
2015 auto pFence = GetFenceState(pFences[i]);
2016 if (pFence) {
2017 if (pFence->scope == kSyncScopeInternal) {
2018 pFence->state = FENCE_UNSIGNALED;
2019 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2020 pFence->scope = kSyncScopeInternal;
2021 }
2022 }
2023 }
2024}
2025
2026// For given cb_nodes, invalidate them and track object causing invalidation
2027void ValidationStateTracker::InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> const &cb_nodes,
2028 const VulkanTypedHandle &obj) {
2029 for (auto cb_node : cb_nodes) {
2030 if (cb_node->state == CB_RECORDING) {
2031 cb_node->state = CB_INVALID_INCOMPLETE;
2032 } else if (cb_node->state == CB_RECORDED) {
2033 cb_node->state = CB_INVALID_COMPLETE;
2034 }
2035 cb_node->broken_bindings.push_back(obj);
2036
2037 // if secondary, then propagate the invalidation to the primaries that will call us.
2038 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2039 InvalidateCommandBuffers(cb_node->linkedCommandBuffers, obj);
2040 }
2041 }
2042}
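// Example of the invalidation above: destroying a sampler referenced by a fully recorded command
// buffer (see PreCallRecordDestroySampler) moves that buffer from CB_RECORDED to
// CB_INVALID_COMPLETE and appends the sampler's VulkanTypedHandle to broken_bindings; if the
// buffer is a secondary, every primary in linkedCommandBuffers is invalidated the same way.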
2043
2044void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2045 const VkAllocationCallbacks *pAllocator) {
2046 if (!framebuffer) return;
2047 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2048 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2049 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
2050 frameBufferMap.erase(framebuffer);
2051}
2052
2053void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2054 const VkAllocationCallbacks *pAllocator) {
2055 if (!renderPass) return;
2056 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2057 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2058 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
2059 renderPassMap.erase(renderPass);
2060}
2061
2062void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2063 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2064 if (VK_SUCCESS != result) return;
2065 std::unique_ptr<FENCE_STATE> fence_state(new FENCE_STATE{});
2066 fence_state->fence = *pFence;
2067 fence_state->createInfo = *pCreateInfo;
2068 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2069 fenceMap[*pFence] = std::move(fence_state);
2070}
2071
2072bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2073 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2074 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2075 void *cgpl_state_data) {
2076 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2077 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2078 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2079 cgpl_state->pipe_state.reserve(count);
2080 for (uint32_t i = 0; i < count; i++) {
2081 cgpl_state->pipe_state.push_back(std::unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2082 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i],
2083 GetRenderPassStateSharedPtr(pCreateInfos[i].renderPass));
2084 (cgpl_state->pipe_state)[i]->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2085 }
2086 return false;
2087}
2088
2089void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2090 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2091 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2092 VkResult result, void *cgpl_state_data) {
2093 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2094 // This API may create pipelines regardless of the return value
2095 for (uint32_t i = 0; i < count; i++) {
2096 if (pPipelines[i] != VK_NULL_HANDLE) {
2097 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2098 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2099 }
2100 }
2101 cgpl_state->pipe_state.clear();
2102}
2103
2104bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2105 const VkComputePipelineCreateInfo *pCreateInfos,
2106 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2107 void *ccpl_state_data) {
2108 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2109 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2110 ccpl_state->pipe_state.reserve(count);
2111 for (uint32_t i = 0; i < count; i++) {
2112 // Create and initialize internal tracking data structure
2113 ccpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2114 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
2115 ccpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2116 }
2117 return false;
2118}
2119
2120void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2121 const VkComputePipelineCreateInfo *pCreateInfos,
2122 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2123 VkResult result, void *ccpl_state_data) {
2124 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2125
2126 // This API may create pipelines regardless of the return value
2127 for (uint32_t i = 0; i < count; i++) {
2128 if (pPipelines[i] != VK_NULL_HANDLE) {
2129 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2130 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2131 }
2132 }
2133 ccpl_state->pipe_state.clear();
2134}
2135
2136bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2137 uint32_t count,
2138 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2139 const VkAllocationCallbacks *pAllocator,
2140 VkPipeline *pPipelines, void *crtpl_state_data) {
2141 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2142 crtpl_state->pipe_state.reserve(count);
2143 for (uint32_t i = 0; i < count; i++) {
2144 // Create and initialize internal tracking data structure
2145 crtpl_state->pipe_state.push_back(unique_ptr<PIPELINE_STATE>(new PIPELINE_STATE));
2146 crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
2147 crtpl_state->pipe_state.back()->pipeline_layout = *GetPipelineLayout(pCreateInfos[i].layout);
2148 }
2149 return false;
2150}
2151
2152void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2153 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2154 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2155 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2156 // This API may create pipelines regardless of the return value
2157 for (uint32_t i = 0; i < count; i++) {
2158 if (pPipelines[i] != VK_NULL_HANDLE) {
2159 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2160 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2161 }
2162 }
2163 crtpl_state->pipe_state.clear();
2164}
2165
2166void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2167 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2168 VkResult result) {
2169 if (VK_SUCCESS != result) return;
     samplerMap[*pSampler] = unique_ptr<SAMPLER_STATE>(new SAMPLER_STATE(pSampler, pCreateInfo));
2170}
2171
2172void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2173 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2174 const VkAllocationCallbacks *pAllocator,
2175 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2176 if (VK_SUCCESS != result) return;
2177 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2178}
2179
2180// For repeatable sorting, not very useful for "memory in range" search
2181struct PushConstantRangeCompare {
2182 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2183 if (lhs->offset == rhs->offset) {
2184 if (lhs->size == rhs->size) {
2185 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2186 return lhs->stageFlags < rhs->stageFlags;
2187 }
2188 // If the offsets are the same then sorting by the end of range is useful for validation
2189 return lhs->size < rhs->size;
2190 }
2191 return lhs->offset < rhs->offset;
2192 }
2193};
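// Example of the resulting ordering (hypothetical ranges): {offset 0, size 16} sorts before
// {offset 0, size 32} (same offset, smaller end of range first), which sorts before
// {offset 16, size 16}; two ranges differing only in stageFlags compare unequal, so the
// std::set in GetCanonicalId below keeps both instead of silently aliasing them.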
2194
2195static PushConstantRangesDict push_constant_ranges_dict;
2196
2197PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2198 if (!info->pPushConstantRanges) {
2199 // Hand back the empty entry (creating as needed)...
2200 return push_constant_ranges_dict.look_up(PushConstantRanges());
2201 }
2202
2203 // Sort the input ranges to ensure equivalent ranges map to the same id
2204 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2205 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2206 sorted.insert(info->pPushConstantRanges + i);
2207 }
2208
2209 PushConstantRanges ranges(sorted.size());
2210 for (const auto range : sorted) {
2211 ranges.emplace_back(*range);
2212 }
2213 return push_constant_ranges_dict.look_up(std::move(ranges));
2214}
2215
2216 // Dictionary of canonical forms of a pipeline's list of descriptor set layouts
2217static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2218
2219// Dictionary of canonical form of the "compatible for set" records
2220static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2221
2222static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2223 const PipelineLayoutSetLayoutsId set_layouts_id) {
2224 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2225}
2226
2227void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2228 const VkAllocationCallbacks *pAllocator,
2229 VkPipelineLayout *pPipelineLayout, VkResult result) {
2230 if (VK_SUCCESS != result) return;
2231
2232 std::unique_ptr<PIPELINE_LAYOUT_STATE> pipeline_layout_state(new PIPELINE_LAYOUT_STATE{});
2233 pipeline_layout_state->layout = *pPipelineLayout;
2234 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2235 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2236 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2237 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayout(this, pCreateInfo->pSetLayouts[i]);
2238 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2239 }
2240
2241 // Get canonical form IDs for the "compatible for set" contents
2242 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2243 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2244 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2245
2246 // Create table of "compatible for set N" canonical forms for trivial accept validation
2247 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2248 pipeline_layout_state->compat_for_set.emplace_back(
2249 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2250 }
2251 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2252}
2253
2254void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2255 const VkAllocationCallbacks *pAllocator,
2256 VkDescriptorPool *pDescriptorPool, VkResult result) {
2257 if (VK_SUCCESS != result) return;
2258 descriptorPoolMap[*pDescriptorPool] =
2259 std::unique_ptr<DESCRIPTOR_POOL_STATE>(new DESCRIPTOR_POOL_STATE(*pDescriptorPool, pCreateInfo));
2260}
2261
2262void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2263 VkDescriptorPoolResetFlags flags, VkResult result) {
2264 if (VK_SUCCESS != result) return;
2265 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
2266 // TODO: validate flags
2267 // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
2268 for (auto ds : pPool->sets) {
2269 FreeDescriptorSet(ds);
2270 }
2271 pPool->sets.clear();
2272 // Reset available count for each type and available sets for this pool
2273 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
2274 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
2275 }
2276 pPool->availableSets = pPool->maxSets;
2277}
2278
2279bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2280 const VkDescriptorSetAllocateInfo *pAllocateInfo,
2281 VkDescriptorSet *pDescriptorSets, void *ads_state_data) {
2282 // Always update common data
2283 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2284 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2285 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2286
2287 return false;
2288}
2289
2290// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
2291void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2292 VkDescriptorSet *pDescriptorSets, VkResult result,
2293 void *ads_state_data) {
2294 if (VK_SUCCESS != result) return;
2295 // All the updates are contained in a single cvdescriptorset function
2296 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2297 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2298 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2299}
2300
2301void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2302 const VkDescriptorSet *pDescriptorSets) {
2303 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2304 // Update available descriptor sets in pool
2305 pool_state->availableSets += count;
2306
2307 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2308 for (uint32_t i = 0; i < count; ++i) {
2309 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2310 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2311 uint32_t type_index = 0, descriptor_count = 0;
2312 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2313 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2314 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2315 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2316 }
2317 FreeDescriptorSet(descriptor_set);
2318 pool_state->sets.erase(descriptor_set);
2319 }
2320 }
2321}
2322
2323void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2324 const VkWriteDescriptorSet *pDescriptorWrites,
2325 uint32_t descriptorCopyCount,
2326 const VkCopyDescriptorSet *pDescriptorCopies) {
2327 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2328 pDescriptorCopies);
2329}
2330
2331void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2332 VkCommandBuffer *pCommandBuffer, VkResult result) {
2333 if (VK_SUCCESS != result) return;
2334 auto pPool = GetCommandPoolState(pCreateInfo->commandPool);
2335 if (pPool) {
2336 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2337 // Add command buffer to its commandPool map
2338 pPool->commandBuffers.insert(pCommandBuffer[i]);
2339 std::unique_ptr<CMD_BUFFER_STATE> pCB(new CMD_BUFFER_STATE{});
2340 pCB->createInfo = *pCreateInfo;
2341 pCB->device = device;
2342 // Add command buffer to map
2343 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
2344 ResetCommandBufferState(pCommandBuffer[i]);
2345 }
2346 }
2347}
2348
2349// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
2350void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
2351 AddCommandBufferBinding(&fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer),
2352 cb_state);
2353
2354 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
2355 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
2356 auto view_state = GetAttachmentImageViewState(fb_state, attachment);
2357 if (view_state) {
2358 AddCommandBufferBindingImageView(cb_state, view_state);
2359 }
2360 }
2361}
2362
2363void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2364 const VkCommandBufferBeginInfo *pBeginInfo) {
2365 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2366 if (!cb_state) return;
2367 // Beginning a command buffer implicitly resets it, so clear its memory references now; validation has already ensured any prior use is not still in flight
2368 ClearCmdBufAndMemReferences(cb_state);
2369 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
2370 // Secondary Command Buffer
2371 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
2372 if (pInfo) {
2373 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
2374 assert(pInfo->renderPass);
2375 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
2376 if (framebuffer) {
2377 // Connect this framebuffer and its children to this cmdBuffer
2378 AddFramebufferBinding(cb_state, framebuffer);
2379 }
2380 }
2381 }
2382 }
2383 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2384 ResetCommandBufferState(commandBuffer);
2385 }
2386 // Set updated state here in case implicit reset occurs above
2387 cb_state->state = CB_RECORDING;
2388 cb_state->beginInfo = *pBeginInfo;
2389 if (cb_state->beginInfo.pInheritanceInfo) {
2390 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2391 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2392 // If we are a secondary command buffer and inheriting, update the items we should inherit.
2393 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2394 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
2395 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
2396 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
2397 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
2398 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
2399 }
2400 }
2401
2402 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
2403 if (chained_device_group_struct) {
2404 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2405 } else {
2406 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2407 }
2408}
2409
2410void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2411 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2412 if (!cb_state) return;
2413 // Cached validation applies only to a particular recording of a particular command buffer.
2414 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
2415 descriptor_set->ClearCachedValidation(cb_state);
2416 }
2417 cb_state->validated_descriptor_sets.clear();
2418 if (VK_SUCCESS == result) {
2419 cb_state->state = CB_RECORDED;
2420 }
2421}
2422
2423void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2424 VkResult result) {
2425 if (VK_SUCCESS == result) {
2426 ResetCommandBufferState(commandBuffer);
2427 }
2428}
2429
2430CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2431 // initially assume everything is static state
2432 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2433
2434 if (ds) {
2435 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
2436 switch (ds->pDynamicStates[i]) {
2437 case VK_DYNAMIC_STATE_LINE_WIDTH:
2438 flags &= ~CBSTATUS_LINE_WIDTH_SET;
2439 break;
2440 case VK_DYNAMIC_STATE_DEPTH_BIAS:
2441 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
2442 break;
2443 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
2444 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
2445 break;
2446 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
2447 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
2448 break;
2449 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
2450 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
2451 break;
2452 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
2453 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
2454 break;
2455 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
2456 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
2457 break;
2458 case VK_DYNAMIC_STATE_SCISSOR:
2459 flags &= ~CBSTATUS_SCISSOR_SET;
2460 break;
2461 case VK_DYNAMIC_STATE_VIEWPORT:
2462 flags &= ~CBSTATUS_VIEWPORT_SET;
2463 break;
2464 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
2465 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2466 break;
2467 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
2468 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
2469 break;
2470 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
2471 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
2472 break;
2473 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
2474 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
2475 break;
2476 default:
2477 break;
2478 }
2479 }
2480 }
2481
2482 return flags;
2483}
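// Example (hypothetical pipeline): a VkPipelineDynamicStateCreateInfo listing only
// VK_DYNAMIC_STATE_VIEWPORT returns CBSTATUS_ALL_STATE_SET with CBSTATUS_VIEWPORT_SET cleared;
// binding that pipeline leaves the viewport as the one piece of state the app must still
// provide via vkCmdSetViewport() before drawing, while everything else is treated as baked in.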
2484
2485// Validation cache:
2486// CV is the bottommost implementor of this extension. Don't pass calls down.
2487// utility function to set collective state for pipeline
2488void SetPipelineState(PIPELINE_STATE *pPipe) {
2489 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2490 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2491 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2492 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2493 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2494 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2495 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2496 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2497 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2498 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2499 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2500 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2501 pPipe->blendConstantsEnabled = true;
2502 }
2503 }
2504 }
2505 }
2506}
2507
2508void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2509 VkPipeline pipeline) {
2510 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2511 assert(cb_state);
2512
2513 auto pipe_state = GetPipelineState(pipeline);
2514 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
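        // Clear the bits the previous pipeline treated as static, recompute the static mask for
        // the new pipeline, then mark those states as "set": static state is supplied by the
        // pipeline itself and requires no vkCmdSet* call.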
2515 cb_state->status &= ~cb_state->static_status;
2516 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
2517 cb_state->status |= cb_state->static_status;
2518 }
2519 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout.layout);
2520 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
2521 SetPipelineState(pipe_state);
2522 AddCommandBufferBinding(&pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
2523}
2524
2525void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2526 uint32_t viewportCount, const VkViewport *pViewports) {
2527 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
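    // Mark a contiguous bit range [firstViewport, firstViewport + viewportCount) as set,
    // e.g. firstViewport = 1, viewportCount = 2 -> mask 0b110.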
2528 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
2529 cb_state->status |= CBSTATUS_VIEWPORT_SET;
2530}
2531
2532void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
2533 uint32_t exclusiveScissorCount,
2534 const VkRect2D *pExclusiveScissors) {
2535 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2536 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
2537 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
2538 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2539}
2540
2541void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
2542 VkImageLayout imageLayout) {
2543 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2544
2545 if (imageView != VK_NULL_HANDLE) {
2546 auto view_state = GetImageViewState(imageView);
2547 AddCommandBufferBindingImageView(cb_state, view_state);
2548 }
2549}
2550
2551void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2552 uint32_t viewportCount,
2553 const VkShadingRatePaletteNV *pShadingRatePalettes) {
2554 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2555 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
2556 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
2557 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
2558}
2559
2560void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
2561 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
2562 const VkAllocationCallbacks *pAllocator,
2563 VkAccelerationStructureNV *pAccelerationStructure,
2564 VkResult result) {
2565 if (VK_SUCCESS != result) return;
2566 std::unique_ptr<ACCELERATION_STRUCTURE_STATE> as_state(new ACCELERATION_STRUCTURE_STATE(*pAccelerationStructure, pCreateInfo));
2567
2568    // Query the requirements up front in case the application never does, to avoid querying them at bind/validation time
2569 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
2570 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2571 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
2572 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
2573 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
2574
2575 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
2576 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2577 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
2578 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2579 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
2580 &as_state->build_scratch_memory_requirements);
2581
2582 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
2583 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2584 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
2585 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2586 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
2587 &as_state->update_scratch_memory_requirements);
2588
2589 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
2590}
2591
2592void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
2593 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
2594 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
2595 if (as_state != nullptr) {
2596 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
2597 as_state->memory_requirements = *pMemoryRequirements;
2598 as_state->memory_requirements_checked = true;
2599 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
2600 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
2601 as_state->build_scratch_memory_requirements_checked = true;
2602 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
2603 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
2604 as_state->update_scratch_memory_requirements_checked = true;
2605 }
2606 }
2607}
2608
2609void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
2610 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
2611 if (VK_SUCCESS != result) return;
2612 for (uint32_t i = 0; i < bindInfoCount; i++) {
2613 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
2614
2615 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
2616 if (as_state) {
2617 // Track bound memory range information
2618 auto mem_info = GetDevMemState(info.memory);
2619 if (mem_info) {
2620 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
2621 as_state->requirements);
2622 }
2623 // Track objects tied to memory
2624 SetMemBinding(info.memory, as_state, info.memoryOffset,
2625 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
2626
2627 // GPU validation of top level acceleration structure building needs acceleration structure handles.
2628 if (enabled.gpu_validation) {
2629 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
2630 }
2631 }
2632 }
2633}
2634
2635void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
2636 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
2637 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
2638 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2639 if (cb_state == nullptr) {
2640 return;
2641 }
2642
2643 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2644 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2645 if (dst_as_state != nullptr) {
2646 dst_as_state->built = true;
2647 dst_as_state->build_info.initialize(pInfo);
2648 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2649 }
2650 if (src_as_state != nullptr) {
2651 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2652 }
2653 cb_state->hasBuildAccelerationStructureCmd = true;
2654}
2655
2656void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
2657 VkAccelerationStructureNV dst,
2658 VkAccelerationStructureNV src,
2659 VkCopyAccelerationStructureModeNV mode) {
2660 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2661 if (cb_state) {
2662 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2663 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2664 if (dst_as_state != nullptr && src_as_state != nullptr) {
2665 dst_as_state->built = true;
2666 dst_as_state->build_info = src_as_state->build_info;
2667 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2668 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2669 }
2670 }
2671}
2672
2673void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
2674 VkAccelerationStructureNV accelerationStructure,
2675 const VkAllocationCallbacks *pAllocator) {
2676 if (!accelerationStructure) return;
2677 auto *as_state = GetAccelerationStructureState(accelerationStructure);
2678 if (as_state) {
2679 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
2680 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
2681 for (auto mem_binding : as_state->GetBoundMemory()) {
2682 auto mem_info = GetDevMemState(mem_binding);
2683 if (mem_info) {
2684 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
2685 }
2686 }
2687 ClearMemoryObjectBindings(obj_struct);
2688 accelerationStructureMap.erase(accelerationStructure);
2689 }
2690}
2691
2692void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2693 uint32_t viewportCount,
2694 const VkViewportWScalingNV *pViewportWScalings) {
2695 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2696 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
2697}
2698
2699void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
2700 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2701 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
2702}
2703
2704void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
2705 uint16_t lineStipplePattern) {
2706 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2707 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
2708}
2709
2710void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
2711 float depthBiasClamp, float depthBiasSlopeFactor) {
2712 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2713 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
2714}
2715
2716void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
2717 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2718 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
2719}
2720
2721void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
2722 float maxDepthBounds) {
2723 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2724 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
2725}
2726
2727void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2728 uint32_t compareMask) {
2729 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2730 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
2731}
2732
2733void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2734 uint32_t writeMask) {
2735 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2736 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
2737}
2738
2739void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2740 uint32_t reference) {
2741 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2742 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
2743}
2744
2745// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
2746// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
2747// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
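// Illustrative call shape (hypothetical values): binding two sets starting at set 1, each with
// one dynamic offset, arrives here as
//   UpdateLastBoundDescriptorSets(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layout_state,
//                                 /*first_set*/ 1, /*set_count*/ 2, pDescriptorSets,
//                                 /*push_descriptor_set*/ nullptr,
//                                 /*dynamic_offset_count*/ 2, pDynamicOffsets);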
2748void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
2749 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
2750 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
2751 cvdescriptorset::DescriptorSet *push_descriptor_set,
2752 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
2753 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
2754 // Defensive
2755 assert(pipeline_layout);
2756 if (!pipeline_layout) return;
2757
2758 uint32_t required_size = first_set + set_count;
2759 const uint32_t last_binding_index = required_size - 1;
2760 assert(last_binding_index < pipeline_layout->compat_for_set.size());
2761
2762 // Some useful shorthand
2763 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
2764 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
2765 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
2766
2767 // We need this three times in this function, but nowhere else
2768 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
2769 if (ds && ds->IsPushDescriptor()) {
2770 assert(ds == last_bound.push_descriptor_set.get());
2771 last_bound.push_descriptor_set = nullptr;
2772 return true;
2773 }
2774 return false;
2775 };
2776
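    // Per the spec's pipeline layout compatibility rules, binding a set that is not layout-
    // compatible with what was previously bound disturbs (invalidates) all higher-numbered set
    // bindings; the compat_id_for_set comparisons below detect exactly that case.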
2777 // Clean up the "disturbed" before and after the range to be set
2778 if (required_size < current_size) {
2779 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
2780 // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
2781 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
2782 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
2783 }
2784 } else {
2785 // We're not disturbing past last, so leave the upper binding data alone.
2786 required_size = current_size;
2787 }
2788 }
2789
2790 // We resize if we need more set entries or if those past "last" are disturbed
2791 if (required_size != current_size) {
2792 last_bound.per_set.resize(required_size);
2793 }
2794
2795 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
2796 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
2797 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
2798 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
2799 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
2800 last_bound.per_set[set_idx].dynamicOffsets.clear();
2801 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
2802 }
2803 }
2804
2805 // Now update the bound sets with the input sets
2806 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
2807 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
2808 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
2809 cvdescriptorset::DescriptorSet *descriptor_set =
2810 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
2811
2812 // Record binding (or push)
2813 if (descriptor_set != last_bound.push_descriptor_set.get()) {
2814 // Only cleanup the push descriptors if they aren't the currently used set.
2815 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
2816 }
2817 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
2818 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
2819
2820 if (descriptor_set) {
2821 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
2822 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
2823 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
2824 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
2825 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
2826 input_dynamic_offsets = end_offset;
2827 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
2828 } else {
2829 last_bound.per_set[set_idx].dynamicOffsets.clear();
2830 }
2831 if (!descriptor_set->IsPushDescriptor()) {
2832 // Can't cache validation of push_descriptors
2833 cb_state->validated_descriptor_sets.insert(descriptor_set);
2834 }
2835 }
2836 }
2837}
2838
2839// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
2840void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
2841 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
2842 uint32_t firstSet, uint32_t setCount,
2843 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
2844 const uint32_t *pDynamicOffsets) {
2845 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2846 auto pipeline_layout = GetPipelineLayout(layout);
2847
2848 // Resize binding arrays
2849 uint32_t last_set_index = firstSet + setCount - 1;
2850 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
2851 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
2852 }
2853
2854 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
2855 dynamicOffsetCount, pDynamicOffsets);
2856 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
2857 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
2858}
2859
2860void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
2861 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
2862 const VkWriteDescriptorSet *pDescriptorWrites) {
2863 const auto &pipeline_layout = GetPipelineLayout(layout);
2864 // Short circuit invalid updates
2865 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
2866 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
2867 return;
2868
2869 // We need a descriptor set to update the bindings with, compatible with the passed layout
2870 const auto dsl = pipeline_layout->set_layouts[set];
2871 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
2872 auto &push_descriptor_set = last_bound.push_descriptor_set;
2873    // If we are disturbing the current push_descriptor_set, clear it
2874 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
2875 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, 0, dsl, 0, this));
2876 }
2877
2878 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
2879 nullptr);
2880 last_bound.pipeline_layout = layout;
2881
2882 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
2883 push_descriptor_set->PerformPushDescriptorsUpdate(descriptorWriteCount, pDescriptorWrites);
2884}
2885
2886void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
2887 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
2888 uint32_t set, uint32_t descriptorWriteCount,
2889 const VkWriteDescriptorSet *pDescriptorWrites) {
2890 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2891 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
2892}
2893
2894void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2895 VkIndexType indexType) {
2896 auto buffer_state = GetBufferState(buffer);
2897 auto cb_state = GetCBState(commandBuffer);
2898
2899 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
2900 cb_state->index_buffer_binding.buffer = buffer;
2901 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
2902 cb_state->index_buffer_binding.offset = offset;
2903 cb_state->index_buffer_binding.index_type = indexType;
2904    // Add binding for this index buffer to this command buffer
2905 AddCommandBufferBindingBuffer(cb_state, buffer_state);
2906}
2907
2908void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
2909 uint32_t bindingCount, const VkBuffer *pBuffers,
2910 const VkDeviceSize *pOffsets) {
2911 auto cb_state = GetCBState(commandBuffer);
2912
2913 uint32_t end = firstBinding + bindingCount;
2914 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
2915 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
2916 }
2917
2918 for (uint32_t i = 0; i < bindingCount; ++i) {
2919 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
2920 vertex_buffer_binding.buffer = pBuffers[i];
2921 vertex_buffer_binding.offset = pOffsets[i];
2922        // Add binding for this vertex buffer to this command buffer
2923 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
2924 }
2925}
2926
2927void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
2928 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
2929 auto cb_state = GetCBState(commandBuffer);
2930 auto dst_buffer_state = GetBufferState(dstBuffer);
2931
2932 // Update bindings between buffer and cmd buffer
2933 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
2934}
2935
2936bool ValidationStateTracker::SetEventStageMask(VkQueue queue, VkCommandBuffer commandBuffer, VkEvent event,
2937 VkPipelineStageFlags stageMask) {
2938 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
2939 if (pCB) {
2940 pCB->eventToStageMap[event] = stageMask;
2941 }
2942 auto queue_data = queueMap.find(queue);
2943 if (queue_data != queueMap.end()) {
2944 queue_data->second.eventToStageMap[event] = stageMask;
2945 }
2946 return false;
2947}
2948
2949void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2950 VkPipelineStageFlags stageMask) {
2951 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2952 auto event_state = GetEventState(event);
2953 if (event_state) {
2954 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
2955 event_state->cb_bindings.insert(cb_state);
2956 }
2957 cb_state->events.push_back(event);
2958 if (!cb_state->waitedEvents.count(event)) {
2959 cb_state->writeEventsBeforeWait.push_back(event);
2960 }
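    // Defer the stage-mask update to submit time: eventUpdates lambdas run when the command
    // buffer is submitted to a queue, and their bool return is a "skip" flag that the state
    // tracker always leaves false.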
2961 cb_state->eventUpdates.emplace_back([=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, stageMask); });
2962}
2963
2964void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2965 VkPipelineStageFlags stageMask) {
2966 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2967 auto event_state = GetEventState(event);
2968 if (event_state) {
2969 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
2970 event_state->cb_bindings.insert(cb_state);
2971 }
2972 cb_state->events.push_back(event);
2973 if (!cb_state->waitedEvents.count(event)) {
2974 cb_state->writeEventsBeforeWait.push_back(event);
2975 }
2976
2977 cb_state->eventUpdates.emplace_back(
2978 [=](VkQueue q) { return SetEventStageMask(q, commandBuffer, event, VkPipelineStageFlags(0)); });
2979}
2980
2981void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2982 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
2983 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2984 uint32_t bufferMemoryBarrierCount,
2985 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2986 uint32_t imageMemoryBarrierCount,
2987 const VkImageMemoryBarrier *pImageMemoryBarriers) {
2988 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2989 for (uint32_t i = 0; i < eventCount; ++i) {
2990 auto event_state = GetEventState(pEvents[i]);
2991 if (event_state) {
2992 AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent), cb_state);
2993 event_state->cb_bindings.insert(cb_state);
2994 }
2995 cb_state->waitedEvents.insert(pEvents[i]);
2996 cb_state->events.push_back(pEvents[i]);
2997 }
2998}
2999
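// Query results only become knowable when commands actually execute, so the Cmd* query hooks do
// not write queryToStateMap directly; they enqueue deferred lambdas (cb_state->queryUpdates)
// that run at queue-submit time. Their bool return is a "skip" flag, always false here.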
3000bool ValidationStateTracker::SetQueryState(VkQueue queue, VkCommandBuffer commandBuffer, QueryObject object, QueryState value) {
3001 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
3002 if (pCB) {
3003 pCB->queryToStateMap[object] = value;
3004 }
3005 auto queue_data = queueMap.find(queue);
3006 if (queue_data != queueMap.end()) {
3007 queue_data->second.queryToStateMap[object] = value;
3008 }
3009 return false;
3010}
3011
3012bool ValidationStateTracker::SetQueryStateMulti(VkQueue queue, VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3013 uint32_t firstQuery, uint32_t queryCount, QueryState value) {
3014 CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
3015 auto queue_data = queueMap.find(queue);
3016
3017 for (uint32_t i = 0; i < queryCount; i++) {
3018 QueryObject object = {queryPool, firstQuery + i};
3019 if (pCB) {
3020 pCB->queryToStateMap[object] = value;
3021 }
3022 if (queue_data != queueMap.end()) {
3023 queue_data->second.queryToStateMap[object] = value;
3024 }
3025 }
3026 return false;
3027}
3028
3029void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
3030 cb_state->activeQueries.insert(query_obj);
3031 cb_state->startedQueries.insert(query_obj);
3032 cb_state->queryUpdates.emplace_back([this, cb_state, query_obj](VkQueue q) {
3033 SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_RUNNING);
3034 return false;
3035 });
3036 AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
3037 VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
3038}
3039
3040void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3041 VkFlags flags) {
3042 QueryObject query = {queryPool, slot};
3043 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3044 RecordCmdBeginQuery(cb_state, query);
3045}
3046
3047void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
3048 cb_state->activeQueries.erase(query_obj);
3049 cb_state->queryUpdates.emplace_back(
3050 [this, cb_state, query_obj](VkQueue q) { return SetQueryState(q, cb_state->commandBuffer, query_obj, QUERYSTATE_ENDED); });
3051 AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
3052 VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
3053}
3054
3055void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
3056 QueryObject query_obj = {queryPool, slot};
3057 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3058 RecordCmdEndQuery(cb_state, query_obj);
3059}
3060
3061void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3062 uint32_t firstQuery, uint32_t queryCount) {
3063 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3064
3065 cb_state->queryUpdates.emplace_back([this, commandBuffer, queryPool, firstQuery, queryCount](VkQueue q) {
3066 return SetQueryStateMulti(q, commandBuffer, queryPool, firstQuery, queryCount, QUERYSTATE_RESET);
3067 });
3068 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3069 cb_state);
3070}
3071
3072void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3073 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3074 VkDeviceSize dstOffset, VkDeviceSize stride,
3075 VkQueryResultFlags flags) {
3076 auto cb_state = GetCBState(commandBuffer);
3077 auto dst_buff_state = GetBufferState(dstBuffer);
3078 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
3079 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3080 cb_state);
3081}
3082
3083void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3084 VkQueryPool queryPool, uint32_t slot) {
3085 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3086 AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
3087 cb_state);
3088 QueryObject query = {queryPool, slot};
3089 cb_state->queryUpdates.emplace_back(
3090 [this, commandBuffer, query](VkQueue q) { return SetQueryState(q, commandBuffer, query, QUERYSTATE_ENDED); });
3091}
3092
3093void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3094 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3095 VkResult result) {
3096 if (VK_SUCCESS != result) return;
3097 // Shadow create info and store in map
3098 std::unique_ptr<FRAMEBUFFER_STATE> fb_state(
3099 new FRAMEBUFFER_STATE(*pFramebuffer, pCreateInfo, GetRenderPassStateSharedPtr(pCreateInfo->renderPass)));
3100
3101 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
3102 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
3103 VkImageView view = pCreateInfo->pAttachments[i];
3104 auto view_state = GetImageViewState(view);
3105 if (!view_state) {
3106 continue;
3107 }
3108 }
3109 }
3110 frameBufferMap[*pFramebuffer] = std::move(fb_state);
3111}
3112
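// Build the subpass dependency graph: each subpass node records its predecessor and successor
// subpasses, dependencies with srcSubpass == dstSubpass go into a per-subpass self_dependencies
// list, and edges involving VK_SUBPASS_EXTERNAL are not graphed.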
3113void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3114 RENDER_PASS_STATE *render_pass) {
3115 auto &subpass_to_node = render_pass->subpassToNode;
3116 subpass_to_node.resize(pCreateInfo->subpassCount);
3117 auto &self_dependencies = render_pass->self_dependencies;
3118 self_dependencies.resize(pCreateInfo->subpassCount);
3119
3120 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
3121 subpass_to_node[i].pass = i;
3122 self_dependencies[i].clear();
3123 }
3124 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
3125 const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
3126 if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
3127 if (dependency.srcSubpass == dependency.dstSubpass) {
3128 self_dependencies[dependency.srcSubpass].push_back(i);
3129 } else {
3130 subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
3131 subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
3132 }
3133 }
3134 }
3135}
3136
3137static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
3138 if (index == VK_ATTACHMENT_UNUSED) return;
3139
3140 if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
3141}
3142
3143void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
3144 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
3145 VkRenderPass *pRenderPass) {
3146 render_pass->renderPass = *pRenderPass;
3147 auto create_info = render_pass->createInfo.ptr();
3148
3149    RecordRenderPassDAG(rp_version, create_info, render_pass.get());
3150
3151 for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
3152 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
3153 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
3154 MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
3155
3156 // resolve attachments are considered to be written
3157 if (subpass.pResolveAttachments) {
3158 MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
3159 }
3160 }
3161 if (subpass.pDepthStencilAttachment) {
3162 MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
3163 }
3164 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
3165 MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
3166 }
3167 }
3168
3169    // render_pass is a reference parameter; we still must std::move so that move assignment is invoked.
3170 renderPassMap[*pRenderPass] = std::move(render_pass);
3171}
3172
3173// Style note:
3174// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces caller to move
3175// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
3176// construction or assignment.
3177void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3178 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3179 VkResult result) {
3180 if (VK_SUCCESS != result) return;
3181 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3182 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
3183}
3184
3185void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3186 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3187 VkResult result) {
3188 if (VK_SUCCESS != result) return;
3189 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3190 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
3191}
3192
3193void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3194 const VkRenderPassBeginInfo *pRenderPassBegin,
3195 const VkSubpassContents contents) {
3196 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3197 auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
3198 auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
3199
3200 if (render_pass_state) {
3201 cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
3202 cb_state->activeRenderPass = render_pass_state;
3203 // This is a shallow copy as that is all that is needed for now
3204 cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
3205 cb_state->activeSubpass = 0;
3206 cb_state->activeSubpassContents = contents;
3207 cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
3208 // Connect this framebuffer and its children to this cmdBuffer
3209 AddFramebufferBinding(cb_state, framebuffer);
3210 // Connect this RP to cmdBuffer
3211 AddCommandBufferBinding(&render_pass_state->cb_bindings,
3212 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass), cb_state);
3213
3214 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
3215 if (chained_device_group_struct) {
3216 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3217 } else {
3218 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3219 }
3220 }
3221}
3222
3223void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3224 const VkRenderPassBeginInfo *pRenderPassBegin,
3225 VkSubpassContents contents) {
3226 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3227}
3228
3229void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3230 const VkRenderPassBeginInfo *pRenderPassBegin,
3231 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
3232 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3233}
3234
3235void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3236 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3237 cb_state->activeSubpass++;
3238 cb_state->activeSubpassContents = contents;
3239}
3240
3241void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3242 RecordCmdNextSubpass(commandBuffer, contents);
3243}
3244
3245void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
3246 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
3247 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3248 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3249}
3250
3251void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
3252 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3253 cb_state->activeRenderPass = nullptr;
3254 cb_state->activeSubpass = 0;
3255 cb_state->activeFramebuffer = VK_NULL_HANDLE;
3256}
3257
3258void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
3259 RecordCmdEndRenderPassState(commandBuffer);
3260}
3261
3262void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
3263 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3264 RecordCmdEndRenderPassState(commandBuffer);
3265}
3266
3267void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
3268 const VkCommandBuffer *pCommandBuffers) {
3269 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3270
3271 CMD_BUFFER_STATE *sub_cb_state = NULL;
3272 for (uint32_t i = 0; i < commandBuffersCount; i++) {
3273 sub_cb_state = GetCBState(pCommandBuffers[i]);
3274 assert(sub_cb_state);
3275 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
3276 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
3277 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
3278 // from the validation step to the recording step
3279 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
3280 }
3281 }
3282
3283        // Propagate initial layout and current layout state to the primary cmd buffer
3284        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
3285        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
3286 // for those other classes.
3287 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
3288 const auto image = sub_layout_map_entry.first;
3289 const auto *image_state = GetImageState(image);
3290 if (!image_state) continue; // Can't set layouts of a dead image
3291
3292 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
3293 const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
3294 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
3295 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
3296 }
3297
3298 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
3299 cb_state->linkedCommandBuffers.insert(sub_cb_state);
3300 sub_cb_state->linkedCommandBuffers.insert(cb_state);
3301 for (auto &function : sub_cb_state->queryUpdates) {
3302 cb_state->queryUpdates.push_back(function);
3303 }
3304 for (auto &function : sub_cb_state->queue_submit_functions) {
3305 cb_state->queue_submit_functions.push_back(function);
3306 }
3307 }
3308}
3309
3310void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
3311 VkFlags flags, void **ppData, VkResult result) {
3312 if (VK_SUCCESS != result) return;
3313 RecordMappedMemory(mem, offset, size, ppData);
3314}
3315
3316void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
3317 auto mem_info = GetDevMemState(mem);
3318 if (mem_info) {
3319 mem_info->mapped_range = MemRange();
3320 mem_info->p_driver_data = nullptr;
3321 }
3322}
3323
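// Images created from a swapchain are bound via VkBindImageMemorySwapchainInfoKHR rather than a
// VkDeviceMemory allocation, so the two paths below track different state: swapchain binds record
// the owning swapchain and image index, while memory binds record the bound memory range.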
3324void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
3325 IMAGE_STATE *image_state = GetImageState(bindInfo.image);
3326 if (image_state) {
3327 const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
3328 if (swapchain_info) {
3329 auto swapchain = GetSwapchainState(swapchain_info->swapchain);
3330 if (swapchain) {
3331 swapchain->bound_images.insert(image_state->image);
3332 image_state->bind_swapchain = swapchain_info->swapchain;
3333 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
3334 }
3335 } else {
3336 // Track bound memory range information
3337 auto mem_info = GetDevMemState(bindInfo.memory);
3338 if (mem_info) {
3339 InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
3340 image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
3341 }
3342
3343 // Track objects tied to memory
3344 SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
3345 VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
3346 }
3347 if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
3348 AddAliasingImage(image_state);
3349 }
3350 }
3351}
3352
3353void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
3354 VkDeviceSize memoryOffset, VkResult result) {
3355 if (VK_SUCCESS != result) return;
3356 VkBindImageMemoryInfo bindInfo = {};
3357 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
3358 bindInfo.image = image;
3359 bindInfo.memory = mem;
3360 bindInfo.memoryOffset = memoryOffset;
3361 UpdateBindImageMemoryState(bindInfo);
3362}
3363
3364void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
3365 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3366 if (VK_SUCCESS != result) return;
3367 for (uint32_t i = 0; i < bindInfoCount; i++) {
3368 UpdateBindImageMemoryState(pBindInfos[i]);
3369 }
3370}
3371
3372void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
3373 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3374 if (VK_SUCCESS != result) return;
3375 for (uint32_t i = 0; i < bindInfoCount; i++) {
3376 UpdateBindImageMemoryState(pBindInfos[i]);
3377 }
3378}
3379
3380void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
3381 auto event_state = GetEventState(event);
3382 if (event_state) {
3383 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
3384 }
3385    // Setting an event from the host is visible to all queues immediately, so update stageMask for any queue that has seen this event
3386 // TODO : For correctness this needs separate fix to verify that app doesn't make incorrect assumptions about the
3387 // ordering of this command in relation to vkCmd[Set|Reset]Events (see GH297)
3388 for (auto queue_data : queueMap) {
3389 auto event_entry = queue_data.second.eventToStageMap.find(event);
3390 if (event_entry != queue_data.second.eventToStageMap.end()) {
3391 event_entry->second |= VK_PIPELINE_STAGE_HOST_BIT;
3392 }
3393 }
3394}
3395
3396void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
3397 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
3398 VkResult result) {
3399 if (VK_SUCCESS != result) return;
3400 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
3401 pImportSemaphoreFdInfo->flags);
3402}
3403
3404void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
3405 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
3406 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
3407 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
3408 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
3409 semaphore_state->scope = kSyncScopeExternalPermanent;
3410 }
3411}
3412
3413#ifdef VK_USE_PLATFORM_WIN32_KHR
3414void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
3415 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
3416 if (VK_SUCCESS != result) return;
3417 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
3418 pImportSemaphoreWin32HandleInfo->flags);
3419}
3420
3421void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
3422 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
3423 HANDLE *pHandle, VkResult result) {
3424 if (VK_SUCCESS != result) return;
3425 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
3426}
3427
3428void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
3429 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
3430 if (VK_SUCCESS != result) return;
3431 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
3432 pImportFenceWin32HandleInfo->flags);
3433}
3434
3435void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
3436 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
3437 HANDLE *pHandle, VkResult result) {
3438 if (VK_SUCCESS != result) return;
3439 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
3440}
3441#endif
3442
3443void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
3444 VkResult result) {
3445 if (VK_SUCCESS != result) return;
3446 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
3447}
3448
3449void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
3450 VkFenceImportFlagsKHR flags) {
3451 FENCE_STATE *fence_node = GetFenceState(fence);
3452 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
3453 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
3454 fence_node->scope == kSyncScopeInternal) {
3455 fence_node->scope = kSyncScopeExternalTemporary;
3456 } else {
3457 fence_node->scope = kSyncScopeExternalPermanent;
3458 }
3459 }
3460}
3461
3462void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
3463 VkResult result) {
3464 if (VK_SUCCESS != result) return;
3465 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
3466}
3467
3468void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
3469 FENCE_STATE *fence_state = GetFenceState(fence);
3470 if (fence_state) {
3471 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
3472 // Export with reference transference becomes external
3473 fence_state->scope = kSyncScopeExternalPermanent;
3474 } else if (fence_state->scope == kSyncScopeInternal) {
3475 // Export with copy transference has a side effect of resetting the fence
3476 fence_state->state = FENCE_UNSIGNALED;
3477 }
3478 }
3479}
3480
3481void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
3482 VkResult result) {
3483 if (VK_SUCCESS != result) return;
3484 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
3485}
3486
3487void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
3488 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
3489 if (VK_SUCCESS != result) return;
3490 eventMap[*pEvent].write_in_use = 0;
3491 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
3492}
3493
3494void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
3495 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
3496 SWAPCHAIN_NODE *old_swapchain_state) {
3497 if (VK_SUCCESS == result) {
3498 auto swapchain_state = unique_ptr<SWAPCHAIN_NODE>(new SWAPCHAIN_NODE(pCreateInfo, *pSwapchain));
3499 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
3500 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
3501 swapchain_state->shared_presentable = true;
3502 }
3503 surface_state->swapchain = swapchain_state.get();
3504 swapchainMap[*pSwapchain] = std::move(swapchain_state);
3505 } else {
3506 surface_state->swapchain = nullptr;
3507 }
3508 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
3509 if (old_swapchain_state) {
3510 old_swapchain_state->retired = true;
3511 }
3512 return;
3513}
3514
3515void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
3516 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
3517 VkResult result) {
3518 auto surface_state = GetSurfaceState(pCreateInfo->surface);
3519 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
3520 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
3521}
3522
3523void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
3524 const VkAllocationCallbacks *pAllocator) {
3525 if (!swapchain) return;
3526 auto swapchain_data = GetSwapchainState(swapchain);
3527 if (swapchain_data) {
3528 for (const auto &swapchain_image : swapchain_data->images) {
3529 ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image, kVulkanObjectTypeImage));
3530 imageMap.erase(swapchain_image);
3531 }
3532
3533 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
3534 if (surface_state) {
3535 if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
3536 }
3537 RemoveAliasingImages(swapchain_data->bound_images);
3538 swapchainMap.erase(swapchain);
3539 }
3540}
3541
3542void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
3543 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
3544 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
3545 auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
3546 if (pSemaphore) {
3547 pSemaphore->signaler.first = VK_NULL_HANDLE;
3548 pSemaphore->signaled = false;
3549 }
3550 }
3551
3552 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
3553 // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
3554 // confused itself just as much.
3555 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
3556 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
3557 // Mark the image as having been released to the WSI
3558 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
3559 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
3560 auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]];
3561 auto image_state = GetImageState(image);
3562 if (image_state) {
3563 image_state->acquired = false;
3564 }
3565 }
3566 }
3567 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
3568 // its semaphore waits) /never/ participate in any completion proof.
3569}
3570
3571void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
3572 const VkSwapchainCreateInfoKHR *pCreateInfos,
3573 const VkAllocationCallbacks *pAllocator,
3574 VkSwapchainKHR *pSwapchains, VkResult result) {
3575 if (pCreateInfos) {
3576 for (uint32_t i = 0; i < swapchainCount; i++) {
3577 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
3578 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
3579 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
3580 }
3581 }
3582}
3583
3584void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
3585 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
3586 auto pFence = GetFenceState(fence);
3587 if (pFence && pFence->scope == kSyncScopeInternal) {
3588 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
3589 // import
3590 pFence->state = FENCE_INFLIGHT;
3591 pFence->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
3592 }
3593
3594 auto pSemaphore = GetSemaphoreState(semaphore);
3595 if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
3596 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
3597 // temporary import
3598 pSemaphore->signaled = true;
3599 pSemaphore->signaler.first = VK_NULL_HANDLE;
3600 }
3601
3602 // Mark the image as acquired.
3603 auto swapchain_data = GetSwapchainState(swapchain);
3604 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
3605 auto image = swapchain_data->images[*pImageIndex];
3606 auto image_state = GetImageState(image);
3607 if (image_state) {
3608 image_state->acquired = true;
3609 image_state->shared_presentable = swapchain_data->shared_presentable;
3610 }
3611 }
3612}
3613
3614void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
3615 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
3616 VkResult result) {
3617 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
3618 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
3619}
3620
3621void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
3622 uint32_t *pImageIndex, VkResult result) {
3623 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
3624 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
3625 pAcquireInfo->fence, pImageIndex);
3626}
3627
3628void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3629 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
3630    if ((NULL != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
3631 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
3632 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
3633 phys_device_state.phys_device = pPhysicalDevices[i];
3634 // Init actual features for each physical device
3635 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
3636 }
3637 }
3638}
3639
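// The queue-family queries follow the standard Vulkan two-call idiom, e.g.:
//     uint32_t count = 0;
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);       // count-only call -> QUERY_COUNT
//     std::vector<VkQueueFamilyProperties> props(count);
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, props.data());  // fill call -> QUERY_DETAILS
// The tracker records the furthest stage reached so later validation can flag out-of-order usage.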
// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}

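// The Vulkan 1.0 entry point returns plain VkQueueFamilyProperties, so its results are wrapped in temporary
// VkQueueFamilyProperties2KHR structs here, letting the single common updater above serve all three query variants.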
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::unique_ptr<SURFACE_STATE>(new SURFACE_STATE{*pSurface});
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

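// VkSurfaceCapabilities2EXT is not layout-compatible with VkSurfaceCapabilitiesKHR (it is a pNext-extensible struct
// that also carries supportedSurfaceCounters), so the shared fields are copied individually into the cached
// VkSurfaceCapabilitiesKHR below.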
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;  // Match the other capability queries: only record state on success.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

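// The present-mode and surface-format queries below use the same two-call idiom as the queue-family queries: a
// count-only call advances the tracked state to QUERY_COUNT, and a call that fills the output array advances it to
// QUERY_DETAILS.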
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, &local_create_info));
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

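// A descriptor update template encodes descriptor writes as raw bytes plus per-entry offsets and strides. For
// validation purposes the template is decoded back into equivalent VkWriteDescriptorSet updates (see
// PerformUpdateDescriptorSetsWithTemplateKHR below) so the normal descriptor-update paths can process them.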
void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

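// Push-descriptor templates have no backing descriptor set: the template is decoded against the descriptor set layout
// taken from the pipeline layout at the given set index, and the resulting writes are recorded as push-descriptor
// state on the command buffer instead.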
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->IsDestroyed()) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.host_query_reset_features.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
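    // Clamp to the pool's actual size so an over-large queryCount can't walk past the last query in the pool.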
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        auto query_it = queryToStateMap.find(query_obj);
        if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
    }
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayout(this, p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
    }
}

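// If a VkDescriptorSetVariableDescriptorCountAllocateInfoEXT chain entry supplies exactly one count per set being
// allocated, those counts size the variable-length final binding of each set; otherwise the variable count defaults
// to zero.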
// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= it->second;
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        std::unique_ptr<cvdescriptorset::DescriptorSet> new_ds(new cvdescriptorset::DescriptorSet(
            descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i], variable_count, this));
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

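// Snapshot the current vertex buffer bindings at each draw so later validation can see exactly which buffers each
// recorded draw consumed, even if the bindings change before the command buffer is submitted.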
static inline void UpdateResourceTrackingOnDraw(CMD_BUFFER_STATE *pCB) {
    pCB->cb_vertex_buffer_binding_info.push_back(pCB->current_vertex_buffer_binding_info);
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    UpdateResourceTrackingOnDraw(cb_state);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

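// A shader module is only parsed when the blob begins with the SPIR-V magic number; anything else (e.g. GLSL accepted
// via VK_NV_glsl_shader) gets an empty placeholder SHADER_MODULE_STATE so later stages can skip SPIR-V-based checks.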
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    std::unique_ptr<SHADER_MODULE_STATE> new_shader_module(
        is_spirv ? new SHADER_MODULE_STATE(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
                 : new SHADER_MODULE_STATE());
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

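// Walks one pipeline stage's SPIR-V: finds the entrypoint, collects the ids reachable from it, applies any execution
// modes to the pipeline, and folds the stage's descriptor uses into the pipeline's active_slots requirements.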
void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
    }
}

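// Push constant state is only meaningful against a compatible pipeline layout. When a command buffer binds a layout
// whose push constant ranges differ from what was last recorded, the cached data is discarded and re-sized to cover
// the largest offset + size among the new ranges.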
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}