/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

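// Record state for a newly created image: Android external-format/AHB info, swapchain
// linkage, and (except for external-format Android images) pre-fetched memory requirements.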
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->external_format_android) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

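// Tear down state for a destroyed image: invalidate bound command buffers, release memory
// range and swapchain references, unlink aliases, and remove it from imageMap.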
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

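// The Cmd* record hooks below do no validation work of their own; they only record the
// bindings between the source/destination resources and the command buffer so that later
// invalidation can find the affected command buffers.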
void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

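// Record state for a newly created buffer; memory requirements are fetched eagerly so they
// are available even if the app never queries them.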
void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Get a set of requirements in the case the app does not
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

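// For an image created with VK_IMAGE_CREATE_ALIAS_BIT, cross-link it with every compatible
// image bound to the same memory (or to the same swapchain image slot) so that aliased
// images can be invalidated together.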
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

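// Remove this image from the aliasing set of every image it aliases, then clear its own set.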
void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear. Because bound_images contains the cross references, clearing each
    // image's aliasing set in this single loop removes the whole web of references; a two-way
    // erase is not needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

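// Handle-to-state lookup helpers; each returns nullptr when the handle is not tracked.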
const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

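// Physical-device state is tracked at instance level; fall back to the instance state's map
// when this (device-level) tracker's map is empty.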
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

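// Create and store DEVICE_MEMORY_STATE for a new allocation, capturing dedicated-allocation
// and export-memory info from the pNext chain.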
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    AddCommandBufferBinding(pMemInfo->cb_bindings,
                                            VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
                }
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}

// For the NULL mem case, clear any previous binding; otherwise:
// Make sure given object is in its object map
// IF a previous binding existed, update binding
// Add reference from objectInfo to memoryInfo
// Add reference off of object's binding info
// The return value is a "skip" flag for the caller (currently always false)
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = false;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

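// Called when recording a draw/dispatch: re-binds the pipeline's active descriptor sets and
// their resources to the command buffer, skipping or reducing the work when a set and its
// contents are unchanged since the last validation pass.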
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

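// Capture everything validation needs from device creation: enabled core/extension features
// (from pEnabledFeatures or the pNext chain), queue family properties, and extension-specific
// physical-device properties and limits.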
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.eight_bit_storage.storageBuffer8BitAccess = vulkan_12_features->storageBuffer8BitAccess;
        state_tracker->enabled_features.eight_bit_storage.uniformAndStorageBuffer8BitAccess =
            vulkan_12_features->uniformAndStorageBuffer8BitAccess;
        state_tracker->enabled_features.eight_bit_storage.storagePushConstant8 = vulkan_12_features->storagePushConstant8;

        state_tracker->enabled_features.float16_int8.shaderFloat16 = vulkan_12_features->shaderFloat16;
        state_tracker->enabled_features.float16_int8.shaderInt8 = vulkan_12_features->shaderInt8;

        if (vulkan_12_features->descriptorIndexing) {
            VkPhysicalDeviceFeatures2 features2 = {};
            auto di_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeatures>();
            features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&di_features);
            DispatchGetPhysicalDeviceFeatures2KHR(gpu, &features2);
            state_tracker->enabled_features.descriptor_indexing = di_features;
        }
        state_tracker->enabled_features.scalar_block_layout_features.scalarBlockLayout = vulkan_12_features->scalarBlockLayout;

        state_tracker->enabled_features.imageless_framebuffer_features.imagelessFramebuffer =
            vulkan_12_features->imagelessFramebuffer;

        state_tracker->enabled_features.uniform_buffer_standard_layout.uniformBufferStandardLayout =
            vulkan_12_features->uniformBufferStandardLayout;

        state_tracker->enabled_features.subgroup_extended_types_features.shaderSubgroupExtendedTypes =
            vulkan_12_features->shaderSubgroupExtendedTypes;

        state_tracker->enabled_features.separate_depth_stencil_layouts_features.separateDepthStencilLayouts =
            vulkan_12_features->separateDepthStencilLayouts;

        state_tracker->enabled_features.host_query_reset_features.hostQueryReset = vulkan_12_features->hostQueryReset;

        state_tracker->enabled_features.timeline_semaphore_features.timelineSemaphore = vulkan_12_features->timelineSemaphore;

        state_tracker->enabled_features.buffer_device_address.bufferDeviceAddress = vulkan_12_features->bufferDeviceAddress;
        state_tracker->enabled_features.buffer_device_address.bufferDeviceAddressCaptureReplay =
            vulkan_12_features->bufferDeviceAddressCaptureReplay;
        state_tracker->enabled_features.buffer_device_address.bufferDeviceAddressMultiDevice =
            vulkan_12_features->bufferDeviceAddressMultiDevice;

    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan12Features

        const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
        if (eight_bit_storage_features) {
            state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
        }

        const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
        if (float16_int8_features) {
            state_tracker->enabled_features.float16_int8 = *float16_int8_features;
        }

        const auto *descriptor_indexing_features =
            lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
        if (descriptor_indexing_features) {
            state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
        }

        const auto *scalar_block_layout_features =
            lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
        }

        const auto *imageless_framebuffer_features =
            lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
        }

        const auto *uniform_buffer_standard_layout_features =
            lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
        }

        const auto *subgroup_extended_types_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.subgroup_extended_types_features = *subgroup_extended_types_features;
        }

        const auto *separate_depth_stencil_layouts_features =
            lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.separate_depth_stencil_layouts_features = *separate_depth_stencil_layouts_features;
        }

        const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
        }

        const auto *timeline_semaphore_features =
            lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.timeline_semaphore_features = *timeline_semaphore_features;
        }

        const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.buffer_device_address = *buffer_device_address;
        }
    }

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_device_address_ext) {
        state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
    if (device_coherent_memory_features) {
        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
    }

locke-lunargd556cc32019-09-17 01:21:23 -06001194    // Store physical device properties and physical device memory limits into the state tracker
1195 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1196 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
1197
1198 const auto &dev_ext = state_tracker->device_extensions;
1199 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1200
1201 if (dev_ext.vk_khr_push_descriptor) {
1202 // Get the needed push_descriptor limits
1203 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1204 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1205 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1206 }
1207
1208 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
1209 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1210 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1211 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1212 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
1213 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
1214 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
1215 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
1216 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1217 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001218 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001219 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &phys_dev_props->timeline_semaphore_props);
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001220 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &phys_dev_props->float_controls_props);
1221
locke-lunargd556cc32019-09-17 01:21:23 -06001222 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1223 // Get the needed cooperative_matrix properties
1224 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1225 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
1226 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1227 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1228
1229 uint32_t numCooperativeMatrixProperties = 0;
1230 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
1231 state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
1232 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
1233
1234 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
1235 state_tracker->cooperative_matrix_properties.data());
1236 }
1237 if (state_tracker->api_version >= VK_API_VERSION_1_1) {
1238 // Get the needed subgroup limits
1239 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
1240 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
1241 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1242
1243 state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
1244 }
1245
1246 // Store queue family data
1247 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1248 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
1249 state_tracker->queue_family_index_map.insert(
1250 std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
1251 }
1252 }
1253}
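// The feature blocks above all use the same pNext-chain lookup pattern. A minimal
// illustrative sketch (assuming, as with the chassis helpers, that lvl_find_in_chain
// matches a struct in the chain by its sType):
//
//     const auto *feats = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
//     if (feats && feats->transformFeedback) {
//         // the app requested this feature at device creation
//     }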
1254
1255void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1256 if (!device) return;
1257
locke-lunargd556cc32019-09-17 01:21:23 -06001258 // Reset all command buffers before destroying them, to unlink object_bindings.
1259 for (auto &commandBuffer : commandBufferMap) {
1260 ResetCommandBufferState(commandBuffer.first);
1261 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001262 pipelineMap.clear();
1263 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001264 commandBufferMap.clear();
1265
1266 // This will also delete all sets in the pool & remove them from setMap
1267 DeleteDescriptorSetPools();
1268 // All sets should be removed
1269 assert(setMap.empty());
1270 descriptorSetLayoutMap.clear();
1271 imageViewMap.clear();
1272 imageMap.clear();
1273 bufferViewMap.clear();
1274 bufferMap.clear();
1275 // Queues persist until device is destroyed
1276 queueMap.clear();
1277}
1278
1279// Loop through bound objects and increment their in_use counts.
1280void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1281 for (auto obj : cb_node->object_bindings) {
1282 auto base_obj = GetStateStructPtrFromObject(obj);
1283 if (base_obj) {
1284 base_obj->in_use.fetch_add(1);
1285 }
1286 }
1287}
1288
1289// Track which resources are in-flight by atomically incrementing their "in_use" count
1290void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1291 cb_node->submitCount++;
1292 cb_node->in_use.fetch_add(1);
1293
1294    // First increment for all "generic" objects bound to the cmd buffer, then handle the special-case objects below
1295 IncrementBoundObjects(cb_node);
1296    // TODO : We should be able to remove the NULL look-up checks below once all the
1297    // corresponding cases are verified to put the command buffer into CB_INVALID state,
1298    // and that invalid state is flagged before this function is called
1299 for (auto event : cb_node->writeEventsBeforeWait) {
1300 auto event_state = GetEventState(event);
1301 if (event_state) event_state->write_in_use++;
1302 }
1303}
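// Note: the in_use increments above are balanced by DecrementBoundResources() and the
// fetch_sub calls in RetireWorkOnQueue() once the submission is known to have completed
// (fence wait, fence status, queue wait-idle, or device wait-idle).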
1304
1305// Decrement in-use count for objects bound to command buffer
1306void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
1307 BASE_NODE *base_obj = nullptr;
1308 for (auto obj : cb_node->object_bindings) {
1309 base_obj = GetStateStructPtrFromObject(obj);
1310 if (base_obj) {
1311 base_obj->in_use.fetch_sub(1);
1312 }
1313 }
1314}
1315
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001316void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
locke-lunargd556cc32019-09-17 01:21:23 -06001317 std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
1318
1319 // Roll this queue forward, one submission at a time.
1320 while (pQueue->seq < seq) {
1321 auto &submission = pQueue->submissions.front();
1322
1323 for (auto &wait : submission.waitSemaphores) {
1324 auto pSemaphore = GetSemaphoreState(wait.semaphore);
1325 if (pSemaphore) {
1326 pSemaphore->in_use.fetch_sub(1);
1327 }
1328 auto &lastSeq = otherQueueSeqs[wait.queue];
1329 lastSeq = std::max(lastSeq, wait.seq);
1330 }
1331
1332 for (auto &semaphore : submission.signalSemaphores) {
1333 auto pSemaphore = GetSemaphoreState(semaphore);
1334 if (pSemaphore) {
1335 pSemaphore->in_use.fetch_sub(1);
1336 }
1337 }
1338
1339 for (auto &semaphore : submission.externalSemaphores) {
1340 auto pSemaphore = GetSemaphoreState(semaphore);
1341 if (pSemaphore) {
1342 pSemaphore->in_use.fetch_sub(1);
1343 }
1344 }
1345
1346 for (auto cb : submission.cbs) {
1347 auto cb_node = GetCBState(cb);
1348 if (!cb_node) {
1349 continue;
1350 }
1351 // First perform decrement on general case bound objects
1352 DecrementBoundResources(cb_node);
1353 for (auto event : cb_node->writeEventsBeforeWait) {
1354 auto eventNode = eventMap.find(event);
1355 if (eventNode != eventMap.end()) {
1356 eventNode->second.write_in_use--;
1357 }
1358 }
Jeff Bolz310775c2019-10-09 00:46:33 -05001359 QueryMap localQueryToStateMap;
1360 for (auto &function : cb_node->queryUpdates) {
1361 function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
1362 }
1363
1364 for (auto queryStatePair : localQueryToStateMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001365 if (queryStatePair.second == QUERYSTATE_ENDED) {
1366 queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001367
1368 const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryStatePair.first.pool);
1369 if (qp_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR)
1370 queryPassToStateMap[QueryObjectPass(queryStatePair.first, submission.perf_submit_pass)] =
1371 QUERYSTATE_AVAILABLE;
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001372 }
locke-lunargd556cc32019-09-17 01:21:23 -06001373 }
locke-lunargd556cc32019-09-17 01:21:23 -06001374 cb_node->in_use.fetch_sub(1);
1375 }
1376
1377 auto pFence = GetFenceState(submission.fence);
1378 if (pFence && pFence->scope == kSyncScopeInternal) {
1379 pFence->state = FENCE_RETIRED;
1380 }
1381
1382 pQueue->submissions.pop_front();
1383 pQueue->seq++;
1384 }
1385
1386 // Roll other queues forward to the highest seq we saw a wait for
1387 for (auto qs : otherQueueSeqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001388 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001389 }
1390}
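// Cross-queue note: each retired wait carries the (queue, seq) pair of its signaler,
// proving that the signaling queue has reached that sequence number. otherQueueSeqs
// records the highest such seq per queue, and the recursive call above then rolls
// those queues forward as well.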
1391
1392// Submit a fence to a queue, using it to delimit previously submitted fences and
1393// previously untracked work on that queue.
1394static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1395 pFence->state = FENCE_INFLIGHT;
1396 pFence->signaler.first = pQueue->queue;
1397 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1398}
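// Worked example (sketch): with pQueue->seq == 10 and two submissions still pending,
// submitting three more batches with a fence yields signaler.second == 10 + 2 + 3 == 15,
// i.e. the fence retires once the queue has rolled forward through sequence 15.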
1399
1400void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1401 VkFence fence, VkResult result) {
1402 uint64_t early_retire_seq = 0;
1403 auto pQueue = GetQueueState(queue);
1404 auto pFence = GetFenceState(fence);
1405
1406 if (pFence) {
1407 if (pFence->scope == kSyncScopeInternal) {
1408 // Mark fence in use
1409 SubmitFence(pQueue, pFence, std::max(1u, submitCount));
1410 if (!submitCount) {
1411 // If no submissions, but just dropping a fence on the end of the queue,
1412 // record an empty submission with just the fence, so we can determine
1413 // its completion.
1414 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001415 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001416 }
1417 } else {
1418            // Retire work up until this fence early; we will not see the wait that corresponds to this signal
1419 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1420 }
1421 }
1422
1423 // Now process each individual submit
1424 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1425 std::vector<VkCommandBuffer> cbs;
1426 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1427 vector<SEMAPHORE_WAIT> semaphore_waits;
1428 vector<VkSemaphore> semaphore_signals;
1429 vector<VkSemaphore> semaphore_externals;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001430 auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001431 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
1432 VkSemaphore semaphore = submit->pWaitSemaphores[i];
1433 auto pSemaphore = GetSemaphoreState(semaphore);
1434 if (pSemaphore) {
1435 if (pSemaphore->scope == kSyncScopeInternal) {
1436 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1437 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1438 pSemaphore->in_use.fetch_add(1);
1439 }
1440 pSemaphore->signaler.first = VK_NULL_HANDLE;
1441 pSemaphore->signaled = false;
1442 } else {
1443 semaphore_externals.push_back(semaphore);
1444 pSemaphore->in_use.fetch_add(1);
1445 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1446 pSemaphore->scope = kSyncScopeInternal;
1447 }
1448 }
1449 }
1450 }
1451 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
1452 VkSemaphore semaphore = submit->pSignalSemaphores[i];
1453 auto pSemaphore = GetSemaphoreState(semaphore);
1454 if (pSemaphore) {
1455 if (pSemaphore->scope == kSyncScopeInternal) {
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001456 if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
1457 pSemaphore->signaler.first = queue;
1458 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1459 pSemaphore->signaled = true;
1460 } else {
1461                    // Guard against invalid API usage where VkTimelineSemaphoreSubmitInfoKHR is missing from pNext
                        if (timeline_semaphore_submit) pSemaphore->payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
1462 }
locke-lunargd556cc32019-09-17 01:21:23 -06001463 pSemaphore->in_use.fetch_add(1);
1464 semaphore_signals.push_back(semaphore);
1465 } else {
1466                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1467 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1468 }
1469 }
1470 }
1471 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
1472 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
1473 if (cb_node) {
1474 cbs.push_back(submit->pCommandBuffers[i]);
1475 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
1476 cbs.push_back(secondaryCmdBuffer->commandBuffer);
1477 IncrementResources(secondaryCmdBuffer);
1478 }
1479 IncrementResources(cb_node);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001480
1481 QueryMap localQueryToStateMap;
1482 for (auto &function : cb_node->queryUpdates) {
1483 function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
1484 }
1485
1486 for (auto queryStatePair : localQueryToStateMap) {
1487 queryToStateMap[queryStatePair.first] = queryStatePair.second;
1488 }
1489
1490 EventToStageMap localEventToStageMap;
1491 for (auto &function : cb_node->eventUpdates) {
1492 function(nullptr, /*do_validate*/ false, &localEventToStageMap);
1493 }
1494
1495 for (auto eventStagePair : localEventToStageMap) {
1496 eventMap[eventStagePair.first].stageMask = eventStagePair.second;
1497 }
locke-lunargd556cc32019-09-17 01:21:23 -06001498 }
1499 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001500
1501 const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
1502
locke-lunargd556cc32019-09-17 01:21:23 -06001503 pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001504 submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
1505 perf_submit ? perf_submit->counterPassIndex : 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001506 }
1507
1508 if (early_retire_seq) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001509 RetireWorkOnQueue(pQueue, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001510 }
1511}
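// Note: the queryUpdates/eventUpdates lambdas replayed above were recorded at vkCmd*
// time; executing them at submit with do_validate == false folds the per-command-buffer
// query and event state into the device-level queryToStateMap and eventMap.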
1512
1513void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1514 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1515 VkResult result) {
1516 if (VK_SUCCESS == result) {
1517 AddMemObjInfo(device, *pMemory, pAllocateInfo);
1518 }
1519 return;
1520}
1521
1522void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1523 if (!mem) return;
1524 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
1525 const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
1526
1527 // Clear mem binding for any bound objects
1528 for (const auto &obj : mem_info->obj_bindings) {
1529 BINDABLE *bindable_state = nullptr;
1530 switch (obj.type) {
1531 case kVulkanObjectTypeImage:
1532 bindable_state = GetImageState(obj.Cast<VkImage>());
1533 break;
1534 case kVulkanObjectTypeBuffer:
1535 bindable_state = GetBufferState(obj.Cast<VkBuffer>());
1536 break;
1537 case kVulkanObjectTypeAccelerationStructureNV:
1538 bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
1539 break;
1540
1541 default:
1542 // Should only have acceleration structure, buffer, or image objects bound to memory
1543 assert(0);
1544 }
1545
1546 if (bindable_state) {
1547 bindable_state->binding.mem = MEMORY_UNBOUND;
1548 bindable_state->UpdateBoundMemorySet();
1549 }
1550 }
1551 // Any bound cmd buffers are now invalid
1552 InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
1553 RemoveAliasingImages(mem_info->bound_images);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001554 mem_info->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001555 memObjMap.erase(mem);
1556}
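// Note: freeing memory only unbinds its objects; the image/buffer/acceleration-structure
// state objects live on with binding.mem == MEMORY_UNBOUND until the app destroys them.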
1557
1558void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1559 VkFence fence, VkResult result) {
1560 if (result != VK_SUCCESS) return;
1561 uint64_t early_retire_seq = 0;
1562 auto pFence = GetFenceState(fence);
1563 auto pQueue = GetQueueState(queue);
1564
1565 if (pFence) {
1566 if (pFence->scope == kSyncScopeInternal) {
1567 SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
1568 if (!bindInfoCount) {
1569 // No work to do, just dropping a fence in the queue by itself.
1570 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001571 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001572 }
1573 } else {
1574            // Retire work up until this fence early; we will not see the wait that corresponds to this signal
1575 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1576 }
1577 }
1578
1579 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
1580 const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
1581 // Track objects tied to memory
1582 for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
1583 for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
1584 auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
1585 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1586 VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
1587 }
1588 }
1589 for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
1590 for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
1591 auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
1592 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1593 VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
1594 }
1595 }
1596 for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
1597 for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
1598 auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
1599 // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
1600 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
1601 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
1602 VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
1603 }
1604 }
1605
1606 std::vector<SEMAPHORE_WAIT> semaphore_waits;
1607 std::vector<VkSemaphore> semaphore_signals;
1608 std::vector<VkSemaphore> semaphore_externals;
1609 for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
1610 VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
1611 auto pSemaphore = GetSemaphoreState(semaphore);
1612 if (pSemaphore) {
1613 if (pSemaphore->scope == kSyncScopeInternal) {
1614 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1615 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1616 pSemaphore->in_use.fetch_add(1);
1617 }
1618 pSemaphore->signaler.first = VK_NULL_HANDLE;
1619 pSemaphore->signaled = false;
1620 } else {
1621 semaphore_externals.push_back(semaphore);
1622 pSemaphore->in_use.fetch_add(1);
1623 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1624 pSemaphore->scope = kSyncScopeInternal;
1625 }
1626 }
1627 }
1628 }
1629 for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
1630 VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
1631 auto pSemaphore = GetSemaphoreState(semaphore);
1632 if (pSemaphore) {
1633 if (pSemaphore->scope == kSyncScopeInternal) {
1634 pSemaphore->signaler.first = queue;
1635 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1636 pSemaphore->signaled = true;
1637 pSemaphore->in_use.fetch_add(1);
1638 semaphore_signals.push_back(semaphore);
1639 } else {
1640                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1641 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1642 }
1643 }
1644 }
1645
1646 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001647 bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001648 }
1649
1650 if (early_retire_seq) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001651 RetireWorkOnQueue(pQueue, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001652 }
1653}
1654
1655void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1656 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1657 VkResult result) {
1658 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001659 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06001660 semaphore_state->signaler.first = VK_NULL_HANDLE;
1661 semaphore_state->signaler.second = 0;
1662 semaphore_state->signaled = false;
1663 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001664 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
1665 semaphore_state->payload = 0;
1666 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
1667 if (semaphore_type_create_info) {
1668 semaphore_state->type = semaphore_type_create_info->semaphoreType;
1669 semaphore_state->payload = semaphore_type_create_info->initialValue;
1670 }
locke-lunargd556cc32019-09-17 01:21:23 -06001671 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
1672}
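// Illustrative app-side sketch (assumes VK_KHR_timeline_semaphore is enabled) of a
// create call that would take the timeline branch above:
//
//     VkSemaphoreTypeCreateInfoKHR type_info = {VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR};
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 5;
//     VkSemaphoreCreateInfo info = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &type_info};
//     vkCreateSemaphore(device, &info, nullptr, &semaphore);  // type/payload recorded here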
1673
1674void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
1675 VkSemaphoreImportFlagsKHR flags) {
1676 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1677 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
1678 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
1679 sema_node->scope == kSyncScopeInternal) {
1680 sema_node->scope = kSyncScopeExternalTemporary;
1681 } else {
1682 sema_node->scope = kSyncScopeExternalPermanent;
1683 }
1684 }
1685}
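// Note: SYNC_FD imports and VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR give temporary scope;
// the submit paths above restore kSyncScopeInternal at the next wait. All other external
// imports permanently change the semaphore's scope.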
1686
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001687void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
1688 VkResult result) {
1689    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
1690    if (pSemaphore && (VK_SUCCESS == result)) pSemaphore->payload = pSignalInfo->value;
1691}
1692
locke-lunargd556cc32019-09-17 01:21:23 -06001693void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1694 auto mem_info = GetDevMemState(mem);
1695 if (mem_info) {
1696 mem_info->mapped_range.offset = offset;
1697 mem_info->mapped_range.size = size;
1698 mem_info->p_driver_data = *ppData;
1699 }
1700}
1701
1702void ValidationStateTracker::RetireFence(VkFence fence) {
1703 auto pFence = GetFenceState(fence);
1704 if (pFence && pFence->scope == kSyncScopeInternal) {
1705 if (pFence->signaler.first != VK_NULL_HANDLE) {
1706 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001707 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001708 } else {
1709 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
1710 // the fence as retired.
1711 pFence->state = FENCE_RETIRED;
1712 }
1713 }
1714}
1715
1716void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1717 VkBool32 waitAll, uint64_t timeout, VkResult result) {
1718 if (VK_SUCCESS != result) return;
1719
1720 // When we know that all fences are complete we can clean/remove their CBs
1721 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
1722 for (uint32_t i = 0; i < fenceCount; i++) {
1723 RetireFence(pFences[i]);
1724 }
1725 }
1726    // NOTE : The alternate case, where only some fences have completed, is not handled
1727    // here. For the app to know which fences completed, it will have to call
1728    // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
1729}
1730
1731void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
1732 if (VK_SUCCESS != result) return;
1733 RetireFence(fence);
1734}
1735
1736void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
1737 // Add queue to tracking set only if it is new
1738 auto queue_is_new = queues.emplace(queue);
1739    if (queue_is_new.second) {
1740 QUEUE_STATE *queue_state = &queueMap[queue];
1741 queue_state->queue = queue;
1742 queue_state->queueFamilyIndex = queue_family_index;
1743 queue_state->seq = 0;
1744 }
1745}
1746
1747void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
1748 VkQueue *pQueue) {
1749 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
1750}
1751
1752void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
1753 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
1754}
1755
1756void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
1757 if (VK_SUCCESS != result) return;
1758 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001759 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06001760}
1761
1762void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
1763 if (VK_SUCCESS != result) return;
1764 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001765 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06001766 }
1767}
1768
1769void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
1770 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001771 auto fence_state = GetFenceState(fence);
1772 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001773 fenceMap.erase(fence);
1774}
1775
1776void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
1777 const VkAllocationCallbacks *pAllocator) {
1778 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001779 auto semaphore_state = GetSemaphoreState(semaphore);
1780 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001781 semaphoreMap.erase(semaphore);
1782}
1783
1784void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
1785 if (!event) return;
1786 EVENT_STATE *event_state = GetEventState(event);
1787 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
1788 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
1789 eventMap.erase(event);
1790}
1791
1792void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
1793 const VkAllocationCallbacks *pAllocator) {
1794 if (!queryPool) return;
1795 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
1796 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
1797 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001798 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001799 queryPoolMap.erase(queryPool);
1800}
1801
1802 // Object with given handle is being bound to memory w/ given mem_info struct.
1803 // Track the binding by adding the handle to the mem_info set that matches its type.
1804 // Note: the memoryOffset, memRequirements, and is_linear parameters are currently
1805 // unused here; linear/non-linear overlap validation is not performed by the state
1806 // tracker, which only records which objects are bound to this allocation.
1809void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
1810 VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
1811 if (typed_handle.type == kVulkanObjectTypeImage) {
1812 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
1813 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1814 mem_info->bound_buffers.insert(typed_handle.handle);
1815 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1816 mem_info->bound_acceleration_structures.insert(typed_handle.handle);
1817 } else {
1818 // Unsupported object type
1819 assert(false);
1820 }
1821}
1822
1823void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1824 VkMemoryRequirements mem_reqs, bool is_linear) {
1825 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
1826}
1827
1828void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1829 const VkMemoryRequirements &mem_reqs) {
1830 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
1831}
1832
1833void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
1834 VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
1835 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
1836}
1837
1838// This function will remove the handle-to-index mapping from the appropriate map.
1839static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
1840 if (typed_handle.type == kVulkanObjectTypeImage) {
1841 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
1842 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1843 mem_info->bound_buffers.erase(typed_handle.handle);
1844 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1845 mem_info->bound_acceleration_structures.erase(typed_handle.handle);
1846 } else {
1847 // Unsupported object type
1848 assert(false);
1849 }
1850}
1851
1852void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
1853 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
1854}
1855
1856void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
1857 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
1858}
1859
1860void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
1861 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
1862}
1863
1864void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
1865 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1866 if (buffer_state) {
1867 // Track bound memory range information
1868 auto mem_info = GetDevMemState(mem);
1869 if (mem_info) {
1870 InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
1871 }
1872 // Track objects tied to memory
1873 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
1874 }
1875}
1876
1877void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
1878 VkDeviceSize memoryOffset, VkResult result) {
1879 if (VK_SUCCESS != result) return;
1880 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
1881}
1882
1883void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
1884 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1885 for (uint32_t i = 0; i < bindInfoCount; i++) {
1886 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1887 }
1888}
1889
1890void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
1891 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1892 for (uint32_t i = 0; i < bindInfoCount; i++) {
1893 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1894 }
1895}
1896
1897void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
1898 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1899 if (buffer_state) {
1900 buffer_state->requirements = *pMemoryRequirements;
1901 buffer_state->memory_requirements_checked = true;
1902 }
1903}
1904
1905void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
1906 VkMemoryRequirements *pMemoryRequirements) {
1907 RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
1908}
1909
1910void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
1911 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1912 VkMemoryRequirements2KHR *pMemoryRequirements) {
1913 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1914}
1915
1916void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
1917 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1918 VkMemoryRequirements2KHR *pMemoryRequirements) {
1919 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1920}
1921
1922void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
1923 IMAGE_STATE *image_state = GetImageState(image);
1924 if (image_state) {
1925 image_state->requirements = *pMemoryRequirements;
1926 image_state->memory_requirements_checked = true;
1927 }
1928}
1929
1930void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
1931 VkMemoryRequirements *pMemoryRequirements) {
1932 RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
1933}
1934
1935void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
1936 VkMemoryRequirements2 *pMemoryRequirements) {
1937 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1938}
1939
1940void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
1941 const VkImageMemoryRequirementsInfo2 *pInfo,
1942 VkMemoryRequirements2 *pMemoryRequirements) {
1943 RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1944}
1945
1946static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
1947 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
1948 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
1949 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
1950 image_state->sparse_metadata_required = true;
1951 }
1952}
1953
1954void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
1955 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
1956 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
1957 auto image_state = GetImageState(image);
1958 image_state->get_sparse_reqs_called = true;
1959 if (!pSparseMemoryRequirements) return;
1960 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1961 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
1962 }
1963}
1964
1965void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
1966 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1967 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1968 auto image_state = GetImageState(pInfo->image);
1969 image_state->get_sparse_reqs_called = true;
1970 if (!pSparseMemoryRequirements) return;
1971 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1972 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1973 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1974 }
1975}
1976
1977void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
1978 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1979 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1980 auto image_state = GetImageState(pInfo->image);
1981 image_state->get_sparse_reqs_called = true;
1982 if (!pSparseMemoryRequirements) return;
1983 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1984 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1985 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1986 }
1987}
1988
1989void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1990 const VkAllocationCallbacks *pAllocator) {
1991 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001992 auto shader_module_state = GetShaderModuleState(shaderModule);
1993 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001994 shaderModuleMap.erase(shaderModule);
1995}
1996
1997void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
1998 const VkAllocationCallbacks *pAllocator) {
1999 if (!pipeline) return;
2000 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2001 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2002 // Any bound cmd buffers are now invalid
2003 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002004 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002005 pipelineMap.erase(pipeline);
2006}
2007
2008void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2009 const VkAllocationCallbacks *pAllocator) {
2010 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002011 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2012 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002013 pipelineLayoutMap.erase(pipelineLayout);
2014}
2015
2016void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2017 const VkAllocationCallbacks *pAllocator) {
2018 if (!sampler) return;
2019 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2020 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2021 // Any bound cmd buffers are now invalid
2022    if (sampler_state) {
2023        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
2024        sampler_state->destroyed = true;
2025    }
locke-lunargd556cc32019-09-17 01:21:23 -06002026 samplerMap.erase(sampler);
2027}
2028
2029void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2030 const VkAllocationCallbacks *pAllocator) {
2031 if (!descriptorSetLayout) return;
2032 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2033 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002034 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002035 descriptorSetLayoutMap.erase(layout_it);
2036 }
2037}
2038
2039void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2040 const VkAllocationCallbacks *pAllocator) {
2041 if (!descriptorPool) return;
2042 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2043 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2044 if (desc_pool_state) {
2045 // Any bound cmd buffers are now invalid
2046 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2047 // Free sets that were in this pool
2048 for (auto ds : desc_pool_state->sets) {
2049 FreeDescriptorSet(ds);
2050 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002051 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002052 descriptorPoolMap.erase(descriptorPool);
2053 }
2054}
2055
2056// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
2057void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2058 const VkCommandBuffer *command_buffers) {
2059 for (uint32_t i = 0; i < command_buffer_count; i++) {
2060 auto cb_state = GetCBState(command_buffers[i]);
2061 // Remove references to command buffer's state and delete
2062 if (cb_state) {
2063 // reset prior to delete, removing various references to it.
2064 // TODO: fix this, it's insane.
2065 ResetCommandBufferState(cb_state->commandBuffer);
2066 // Remove the cb_state's references from COMMAND_POOL_STATEs
2067 pool_state->commandBuffers.erase(command_buffers[i]);
2068 // Remove the cb debug labels
2069 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
2070 // Remove CBState from CB map
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002071 cb_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002072 commandBufferMap.erase(cb_state->commandBuffer);
2073 }
2074 }
2075}
2076
2077void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2078 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2079 auto pPool = GetCommandPoolState(commandPool);
2080 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2081}
2082
2083void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2084 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2085 VkResult result) {
2086 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002087 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002088 cmd_pool_state->createFlags = pCreateInfo->flags;
2089 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
2090 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
2091}
2092
2093void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2094 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2095 VkResult result) {
2096 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002097 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002098 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002099 query_pool_state->pool = *pQueryPool;
2100 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2101 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
2102 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2103
2104 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2105 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2106 switch (counter.scope) {
2107 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2108 query_pool_state->has_perf_scope_command_buffer = true;
2109 break;
2110 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2111 query_pool_state->has_perf_scope_render_pass = true;
2112 break;
2113 default:
2114 break;
2115 }
2116 }
2117
2118 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2119 &query_pool_state->n_performance_passes);
2120 }
2121
locke-lunargd556cc32019-09-17 01:21:23 -06002122 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2123
2124 QueryObject query_obj{*pQueryPool, 0u};
2125 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2126 query_obj.query = i;
2127 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2128 }
2129}
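// Note: every query slot starts out QUERYSTATE_UNKNOWN here; it only becomes
// QUERYSTATE_AVAILABLE once a submission that ended the query retires (see
// RetireWorkOnQueue), with state changes in between driven by the recorded
// queryUpdates lambdas.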
2130
2131void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2132 const VkAllocationCallbacks *pAllocator) {
2133 if (!commandPool) return;
2134 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2135 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2136 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2137 if (cp_state) {
2138 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2139 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2140 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002141 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002142 commandPoolMap.erase(commandPool);
2143 }
2144}
2145
2146void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2147 VkCommandPoolResetFlags flags, VkResult result) {
2148 if (VK_SUCCESS != result) return;
2149 // Reset all of the CBs allocated from this pool
2150 auto command_pool_state = GetCommandPoolState(commandPool);
2151 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2152 ResetCommandBufferState(cmdBuffer);
2153 }
2154}
2155
2156void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2157 VkResult result) {
2158 for (uint32_t i = 0; i < fenceCount; ++i) {
2159 auto pFence = GetFenceState(pFences[i]);
2160 if (pFence) {
2161 if (pFence->scope == kSyncScopeInternal) {
2162 pFence->state = FENCE_UNSIGNALED;
2163 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2164 pFence->scope = kSyncScopeInternal;
2165 }
2166 }
2167 }
2168}
2169
Jeff Bolzadbfa852019-10-04 13:53:30 -05002170// For given cb_nodes, invalidate them and track object causing invalidation.
2171// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
2172// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
2173// can also unlink objects from command buffers.
2174void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
2175 const VulkanTypedHandle &obj, bool unlink) {
2176 for (const auto &cb_node_pair : cb_nodes) {
2177 auto &cb_node = cb_node_pair.first;
2178 if (cb_node->state == CB_RECORDING) {
2179 cb_node->state = CB_INVALID_INCOMPLETE;
2180 } else if (cb_node->state == CB_RECORDED) {
2181 cb_node->state = CB_INVALID_COMPLETE;
2182 }
2183 cb_node->broken_bindings.push_back(obj);
2184
2185 // if secondary, then propagate the invalidation to the primaries that will call us.
2186 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2187 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
2188 }
2189 if (unlink) {
2190 int index = cb_node_pair.second;
2191 assert(cb_node->object_bindings[index] == obj);
2192 cb_node->object_bindings[index] = VulkanTypedHandle();
2193 }
2194 }
2195 if (unlink) {
2196 cb_nodes.clear();
2197 }
2198}
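// Note: when unlink is true, the object's slot in each cb_node->object_bindings is
// overwritten with an empty VulkanTypedHandle and cb_nodes is cleared, severing the
// object<->command-buffer link in both directions so a destroyed object is not
// dereferenced again.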
2199
2200void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2201 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002202 for (auto cb_node : cb_nodes) {
2203 if (cb_node->state == CB_RECORDING) {
2204 cb_node->state = CB_INVALID_INCOMPLETE;
2205 } else if (cb_node->state == CB_RECORDED) {
2206 cb_node->state = CB_INVALID_COMPLETE;
2207 }
2208 cb_node->broken_bindings.push_back(obj);
2209
2210 // if secondary, then propagate the invalidation to the primaries that will call us.
2211 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002212 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002213 }
2214 }
2215}
2216
2217void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2218 const VkAllocationCallbacks *pAllocator) {
2219 if (!framebuffer) return;
2220 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2221 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2222 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002223 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002224 frameBufferMap.erase(framebuffer);
2225}
2226
2227void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2228 const VkAllocationCallbacks *pAllocator) {
2229 if (!renderPass) return;
2230 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2231 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2232 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002233 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002234 renderPassMap.erase(renderPass);
2235}
2236
2237void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2238 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2239 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002240 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002241 fence_state->fence = *pFence;
2242 fence_state->createInfo = *pCreateInfo;
2243 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2244 fenceMap[*pFence] = std::move(fence_state);
2245}
2246
2247bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2248 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2249 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002250 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002251 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2252 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2253 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2254 cgpl_state->pipe_state.reserve(count);
2255 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002256 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002257 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002258 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002259 }
2260 return false;
2261}
2262
2263void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2264 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2265 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2266 VkResult result, void *cgpl_state_data) {
2267 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2268 // This API may create pipelines regardless of the return value
2269 for (uint32_t i = 0; i < count; i++) {
2270 if (pPipelines[i] != VK_NULL_HANDLE) {
2271 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2272 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2273 }
2274 }
2275 cgpl_state->pipe_state.clear();
2276}
2277
2278bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2279 const VkComputePipelineCreateInfo *pCreateInfos,
2280 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002281 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002282 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2283 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2284 ccpl_state->pipe_state.reserve(count);
2285 for (uint32_t i = 0; i < count; i++) {
2286 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002287 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002288 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002289 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002290 }
2291 return false;
2292}
2293
2294void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2295 const VkComputePipelineCreateInfo *pCreateInfos,
2296 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2297 VkResult result, void *ccpl_state_data) {
2298 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2299
2300 // This API may create pipelines regardless of the return value
2301 for (uint32_t i = 0; i < count; i++) {
2302 if (pPipelines[i] != VK_NULL_HANDLE) {
2303 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2304 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2305 }
2306 }
2307 ccpl_state->pipe_state.clear();
2308}
2309
2310bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2311 uint32_t count,
2312 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2313 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002314 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002315 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2316 crtpl_state->pipe_state.reserve(count);
2317 for (uint32_t i = 0; i < count; i++) {
2318 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002319 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002320 crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002321 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002322 }
2323 return false;
2324}
2325
2326void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2327 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2328 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2329 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2330 // This API may create pipelines regardless of the return value
2331 for (uint32_t i = 0; i < count; i++) {
2332 if (pPipelines[i] != VK_NULL_HANDLE) {
2333 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2334 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2335 }
2336 }
2337 crtpl_state->pipe_state.clear();
2338}
2339
2340void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2341 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2342 VkResult result) {
    if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002343    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002344}
2345
2346void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2347 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2348 const VkAllocationCallbacks *pAllocator,
2349 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2350 if (VK_SUCCESS != result) return;
2351 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2352}
2353
2354// For repeatable sorting; not useful for a "memory in range" search
2355struct PushConstantRangeCompare {
2356 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2357 if (lhs->offset == rhs->offset) {
2358 if (lhs->size == rhs->size) {
2359 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2360 return lhs->stageFlags < rhs->stageFlags;
2361 }
2362 // If the offsets are the same then sorting by the end of range is useful for validation
2363 return lhs->size < rhs->size;
2364 }
2365 return lhs->offset < rhs->offset;
2366 }
2367};
2368
2369static PushConstantRangesDict push_constant_ranges_dict;
2370
2371PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2372 if (!info->pPushConstantRanges) {
2373 // Hand back the empty entry (creating as needed)...
2374 return push_constant_ranges_dict.look_up(PushConstantRanges());
2375 }
2376
2377 // Sort the input ranges to ensure equivalent ranges map to the same id
2378 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2379 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2380 sorted.insert(info->pPushConstantRanges + i);
2381 }
2382
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002383 PushConstantRanges ranges;
2384 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002385 for (const auto range : sorted) {
2386 ranges.emplace_back(*range);
2387 }
2388 return push_constant_ranges_dict.look_up(std::move(ranges));
2389}
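
// Illustrative sketch: because the ranges are sorted before the dictionary lookup,
// two layouts that declare the same ranges in different orders share one canonical id.
// The structs below are hypothetical inputs, not tracker state.
#if 0
const VkPushConstantRange a[] = {{VK_SHADER_STAGE_VERTEX_BIT, 0, 16},
                                 {VK_SHADER_STAGE_FRAGMENT_BIT, 16, 32}};
const VkPushConstantRange b[] = {{VK_SHADER_STAGE_FRAGMENT_BIT, 16, 32},
                                 {VK_SHADER_STAGE_VERTEX_BIT, 0, 16}};
VkPipelineLayoutCreateInfo ci_a = {};
ci_a.pushConstantRangeCount = 2;
ci_a.pPushConstantRanges = a;
VkPipelineLayoutCreateInfo ci_b = {};
ci_b.pushConstantRangeCount = 2;
ci_b.pPushConstantRanges = b;
// After sorting, both inputs produce the same PushConstantRanges vector, so:
assert(GetCanonicalId(&ci_a) == GetCanonicalId(&ci_b));
#endif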
2390
2391// Dictionary of the canonical form of the pipeline layout's list of descriptor set layouts
2392static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2393
2394// Dictionary of canonical form of the "compatible for set" records
2395static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2396
2397static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2398 const PipelineLayoutSetLayoutsId set_layouts_id) {
2399 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2400}
2401
2402void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2403 const VkAllocationCallbacks *pAllocator,
2404 VkPipelineLayout *pPipelineLayout, VkResult result) {
2405 if (VK_SUCCESS != result) return;
2406
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002407 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002408 pipeline_layout_state->layout = *pPipelineLayout;
2409 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2410 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2411 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002412 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002413 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2414 }
2415
2416 // Get canonical form IDs for the "compatible for set" contents
2417 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2418 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2419 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2420
2421    // Create a table of "compatible for set N" canonical forms for trivial-accept validation
2422 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2423 pipeline_layout_state->compat_for_set.emplace_back(
2424 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2425 }
2426 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2427}
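
// Illustrative sketch: compat_for_set exists so "compatible for set N" (per the
// Pipeline Layout Compatibility rules) reduces to a canonical-id comparison instead
// of re-walking descriptor set layouts. A hypothetical helper, not part of this file:
#if 0
static bool CompatibleForSet(const PIPELINE_LAYOUT_STATE *a, const PIPELINE_LAYOUT_STATE *b, uint32_t n) {
    return (n < a->compat_for_set.size()) && (n < b->compat_for_set.size()) &&
           (a->compat_for_set[n] == b->compat_for_set[n]);  // canonical id equality
}
#endif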
2428
2429void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2430 const VkAllocationCallbacks *pAllocator,
2431 VkDescriptorPool *pDescriptorPool, VkResult result) {
2432 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002433 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002434}
2435
2436void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2437 VkDescriptorPoolResetFlags flags, VkResult result) {
2438 if (VK_SUCCESS != result) return;
2439 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
2440 // TODO: validate flags
2441    // For every set allocated from this pool: clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
2442 for (auto ds : pPool->sets) {
2443 FreeDescriptorSet(ds);
2444 }
2445 pPool->sets.clear();
2446 // Reset available count for each type and available sets for this pool
2447 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
2448 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
2449 }
2450 pPool->availableSets = pPool->maxSets;
2451}
2452
2453bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2454 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002455 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002456 // Always update common data
2457 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2458 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2459 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2460
2461 return false;
2462}
2463
2464// Allocation state was good and the down-chain call was made, so update state for the newly allocated descriptor sets
2465void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2466 VkDescriptorSet *pDescriptorSets, VkResult result,
2467 void *ads_state_data) {
2468 if (VK_SUCCESS != result) return;
2469 // All the updates are contained in a single cvdescriptorset function
2470 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2471 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2472 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2473}
2474
2475void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2476 const VkDescriptorSet *pDescriptorSets) {
2477 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2478 // Update available descriptor sets in pool
2479 pool_state->availableSets += count;
2480
2481    // For each freed descriptor set, return its descriptors to the pool as available and remove it from the pool and setMap
2482 for (uint32_t i = 0; i < count; ++i) {
2483 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2484 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2485 uint32_t type_index = 0, descriptor_count = 0;
2486 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2487 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2488 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2489 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2490 }
2491 FreeDescriptorSet(descriptor_set);
2492 pool_state->sets.erase(descriptor_set);
2493 }
2494 }
2495}
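
// Worked example of the accounting above (illustrative numbers): a pool created with
// maxSets = 4 and 8 UNIFORM_BUFFER descriptors that has one live set holding 3
// uniform-buffer descriptors reports availableSets = 3 and
// availableDescriptorTypeCount[UNIFORM_BUFFER] = 5; freeing that set restores 4 and 8.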
2496
2497void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2498 const VkWriteDescriptorSet *pDescriptorWrites,
2499 uint32_t descriptorCopyCount,
2500 const VkCopyDescriptorSet *pDescriptorCopies) {
2501 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2502 pDescriptorCopies);
2503}
2504
2505void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2506 VkCommandBuffer *pCommandBuffer, VkResult result) {
2507 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002508 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002509 if (pPool) {
2510 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2511 // Add command buffer to its commandPool map
2512 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002513 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002514 pCB->createInfo = *pCreateInfo;
2515 pCB->device = device;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002516 pCB->command_pool = pPool;
locke-lunargd556cc32019-09-17 01:21:23 -06002517 // Add command buffer to map
2518 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
2519 ResetCommandBufferState(pCommandBuffer[i]);
2520 }
2521 }
2522}
2523
2524// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
2525void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002526 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06002527 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07002528 // If imageless fb, skip fb binding
2529 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06002530 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
2531 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
2532 auto view_state = GetAttachmentImageViewState(fb_state, attachment);
2533 if (view_state) {
2534 AddCommandBufferBindingImageView(cb_state, view_state);
2535 }
2536 }
2537}
2538
2539void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2540 const VkCommandBufferBeginInfo *pBeginInfo) {
2541 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2542 if (!cb_state) return;
locke-lunargd556cc32019-09-17 01:21:23 -06002543 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
2544 // Secondary Command Buffer
2545 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
2546 if (pInfo) {
2547 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
2548 assert(pInfo->renderPass);
2549 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
2550 if (framebuffer) {
2551 // Connect this framebuffer and its children to this cmdBuffer
2552 AddFramebufferBinding(cb_state, framebuffer);
2553 }
2554 }
2555 }
2556 }
2557 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2558 ResetCommandBufferState(commandBuffer);
2559 }
2560 // Set updated state here in case implicit reset occurs above
2561 cb_state->state = CB_RECORDING;
2562 cb_state->beginInfo = *pBeginInfo;
2563 if (cb_state->beginInfo.pInheritanceInfo) {
2564 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2565 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2566        // If this is a secondary command buffer that inherits state, update the inherited items.
2567 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2568 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
2569 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
2570 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
2571 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
2572 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
2573 }
2574 }
2575
2576 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
2577 if (chained_device_group_struct) {
2578 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2579 } else {
2580 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2581 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002582
2583 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002584}
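
// Illustrative sketch: a secondary command buffer begun inside a render pass, which is
// what drives the AddFramebufferBinding and inheritance paths above. All handles
// (secondary_cb, render_pass, framebuffer) are hypothetical.
#if 0
VkCommandBufferInheritanceInfo inherit = {};
inherit.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
inherit.renderPass = render_pass;
inherit.subpass = 0;
inherit.framebuffer = framebuffer;  // may be VK_NULL_HANDLE; the fb binding is skipped then
VkCommandBufferBeginInfo begin = {};
begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
begin.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
begin.pInheritanceInfo = &inherit;
vkBeginCommandBuffer(secondary_cb, &begin);  // lands in the secondary-CB branch above
#endif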
2585
2586void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2587 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2588 if (!cb_state) return;
2589    // Cached validation applies only to the current recording of this command buffer.
2590 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
2591 descriptor_set->ClearCachedValidation(cb_state);
2592 }
2593 cb_state->validated_descriptor_sets.clear();
2594 if (VK_SUCCESS == result) {
2595 cb_state->state = CB_RECORDED;
2596 }
2597}
2598
2599void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2600 VkResult result) {
2601 if (VK_SUCCESS == result) {
2602 ResetCommandBufferState(commandBuffer);
2603 }
2604}
2605
2606CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2607 // initially assume everything is static state
2608 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2609
2610 if (ds) {
2611 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
2612 switch (ds->pDynamicStates[i]) {
2613 case VK_DYNAMIC_STATE_LINE_WIDTH:
2614 flags &= ~CBSTATUS_LINE_WIDTH_SET;
2615 break;
2616 case VK_DYNAMIC_STATE_DEPTH_BIAS:
2617 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
2618 break;
2619 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
2620 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
2621 break;
2622 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
2623 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
2624 break;
2625 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
2626 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
2627 break;
2628 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
2629 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
2630 break;
2631 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
2632 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
2633 break;
2634 case VK_DYNAMIC_STATE_SCISSOR:
2635 flags &= ~CBSTATUS_SCISSOR_SET;
2636 break;
2637 case VK_DYNAMIC_STATE_VIEWPORT:
2638 flags &= ~CBSTATUS_VIEWPORT_SET;
2639 break;
2640 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
2641 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2642 break;
2643 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
2644 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
2645 break;
2646 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
2647 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
2648 break;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02002649 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
2650 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
2651 break;
locke-lunargd556cc32019-09-17 01:21:23 -06002652 default:
2653 break;
2654 }
2655 }
2656 }
2657
2658 return flags;
2659}
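
// Illustrative sketch: a pipeline that makes viewport and scissor dynamic. The
// returned mask then has every CBSTATUS_*_SET bit except VIEWPORT and SCISSOR,
// i.e. those two must come from vkCmdSetViewport/vkCmdSetScissor before a draw.
#if 0
const VkDynamicState dynamic_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
VkPipelineDynamicStateCreateInfo ds = {};
ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
ds.dynamicStateCount = 2;
ds.pDynamicStates = dynamic_states;
CBStatusFlags static_mask = MakeStaticStateMask(&ds);
#endif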
2660
2661// Validation cache:
2662// CV is the bottommost implementor of this extension. Don't pass calls down.
2663// Utility function to set collective state for a pipeline
2664void SetPipelineState(PIPELINE_STATE *pPipe) {
2665    // If any enabled attachment blends with a constant-color factor, flag the pipeline as requiring blend constants
2666 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2667 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2668 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2669 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2670 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2671 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2672 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2673 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2674 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2675 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2676 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2677 pPipe->blendConstantsEnabled = true;
2678 }
2679 }
2680 }
2681 }
2682}
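
// Illustrative sketch: an attachment state that trips blendConstantsEnabled above,
// because an enabled attachment uses a blend factor in the constant-color range.
#if 0
VkPipelineColorBlendAttachmentState att = {};
att.blendEnable = VK_TRUE;
att.srcColorBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;  // within the flagged range
att.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;             // outside the range; src alone suffices
#endif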
2683
2684void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2685 VkPipeline pipeline) {
2686 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2687 assert(cb_state);
2688
2689 auto pipe_state = GetPipelineState(pipeline);
2690 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
2691 cb_state->status &= ~cb_state->static_status;
2692 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
2693 cb_state->status |= cb_state->static_status;
2694 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002695 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002696 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
2697 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05002698 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002699}
2700
2701void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2702 uint32_t viewportCount, const VkViewport *pViewports) {
2703 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2704 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
2705 cb_state->status |= CBSTATUS_VIEWPORT_SET;
2706}
2707
2708void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
2709 uint32_t exclusiveScissorCount,
2710 const VkRect2D *pExclusiveScissors) {
2711 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2712 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
2713 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
2714 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2715}
2716
2717void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
2718 VkImageLayout imageLayout) {
2719 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2720
2721 if (imageView != VK_NULL_HANDLE) {
2722 auto view_state = GetImageViewState(imageView);
2723 AddCommandBufferBindingImageView(cb_state, view_state);
2724 }
2725}
2726
2727void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2728 uint32_t viewportCount,
2729 const VkShadingRatePaletteNV *pShadingRatePalettes) {
2730 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2731 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
2732 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
2733 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
2734}
2735
2736void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
2737 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
2738 const VkAllocationCallbacks *pAllocator,
2739 VkAccelerationStructureNV *pAccelerationStructure,
2740 VkResult result) {
2741 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002742 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002743
2744 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
2745    // Query the requirements now, in case the application doesn't, to avoid a query at bind/validation time
2746 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2747 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
2748 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
2749 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
2750
2751 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
2752 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2753 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
2754 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2755 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
2756 &as_state->build_scratch_memory_requirements);
2757
2758 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
2759 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
2760 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
2761 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
2762 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
2763 &as_state->update_scratch_memory_requirements);
2764
2765 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
2766}
2767
2768void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
2769 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
2770 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
2771 if (as_state != nullptr) {
2772 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
2773 as_state->memory_requirements = *pMemoryRequirements;
2774 as_state->memory_requirements_checked = true;
2775 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
2776 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
2777 as_state->build_scratch_memory_requirements_checked = true;
2778 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
2779 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
2780 as_state->update_scratch_memory_requirements_checked = true;
2781 }
2782 }
2783}
2784
2785void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
2786 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
2787 if (VK_SUCCESS != result) return;
2788 for (uint32_t i = 0; i < bindInfoCount; i++) {
2789 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
2790
2791 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
2792 if (as_state) {
2793 // Track bound memory range information
2794 auto mem_info = GetDevMemState(info.memory);
2795 if (mem_info) {
2796 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
2797 as_state->requirements);
2798 }
2799 // Track objects tied to memory
2800 SetMemBinding(info.memory, as_state, info.memoryOffset,
2801 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
2802
2803 // GPU validation of top level acceleration structure building needs acceleration structure handles.
2804 if (enabled.gpu_validation) {
2805 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
2806 }
2807 }
2808 }
2809}
2810
2811void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
2812 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
2813 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
2814 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2815 if (cb_state == nullptr) {
2816 return;
2817 }
2818
2819 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2820 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2821 if (dst_as_state != nullptr) {
2822 dst_as_state->built = true;
2823 dst_as_state->build_info.initialize(pInfo);
2824 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2825 }
2826 if (src_as_state != nullptr) {
2827 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2828 }
2829 cb_state->hasBuildAccelerationStructureCmd = true;
2830}
2831
2832void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
2833 VkAccelerationStructureNV dst,
2834 VkAccelerationStructureNV src,
2835 VkCopyAccelerationStructureModeNV mode) {
2836 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2837 if (cb_state) {
2838 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
2839 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
2840 if (dst_as_state != nullptr && src_as_state != nullptr) {
2841 dst_as_state->built = true;
2842 dst_as_state->build_info = src_as_state->build_info;
2843 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
2844 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
2845 }
2846 }
2847}
2848
2849void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
2850 VkAccelerationStructureNV accelerationStructure,
2851 const VkAllocationCallbacks *pAllocator) {
2852 if (!accelerationStructure) return;
2853 auto *as_state = GetAccelerationStructureState(accelerationStructure);
2854 if (as_state) {
2855 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
2856 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
2857 for (auto mem_binding : as_state->GetBoundMemory()) {
2858 auto mem_info = GetDevMemState(mem_binding);
2859 if (mem_info) {
2860 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
2861 }
2862 }
2863 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002864 as_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002865 accelerationStructureMap.erase(accelerationStructure);
2866 }
2867}
2868
Chris Mayer9ded5eb2019-09-19 16:33:26 +02002869void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
2870 uint32_t viewportCount,
2871 const VkViewportWScalingNV *pViewportWScalings) {
2872 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2873 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
2874}
2875
locke-lunargd556cc32019-09-17 01:21:23 -06002876void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
2877 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2878 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
2879}
2880
2881void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
2882 uint16_t lineStipplePattern) {
2883 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2884 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
2885}
2886
2887void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
2888 float depthBiasClamp, float depthBiasSlopeFactor) {
2889 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2890 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
2891}
2892
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002893void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
2894 const VkRect2D *pScissors) {
2895 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2896 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
2897 cb_state->status |= CBSTATUS_SCISSOR_SET;
2898}
2899
locke-lunargd556cc32019-09-17 01:21:23 -06002900void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
2901 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2902 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
2903}
2904
2905void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
2906 float maxDepthBounds) {
2907 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2908 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
2909}
2910
2911void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2912 uint32_t compareMask) {
2913 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2914 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
2915}
2916
2917void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2918 uint32_t writeMask) {
2919 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2920 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
2921}
2922
2923void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
2924 uint32_t reference) {
2925 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2926 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
2927}
2928
2929// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
2930// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
2931// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
2932void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
2933 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
2934 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
2935 cvdescriptorset::DescriptorSet *push_descriptor_set,
2936 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
2937 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
2938 // Defensive
2939 assert(pipeline_layout);
2940 if (!pipeline_layout) return;
2941
2942 uint32_t required_size = first_set + set_count;
2943 const uint32_t last_binding_index = required_size - 1;
2944 assert(last_binding_index < pipeline_layout->compat_for_set.size());
2945
2946 // Some useful shorthand
2947 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
2948 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
2949 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
2950
2951 // We need this three times in this function, but nowhere else
2952 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
2953 if (ds && ds->IsPushDescriptor()) {
2954 assert(ds == last_bound.push_descriptor_set.get());
2955 last_bound.push_descriptor_set = nullptr;
2956 return true;
2957 }
2958 return false;
2959 };
2960
2961    // Clean up any "disturbed" entries before and after the range being set
2962 if (required_size < current_size) {
2963 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
2964            // We're disturbing entries past the last one; we'll shrink below, but first check for and clean up the push descriptor
2965 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
2966 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
2967 }
2968 } else {
2969 // We're not disturbing past last, so leave the upper binding data alone.
2970 required_size = current_size;
2971 }
2972 }
2973
2974 // We resize if we need more set entries or if those past "last" are disturbed
2975 if (required_size != current_size) {
2976 last_bound.per_set.resize(required_size);
2977 }
2978
2979 // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
2980 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
2981 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
2982 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
2983 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
2984 last_bound.per_set[set_idx].dynamicOffsets.clear();
2985 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
2986 }
2987 }
2988
2989 // Now update the bound sets with the input sets
2990 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
2991 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
2992 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
2993 cvdescriptorset::DescriptorSet *descriptor_set =
2994 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
2995
2996 // Record binding (or push)
2997 if (descriptor_set != last_bound.push_descriptor_set.get()) {
2998 // Only cleanup the push descriptors if they aren't the currently used set.
2999 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3000 }
3001 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3002 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3003
3004 if (descriptor_set) {
3005 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3006 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3007 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3008 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3009 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3010 input_dynamic_offsets = end_offset;
3011 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3012 } else {
3013 last_bound.per_set[set_idx].dynamicOffsets.clear();
3014 }
3015 if (!descriptor_set->IsPushDescriptor()) {
3016 // Can't cache validation of push_descriptors
3017 cb_state->validated_descriptor_sets.insert(descriptor_set);
3018 }
3019 }
3020 }
3021}
3022
3023// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3024void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3025 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3026 uint32_t firstSet, uint32_t setCount,
3027 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3028 const uint32_t *pDynamicOffsets) {
3029 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3030 auto pipeline_layout = GetPipelineLayout(layout);
3031
3032 // Resize binding arrays
3033 uint32_t last_set_index = firstSet + setCount - 1;
3034 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3035 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3036 }
3037
3038 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3039 dynamicOffsetCount, pDynamicOffsets);
3040 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3041 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3042}
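
// Illustrative sequence of the "disturbance" rules implemented above; cb, layout_a,
// layout_b, and the set handles are hypothetical. Assume layout_b first differs from
// layout_a at set 1:
#if 0
vkCmdBindDescriptorSets(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layout_a, 0, 3, sets, 0, nullptr);
// per_set = {S0, S1, S2}, compat ids taken from layout_a.
vkCmdBindDescriptorSets(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layout_b, 1, 1, &set1_b, 0, nullptr);
// Set 0 survives (compat ids match), set 1 is replaced, and the now-incompatible
// set 2 entry is shrunk away/invalidated by UpdateLastBoundDescriptorSets.
#endif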
3043
3044void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3045 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3046 const VkWriteDescriptorSet *pDescriptorWrites) {
3047 const auto &pipeline_layout = GetPipelineLayout(layout);
3048 // Short circuit invalid updates
3049 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
3050 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
3051 return;
3052
3053 // We need a descriptor set to update the bindings with, compatible with the passed layout
3054 const auto dsl = pipeline_layout->set_layouts[set];
3055 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
3056 auto &push_descriptor_set = last_bound.push_descriptor_set;
3057    // If we are disturbing the current push_descriptor_set, clear it
3058 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
John Zulaufd2c3dae2019-12-12 11:02:17 -07003059 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003060 }
3061
3062 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3063 nullptr);
3064 last_bound.pipeline_layout = layout;
3065
3066 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003067 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003068}
3069
3070void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3071 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3072 uint32_t set, uint32_t descriptorWriteCount,
3073 const VkWriteDescriptorSet *pDescriptorWrites) {
3074 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3075 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3076}
3077
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003078void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3079 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3080 const void *pValues) {
3081 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3082 if (cb_state != nullptr) {
3083 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3084
3085 auto &push_constant_data = cb_state->push_constant_data;
3086 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3087 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
3088 }
3089}
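
// Illustrative sketch: the recorded bytes land at [offset, offset + size) within
// cb_state->push_constant_data, which is sized from the bound layout's ranges.
// cb and layout are hypothetical handles.
#if 0
const float color[4] = {1.0f, 0.0f, 0.0f, 1.0f};
vkCmdPushConstants(cb, layout, VK_SHADER_STAGE_FRAGMENT_BIT, /*offset=*/16,
                   /*size=*/sizeof(color), color);
// Bytes 16..31 of push_constant_data now hold 'color'; bytes 0..15 are untouched.
#endif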
3090
locke-lunargd556cc32019-09-17 01:21:23 -06003091void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3092 VkIndexType indexType) {
3093 auto buffer_state = GetBufferState(buffer);
3094 auto cb_state = GetCBState(commandBuffer);
3095
3096 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
3097 cb_state->index_buffer_binding.buffer = buffer;
3098 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
3099 cb_state->index_buffer_binding.offset = offset;
3100 cb_state->index_buffer_binding.index_type = indexType;
3101    // Add binding for this index buffer to this command buffer
3102 AddCommandBufferBindingBuffer(cb_state, buffer_state);
3103}
3104
3105void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3106 uint32_t bindingCount, const VkBuffer *pBuffers,
3107 const VkDeviceSize *pOffsets) {
3108 auto cb_state = GetCBState(commandBuffer);
3109
3110 uint32_t end = firstBinding + bindingCount;
3111 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3112 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3113 }
3114
3115 for (uint32_t i = 0; i < bindingCount; ++i) {
3116 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
3117 vertex_buffer_binding.buffer = pBuffers[i];
3118 vertex_buffer_binding.offset = pOffsets[i];
3119        // Add binding for this vertex buffer to this command buffer
3120 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
3121 }
3122}
3123
3124void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3125 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
3126 auto cb_state = GetCBState(commandBuffer);
3127 auto dst_buffer_state = GetBufferState(dstBuffer);
3128
3129 // Update bindings between buffer and cmd buffer
3130 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
3131}
3132
Jeff Bolz310775c2019-10-09 00:46:33 -05003133bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
3134 EventToStageMap *localEventToStageMap) {
3135 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003136 return false;
3137}
3138
3139void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3140 VkPipelineStageFlags stageMask) {
3141 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3142 auto event_state = GetEventState(event);
3143 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003144 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003145 }
3146 cb_state->events.push_back(event);
3147 if (!cb_state->waitedEvents.count(event)) {
3148 cb_state->writeEventsBeforeWait.push_back(event);
3149 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003150 cb_state->eventUpdates.emplace_back(
3151 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3152 return SetEventStageMask(event, stageMask, localEventToStageMap);
3153 });
locke-lunargd556cc32019-09-17 01:21:23 -06003154}
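
// Illustrative sketch: the eventUpdates lambdas are not applied here; they are replayed
// at queue-submit time against a submit-local map, so event state is only committed
// when the recorded work actually executes. Hypothetical replay inside the tracker:
#if 0
EventToStageMap local_event_map;
for (auto &update : cb_state->eventUpdates) {
    update(this, /*do_validate=*/false, &local_event_map);  // calls SetEventStageMask
}
// local_event_map[event] now equals the stageMask recorded above.
#endif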
3155
3156void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3157 VkPipelineStageFlags stageMask) {
3158 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3159 auto event_state = GetEventState(event);
3160 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003161 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003162 }
3163 cb_state->events.push_back(event);
3164 if (!cb_state->waitedEvents.count(event)) {
3165 cb_state->writeEventsBeforeWait.push_back(event);
3166 }
3167
3168 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003169 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
3170 return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
3171 });
locke-lunargd556cc32019-09-17 01:21:23 -06003172}
3173
3174void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3175 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3176 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3177 uint32_t bufferMemoryBarrierCount,
3178 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3179 uint32_t imageMemoryBarrierCount,
3180 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3181 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3182 for (uint32_t i = 0; i < eventCount; ++i) {
3183 auto event_state = GetEventState(pEvents[i]);
3184 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003185 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
3186 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003187 }
3188 cb_state->waitedEvents.insert(pEvents[i]);
3189 cb_state->events.push_back(pEvents[i]);
3190 }
3191}
3192
Jeff Bolz310775c2019-10-09 00:46:33 -05003193bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3194 (*localQueryToStateMap)[object] = value;
3195 return false;
3196}
3197
3198bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
3199 QueryMap *localQueryToStateMap) {
3200 for (uint32_t i = 0; i < queryCount; i++) {
3201 QueryObject object = {queryPool, firstQuery + i};
3202 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003203 }
3204 return false;
3205}
3206
Jeff Bolz310775c2019-10-09 00:46:33 -05003207QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool,
3208 uint32_t queryIndex) const {
3209 QueryObject query = {queryPool, queryIndex};
locke-lunargd556cc32019-09-17 01:21:23 -06003210
Jeff Bolz310775c2019-10-09 00:46:33 -05003211 const std::array<const decltype(queryToStateMap) *, 2> map_list = {localQueryToStateMap, &queryToStateMap};
3212
3213 for (const auto map : map_list) {
3214 auto query_data = map->find(query);
3215 if (query_data != map->end()) {
3216 return query_data->second;
locke-lunargd556cc32019-09-17 01:21:23 -06003217 }
3218 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003219 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003220}
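
// Illustrative lookup order: the command-buffer-local map shadows the device-wide
// queryToStateMap, so state recorded but not yet submitted wins. 'pool' is hypothetical.
#if 0
QueryMap local;
local[QueryObject{pool, 0}] = QUERYSTATE_RUNNING;  // recorded, not yet submitted
// Even if queryToStateMap still says QUERYSTATE_RESET for the same query:
assert(GetQueryState(&local, pool, 0) == QUERYSTATE_RUNNING);
#endif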
3221
3222void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003223 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003224 cb_state->activeQueries.insert(query_obj);
3225 cb_state->startedQueries.insert(query_obj);
Jeff Bolz310775c2019-10-09 00:46:33 -05003226 cb_state->queryUpdates.emplace_back(
3227 [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
3228 SetQueryState(query_obj, QUERYSTATE_RUNNING, localQueryToStateMap);
3229 return false;
3230 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003231 auto pool_state = GetQueryPoolState(query_obj.pool);
3232 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
3233 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003234}
3235
3236void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3237 VkFlags flags) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003238 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003239 QueryObject query = {queryPool, slot};
3240 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3241 RecordCmdBeginQuery(cb_state, query);
3242}
3243
3244void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003245 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003246 cb_state->activeQueries.erase(query_obj);
3247 cb_state->queryUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003248 [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
3249 return SetQueryState(query_obj, QUERYSTATE_ENDED, localQueryToStateMap);
3250 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003251 auto pool_state = GetQueryPoolState(query_obj.pool);
3252 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
3253 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003254}
3255
3256void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003257 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003258 QueryObject query_obj = {queryPool, slot};
3259 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3260 RecordCmdEndQuery(cb_state, query_obj);
3261}
3262
3263void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3264 uint32_t firstQuery, uint32_t queryCount) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003265 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003266 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3267
Jeff Bolz310775c2019-10-09 00:46:33 -05003268 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
3269 bool do_validate, QueryMap *localQueryToStateMap) {
3270 return SetQueryStateMulti(queryPool, firstQuery, queryCount, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003271 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003272 auto pool_state = GetQueryPoolState(queryPool);
3273 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003274 cb_state);
3275}
3276
3277void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3278 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3279 VkDeviceSize dstOffset, VkDeviceSize stride,
3280 VkQueryResultFlags flags) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003281 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003282 auto cb_state = GetCBState(commandBuffer);
3283 auto dst_buff_state = GetBufferState(dstBuffer);
3284 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003285 auto pool_state = GetQueryPoolState(queryPool);
3286 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003287 cb_state);
3288}
3289
3290void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3291 VkQueryPool queryPool, uint32_t slot) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003292 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003293 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003294 auto pool_state = GetQueryPoolState(queryPool);
3295 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003296 cb_state);
3297 QueryObject query = {queryPool, slot};
3298 cb_state->queryUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003299 [query](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
3300 return SetQueryState(query, QUERYSTATE_ENDED, localQueryToStateMap);
3301 });
locke-lunargd556cc32019-09-17 01:21:23 -06003302}
3303
3304void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3305 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3306 VkResult result) {
3307 if (VK_SUCCESS != result) return;
3308 // Shadow create info and store in map
Jeff Bolz6ae39612019-10-11 20:57:36 -05003309 auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
locke-lunargd556cc32019-09-17 01:21:23 -06003310
3311 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
3312 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
3313 VkImageView view = pCreateInfo->pAttachments[i];
3314 auto view_state = GetImageViewState(view);
3315 if (!view_state) {
3316 continue;
3317 }
3318 }
3319 }
3320 frameBufferMap[*pFramebuffer] = std::move(fb_state);
3321}
3322
3323void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3324 RENDER_PASS_STATE *render_pass) {
3325 auto &subpass_to_node = render_pass->subpassToNode;
3326 subpass_to_node.resize(pCreateInfo->subpassCount);
3327 auto &self_dependencies = render_pass->self_dependencies;
3328 self_dependencies.resize(pCreateInfo->subpassCount);
3329
3330 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
3331 subpass_to_node[i].pass = i;
3332 self_dependencies[i].clear();
3333 }
3334 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
3335 const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
3336 if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
3337 if (dependency.srcSubpass == dependency.dstSubpass) {
3338 self_dependencies[dependency.srcSubpass].push_back(i);
3339 } else {
3340 subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
3341 subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
3342 }
3343 }
3344 }
3345}
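
// Worked example of the DAG construction above (illustrative): with
// pDependencies = { EXTERNAL->0, 0->1, 1->1 } the loop yields
//   subpass_to_node[0].next = {1}, subpass_to_node[1].prev = {0}, and
//   self_dependencies[1] = {2} (the index of the 1->1 dependency);
// dependencies involving VK_SUBPASS_EXTERNAL add no graph edges.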

static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
    if (index == VK_ATTACHMENT_UNUSED) return;

    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);

            // Resolve attachments are considered to be written
            if (subpass.pResolveAttachments) {
                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
            }
        }
        if (subpass.pDepthStencilAttachment) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
        }
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
        }
    }

    // Even though render_pass is a reference parameter, we still must std::move it so that move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Use of a mutable reference to shared_ptr exceeds the recommended usage in the Google style guide, but it intentionally forces
// the caller to move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of
// shared_ptr copy construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}
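
// Illustrative sketch (not part of the layer): because RecordCreateRenderPassState
// takes the shared_ptr by reference and moves from it, the caller's pointer is
// empty afterwards. A hypothetical caller:
//
//     auto state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
//     RecordCreateRenderPassState(RENDER_PASS_VERSION_1, state, pRenderPass);
//     // state is now null; ownership has transferred into renderPassMap without
//     // touching the atomic reference count of a shared_ptr copy.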

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // This is a shallow copy as that is all that is needed for now
        cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }
    }
}
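
// Illustrative sketch (not part of the layer): an application rendering on a subset
// of a device group chains VkDeviceGroupRenderPassBeginInfo, which is what the
// lvl_find_in_chain lookup above picks up. Hypothetical app-side setup:
//
//     VkDeviceGroupRenderPassBeginInfo dg = {VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO};
//     dg.deviceMask = 0x1;  // render only on physical device 0
//     VkRenderPassBeginInfo begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO};
//     begin.pNext = &dg;
//
// Without the chained struct, active_render_pass_device_mask falls back to the
// command buffer's initial_device_mask.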

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non-const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
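
// Illustrative sketch (not part of the layer): the propagation above means image
// layouts recorded in a secondary command buffer become visible on the primary
// that executes it. Hypothetical app-side flow:
//
//     vkCmdPipelineBarrier(secondary_cb, ...);   // transitions an image layout
//     vkCmdExecuteCommands(primary_cb, 1, &secondary_cb);
//     // PreCallRecordCmdExecuteCommands merges secondary_cb's image_layout_map
//     // into primary_cb's subresource maps via UpdateFrom().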

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
                                       image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
            AddAliasingImage(image_state);
        }
    }
}
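
// Illustrative sketch (not part of the layer): the swapchain branch above handles
// binds that target a swapchain image rather than a VkDeviceMemory allocation.
// Hypothetical app-side setup:
//
//     VkBindImageMemorySwapchainInfoKHR sc = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR};
//     sc.swapchain = swapchain;
//     sc.imageIndex = 0;
//     VkBindImageMemoryInfo bind = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO};
//     bind.pNext = &sc;
//     bind.image = image;  // memory stays VK_NULL_HANDLE on this path
//
// The tracker then records the image against the swapchain slot instead of a
// memory range.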

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}
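
// Illustrative sketch (not part of the layer): the scope transitions above follow
// the spec's transference rules. Exporting a fence as an opaque handle (reference
// transference) makes its state externally owned, while a sync FD export (copy
// transference) leaves the fence internal but unsignaled:
//
//     VkFenceGetFdInfoKHR info = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR};
//     info.fence = fence;
//     info.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
//     int fd = -1;
//     vkGetFenceFdKHR(device, &info, &fd);  // tracker records FENCE_UNSIGNALED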

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}
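
// Illustrative sketch (not part of the layer): the retirement rule above applies on
// both success and failure. A hypothetical swapchain rebuild:
//
//     VkSwapchainCreateInfoKHR ci = {VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR};
//     ci.surface = surface;
//     ci.oldSwapchain = old_swapchain;  // retired regardless of the result
//     VkSwapchainKHR new_swapchain = VK_NULL_HANDLE;
//     VkResult res = vkCreateSwapchainKHR(device, &ci, nullptr, &new_swapchain);
//     // Even if res != VK_SUCCESS, old_swapchain can no longer present, and the
//     // tracker marks its SWAPCHAIN_NODE as retired.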

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed, but if the app does that, it's
        // confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // This present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
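
// Illustrative sketch (not part of the layer): a typical acquire that would drive
// this record step, with hypothetical handles:
//
//     uint32_t image_index = 0;
//     VkResult res = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
//                                          acquire_semaphore, VK_NULL_HANDLE, &image_index);
//     // On VK_SUCCESS or VK_SUBOPTIMAL_KHR the tracker marks the image acquired
//     // and treats acquire_semaphore as signaled by no particular queue.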

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
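
// Illustrative sketch (not part of the layer): the QUERY_COUNT / QUERY_DETAILS
// split above mirrors Vulkan's two-call enumeration idiom:
//
//     uint32_t count = 0;
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);       // -> QUERY_COUNT
//     std::vector<VkQueueFamilyProperties> props(count);
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, props.data());  // -> QUERY_DETAILS
//
// Later validation can then warn when an app consumes details it never queried.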

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}
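
// Illustrative sketch (not part of the layer): the features2 paths record the
// pNext-extended query that an application might issue, for example:
//
//     VkPhysicalDeviceHostQueryResetFeaturesEXT host_reset = {
//         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT};
//     VkPhysicalDeviceFeatures2 features2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2};
//     features2.pNext = &host_reset;
//     vkGetPhysicalDeviceFeatures2(gpu, &features2);
//
// features2.initialize() then snapshots the queried features into the tracker's
// safe-struct copy for later checks.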

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}
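
// Illustrative sketch (not part of the layer): a label an app might insert, which
// ends up cached on the command buffer state for use in later error messages;
// `cmd_buf` is a hypothetical handle:
//
//     VkDebugUtilsLabelEXT label = {VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT};
//     label.pLabelName = "shadow pass";
//     vkCmdInsertDebugUtilsLabelEXT(cmd_buf, &label);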

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

bool ValidationStateTracker::PreCallValidateReleaseProfilingLockKHR(VkDevice device) const {
    bool skip = false;

    if (!performance_lock_acquired) {
        skip |= log_msg(
            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
            "VUID-vkReleaseProfilingLockKHR-device-03235",
            "The profiling lock of device must have been held via a previous successful call to vkAcquireProfilingLockKHR.");
    }

    return skip;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}
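
// Illustrative sketch (not part of the layer): the expected app-side pairing that
// keeps performance_lock_acquired consistent; `device` is a hypothetical handle:
//
//     VkAcquireProfilingLockInfoKHR lock_info = {VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR};
//     lock_info.timeout = UINT64_MAX;
//     vkAcquireProfilingLockKHR(device, &lock_info);  // sets the flag on success
//     // ... record and submit performance-query work ...
//     vkReleaseProfilingLockKHR(device);              // clears the flag; releasing
//     // without a prior acquire triggers VUID-vkReleaseProfilingLockKHR-device-03235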

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
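
// Illustrative sketch (not part of the layer): with a template, a raw pData blob
// replaces an array of VkWriteDescriptorSet. A hypothetical update the hooks above
// would decode:
//
//     struct Entry { VkDescriptorBufferInfo buffer_info; } entry = {{buffer, 0, VK_WHOLE_SIZE}};
//     // The template's VkDescriptorUpdateTemplateEntry supplies offset/stride into &entry.
//     vkUpdateDescriptorSetWithTemplate(device, descriptor_set, update_template, &entry);
//
// RecordUpdateDescriptorSetWithTemplateState then expands the blob into equivalent
// write updates via DecodedTemplateUpdate.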

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}
4436
Tony-LunarG977448c2019-12-02 14:52:02 -07004437void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4438 uint32_t queryCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06004439 // Do nothing if the feature is not enabled.
4440 if (!enabled_features.host_query_reset_features.hostQueryReset) return;
4441
4442 // Do nothing if the query pool has been destroyed.
4443 auto query_pool_state = GetQueryPoolState(queryPool);
4444 if (!query_pool_state) return;
4445
4446 // Reset the state of existing entries.
4447 QueryObject query_obj{queryPool, 0};
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004448 QueryObjectPass query_pass_obj{query_obj, 0};
locke-lunargd556cc32019-09-17 01:21:23 -06004449 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
4450 for (uint32_t i = 0; i < max_query_count; ++i) {
4451 query_obj.query = firstQuery + i;
4452 auto query_it = queryToStateMap.find(query_obj);
4453 if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004454 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
4455 for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
4456 query_pass_obj.perf_pass = passIndex;
4457 auto query_perf_it = queryPassToStateMap.find(query_pass_obj);
4458 if (query_perf_it != queryPassToStateMap.end()) query_perf_it->second = QUERYSTATE_RESET;
4459 }
4460 }
locke-lunargd556cc32019-09-17 01:21:23 -06004461 }
4462}
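
// Illustrative call sequence that reaches RecordResetQueryPool() (example only,
// assumes hostQueryReset was enabled via VkPhysicalDeviceHostQueryResetFeaturesEXT):
//   vkResetQueryPoolEXT(device, pool, firstQuery, queryCount);  // host-side, no command buffer
// Each tracked query in the clamped range transitions to QUERYSTATE_RESET.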

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}
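
// Worked example (illustrative values): a layout with bindings
//   {binding 0: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, descriptorCount 3}
//   {binding 1: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, descriptorCount 1}
// yields required_descriptors_by_type[UNIFORM_BUFFER] == 3 and
// required_descriptors_by_type[COMBINED_IMAGE_SAMPLER] == 1;
// PerformAllocateDescriptorSets() below subtracts these totals from the pool's
// remaining counts.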

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}
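
// Note: if VkDescriptorSetVariableDescriptorCountAllocateInfoEXT is absent, or its
// descriptorSetCount does not match the allocation's, variable_count falls back to 0;
// the variable_count_valid guard only keeps this bookkeeping from reading past
// pDescriptorCounts.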

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    cb_state->hasDrawCmd = true;
}
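
// Because UpdateStateCmdDrawType() layers on UpdateStateCmdDrawDispatchType(), a draw
// command marks the command buffer with both hasDrawCmd and hasDispatchCmd.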

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}
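
// Note: the mesh-task paths above null-check the indirect and count buffers before
// binding them, unlike the classic indirect draw paths, which assume GetBufferState()
// returned a valid pointer.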

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
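
// A module whose first word is not the SPIR-V magic number (possible, e.g., with
// VK_NV_glsl_shader) gets a default-constructed SHADER_MODULE_STATE; downstream code
// such as RecordPipelineShaderStage() then bails out early via has_valid_spirv.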

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the SPIR-V isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entry point isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}
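
// descriptor_uses is keyed by (set, binding): use.first.first is the descriptor set
// index and use.first.second the binding number, so active_slots accumulates, per
// set/binding, the requirement bits contributed by every stage of the pipeline.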

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
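
// Worked example (illustrative ranges): push constant ranges {offset 0, size 16} and
// {offset 16, size 32} give size_needed == max(0 + 16, 16 + 32) == 48, so
// push_constant_data is resized to 48 zero bytes, covering the furthest extent of
// every declared range.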

void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;                    // to be set later
            image_ci.flags = VK_IMAGE_CREATE_ALIAS_BIT;  // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            // Translate swapchain create flags into their image create flag equivalents.
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
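
// The two-tier state (QUERY_COUNT once the count is fetched, QUERY_DETAILS once image
// handles are fetched) mirrors Vulkan's enumerate-twice idiom (illustrative usage):
//   vkGetSwapchainImagesKHR(device, swapchain, &count, nullptr);          // count query
//   images.resize(count);
//   vkGetSwapchainImagesKHR(device, swapchain, &count, images.data());    // detail query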