/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}
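
// Illustrative sketch (comment only, not compiled): the application-side pNext chain this
// recorder reacts to. Variable names below are hypothetical.
//
//     VkExternalFormatANDROID ext_format = {VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID};
//     ext_format.externalFormat = driver_format;  // non-zero -> has_ahb_format/ahb_format are set
//     VkExternalMemoryImageCreateInfo ext_mem = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO};
//     ext_mem.pNext = &ext_format;
//     ext_mem.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;  // -> imported_ahb
//     VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
//     ici.pNext = &ext_mem;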

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->external_format_android) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
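
// A minimal sketch (assumed call site, comment only) of how the requirements cached above can
// be consumed later without another trip into the driver; names are hypothetical:
//
//     const IMAGE_STATE *state = GetImageState(image);          // imageMap lookup
//     if (state) {
//         VkDeviceSize needed = state->requirements.size;       // pre-fetched at create time
//         bool fits = (memory_offset + needed) <= alloc_size;   // e.g. a bind-time sanity check
//     }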

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create a deep copy of pQueueFamilyIndices, so fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Fetch the memory requirements now, in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}
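
// Worked example (comment only): if images A, B, and C were all created with
// VK_IMAGE_CREATE_ALIAS_BIT and bound to the same VkDeviceMemory, AddAliasingImage(B) walks
// bound_images == {A, B, C} and links B pairwise with every compatible peer, so afterwards
//     A.aliasing_images == {B},  B.aliasing_images == {A, C},  C.aliasing_images == {B}
// (assuming IsCompatibleAliasing holds between B and each of A and C).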

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // A one-way clear is sufficient here: because bound_images already contains both ends of each
    // cross reference, clearing every image's aliasing set in this single loop removes all links.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}
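
// Sketch of why the one-way clear above suffices (comment only): links are only ever inserted
// pairwise in AddAliasingImage between images bound to the same memory or swapchain slot, so
// every alias of an image in bound_images is itself in bound_images. For example:
//
//     // before: A.aliasing_images == {B}, B.aliasing_images == {A}
//     RemoveAliasingImages({A, B});
//     // after:  A.aliasing_images == {},  B.aliasing_images == {}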

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}
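
// Illustrative sketch (comment only) of an allocation that would take the dedicated branch
// above; variable names are hypothetical:
//
//     VkMemoryDedicatedAllocateInfoKHR dedicated = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR};
//     dedicated.image = image;                                   // recorded as mem_info->dedicated_image
//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
//     alloc_info.pNext = &dedicated;
//     alloc_info.allocationSize = reqs.size;
//     alloc_info.memoryTypeIndex = type_index;
//     vkAllocateMemory(device, &alloc_info, nullptr, &mem);      // then AddMemObjInfo records it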

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    AddCommandBufferBinding(pMemInfo->cb_bindings,
                                            VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
                }
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}
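
// Hedged usage sketch (comment only): the shape of a typical non-sparse call site, e.g. from a
// vkBindImageMemory record hook (names hypothetical):
//
//     SetMemBinding(mem, image_state, memoryOffset, VulkanTypedHandle(image, kVulkanObjectTypeImage));
//     // image_state->binding now covers [memoryOffset, memoryOffset + requirements.size)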

// For the NULL mem case, clear any previous binding. Otherwise:
//  Make sure the given object is in its object map
//  If a previous binding existed, update the binding
//  Add a reference from the objectInfo to the memoryInfo
//  Add a reference off of the object's binding info
// Return true if the addition is successful, false otherwise
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = false;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}
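
// Sketch (comment only): for sparse resources each range bound via vkQueueBindSparse becomes
// its own MEM_BINDING entry instead of overwriting a single binding. Assuming MEM_BINDING
// carries {mem, offset, size}, a call site might look like:
//
//     MEM_BINDING binding = {bind.memory, bind.memoryOffset, bind.size};
//     SetSparseMemBinding(binding, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));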

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
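
// The delta path above relies on both binding-requirement maps being ordered; a standalone
// illustration of the same std::set_difference idiom (comment only):
//
//     std::set<uint32_t> validated = {0, 1, 2};   // bindings already recorded
//     std::set<uint32_t> required  = {1, 2, 3};   // bindings this draw needs
//     std::set<uint32_t> delta;
//     std::set_difference(required.begin(), required.end(), validated.begin(), validated.end(),
//                         std::inserter(delta, delta.begin()));  // delta == {3}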

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}
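
// Note: when the VulkanTypedHandle was captured with its state pointer, e.g.
//     VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state)
// the fast path at the top of this function returns the cached node directly, so the switch
// and its map lookups only run for handles created without a node pointer.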

// Tie the VulkanTypedHandle to the cmd buffer which includes:
//  Add object_binding to cmd buffer
//  Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}
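
// Worked example (comment only): the first time object O is bound to command buffer CB,
//     cb_bindings[CB] == k  means  CB->object_bindings[k] refers to O,
// so later cleanup can locate O's slot in the vector without a linear scan. A repeat call for
// the same CB changes nothing and returns false, which is how the callers above skip
// re-recording the per-memory-object sub-bindings.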

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
949
950void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
951 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
952 VkResult result) {
953 if (VK_SUCCESS != result) return;
954
955 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
956 if (nullptr == enabled_features_found) {
957 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
958 if (features2) {
959 enabled_features_found = &(features2->features);
960 }
961 }
962
963 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
964 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
965 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
966
967 if (nullptr == enabled_features_found) {
968 state_tracker->enabled_features.core = {};
969 } else {
970 state_tracker->enabled_features.core = *enabled_features_found;
971 }
972
973 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
974 // previously set them through an explicit API call.
975 uint32_t count;
976 auto pd_state = GetPhysicalDeviceState(gpu);
977 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
978 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
979 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
980 // Save local link to this device's physical device state
981 state_tracker->physical_device_state = pd_state;
982
983 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
984 state_tracker->physical_device_count =
985 device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;
986
987 const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
988 if (descriptor_indexing_features) {
989 state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
990 }
991
992 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
993 if (eight_bit_storage_features) {
994 state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
995 }
996
997 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
998 if (exclusive_scissor_features) {
999 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1000 }
1001
1002 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1003 if (shading_rate_image_features) {
1004 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1005 }
1006
1007 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1008 if (mesh_shader_features) {
1009 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1010 }
1011
1012 const auto *inline_uniform_block_features =
1013 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1014 if (inline_uniform_block_features) {
1015 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1016 }
1017
1018 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1019 if (transform_feedback_features) {
1020 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1021 }
1022
1023 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
1024 if (float16_int8_features) {
1025 state_tracker->enabled_features.float16_int8 = *float16_int8_features;
1026 }
1027
1028 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1029 if (vtx_attrib_div_features) {
1030 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1031 }
1032
1033 const auto *uniform_buffer_standard_layout_features =
1034 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
1035 if (uniform_buffer_standard_layout_features) {
1036 state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
1037 }
1038
1039 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
1040 if (scalar_block_layout_features) {
1041 state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
1042 }
1043
1044 const auto *buffer_address = lvl_find_in_chain<VkPhysicalDeviceBufferAddressFeaturesEXT>(pCreateInfo->pNext);
1045 if (buffer_address) {
1046 state_tracker->enabled_features.buffer_address = *buffer_address;
1047 }
1048
1049 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1050 if (cooperative_matrix_features) {
1051 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1052 }
1053
locke-lunargd556cc32019-09-17 01:21:23 -06001054 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
1055 if (host_query_reset_features) {
1056 state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
1057 }
1058
1059 const auto *compute_shader_derivatives_features =
1060 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1061 if (compute_shader_derivatives_features) {
1062 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1063 }
1064
1065 const auto *fragment_shader_barycentric_features =
1066 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1067 if (fragment_shader_barycentric_features) {
1068 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1069 }
1070
1071 const auto *shader_image_footprint_features =
1072 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1073 if (shader_image_footprint_features) {
1074 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1075 }
1076
1077 const auto *fragment_shader_interlock_features =
1078 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1079 if (fragment_shader_interlock_features) {
1080 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1081 }
1082
1083 const auto *demote_to_helper_invocation_features =
1084 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1085 if (demote_to_helper_invocation_features) {
1086 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1087 }
1088
1089 const auto *texel_buffer_alignment_features =
1090 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1091 if (texel_buffer_alignment_features) {
1092 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1093 }
1094
1095 const auto *imageless_framebuffer_features =
1096 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
1097 if (imageless_framebuffer_features) {
1098 state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
1099 }
1100
1101 const auto *pipeline_exe_props_features =
1102 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1103 if (pipeline_exe_props_features) {
1104 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1105 }
1106
Jeff Bolz82f854d2019-09-17 14:56:47 -05001107 const auto *dedicated_allocation_image_aliasing_features =
1108 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1109 if (dedicated_allocation_image_aliasing_features) {
1110 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1111 *dedicated_allocation_image_aliasing_features;
1112 }
1113
Jeff Bolz526f2d52019-09-18 13:18:08 -05001114 const auto *subgroup_extended_types_features =
1115 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(pCreateInfo->pNext);
1116 if (subgroup_extended_types_features) {
1117 state_tracker->enabled_features.subgroup_extended_types_features = *subgroup_extended_types_features;
1118 }
1119
Piers Daniell9af77cd2019-10-16 13:54:12 -06001120 const auto *separate_depth_stencil_layouts_features =
1121 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>(pCreateInfo->pNext);
1122 if (separate_depth_stencil_layouts_features) {
1123 state_tracker->enabled_features.separate_depth_stencil_layouts_features = *separate_depth_stencil_layouts_features;
1124 }
1125
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001126 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1127 if (performance_query_features) {
1128 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1129 }
1130
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001131 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR>(pCreateInfo->pNext);
1132 if (timeline_semaphore_features) {
1133 state_tracker->enabled_features.timeline_semaphore_features = *timeline_semaphore_features;
1134 }
1135
Tobias Hector782bcde2019-11-28 16:19:42 +00001136 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1137 if (device_coherent_memory_features) {
1138 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1139 }
1140
locke-lunargd556cc32019-09-17 01:21:23 -06001141 // Store physical device properties and physical device mem limits into CoreChecks structs
1142 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1143 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
1144
1145 const auto &dev_ext = state_tracker->device_extensions;
1146 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1147
1148 if (dev_ext.vk_khr_push_descriptor) {
1149 // Get the needed push_descriptor limits
1150 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1151 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1152 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1153 }
1154
1155 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
1156 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1157 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1158 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1159 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
1160 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
1161 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
1162 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
1163 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1164 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001165 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001166 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &phys_dev_props->timeline_semaphore_props);
locke-lunargd556cc32019-09-17 01:21:23 -06001167 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1168 // Get the needed cooperative_matrix properties
1169 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1170 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
1171 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1172 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1173
1174 uint32_t numCooperativeMatrixProperties = 0;
1175 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
1176 state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
1177 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
1178
1179 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
1180 state_tracker->cooperative_matrix_properties.data());
1181 }
1182 if (state_tracker->api_version >= VK_API_VERSION_1_1) {
1183 // Get the needed subgroup limits
1184 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
1185 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
1186 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1187
1188 state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
1189 }
1190
1191 // Store queue family data
1192 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1193 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
1194 state_tracker->queue_family_index_map.insert(
1195 std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
1196 }
1197 }
1198}
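
// Illustrative walkthrough of the queue-family bookkeeping above (hypothetical counts, not part of
// the tracker): a VkDeviceCreateInfo with
//     pQueueCreateInfos[0] = { queueFamilyIndex = 0, queueCount = 2 }
//     pQueueCreateInfos[1] = { queueFamilyIndex = 1, queueCount = 1 }
// leaves queue_family_index_map = { {0, 2}, {1, 1} }, so later vkGetDeviceQueue-style requests can
// be checked against the family indices and queue counts the application actually asked for.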
1199
1200void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1201 if (!device) return;
1202
locke-lunargd556cc32019-09-17 01:21:23 -06001203 // Reset all command buffers before destroying them, to unlink object_bindings.
1204 for (auto &commandBuffer : commandBufferMap) {
1205 ResetCommandBufferState(commandBuffer.first);
1206 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001207 pipelineMap.clear();
1208 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001209 commandBufferMap.clear();
1210
1211 // This will also delete all sets in the pool & remove them from setMap
1212 DeleteDescriptorSetPools();
1213 // All sets should be removed
1214 assert(setMap.empty());
1215 descriptorSetLayoutMap.clear();
1216 imageViewMap.clear();
1217 imageMap.clear();
1218 bufferViewMap.clear();
1219 bufferMap.clear();
1220 // Queues persist until device is destroyed
1221 queueMap.clear();
1222}
1223
1224// Loop through bound objects and increment their in_use counts.
1225void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1226 for (auto obj : cb_node->object_bindings) {
1227 auto base_obj = GetStateStructPtrFromObject(obj);
1228 if (base_obj) {
1229 base_obj->in_use.fetch_add(1);
1230 }
1231 }
1232}
1233
1234// Track which resources are in-flight by atomically incrementing their "in_use" count
1235void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1236 cb_node->submitCount++;
1237 cb_node->in_use.fetch_add(1);
1238
1239 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
1240 IncrementBoundObjects(cb_node);
    // TODO : We should be able to remove the NULL look-up checks from the code below once all the
    // corresponding cases are verified to cause CB_INVALID state; that state should then be
    // flagged prior to calling this function
1244 for (auto event : cb_node->writeEventsBeforeWait) {
1245 auto event_state = GetEventState(event);
1246 if (event_state) event_state->write_in_use++;
1247 }
1248}
1249
1250// Decrement in-use count for objects bound to command buffer
1251void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
1252 BASE_NODE *base_obj = nullptr;
1253 for (auto obj : cb_node->object_bindings) {
1254 base_obj = GetStateStructPtrFromObject(obj);
1255 if (base_obj) {
1256 base_obj->in_use.fetch_sub(1);
1257 }
1258 }
1259}
1260
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001261void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
locke-lunargd556cc32019-09-17 01:21:23 -06001262 std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
1263
1264 // Roll this queue forward, one submission at a time.
1265 while (pQueue->seq < seq) {
1266 auto &submission = pQueue->submissions.front();
1267
1268 for (auto &wait : submission.waitSemaphores) {
1269 auto pSemaphore = GetSemaphoreState(wait.semaphore);
1270 if (pSemaphore) {
1271 pSemaphore->in_use.fetch_sub(1);
1272 }
1273 auto &lastSeq = otherQueueSeqs[wait.queue];
1274 lastSeq = std::max(lastSeq, wait.seq);
1275 }
1276
1277 for (auto &semaphore : submission.signalSemaphores) {
1278 auto pSemaphore = GetSemaphoreState(semaphore);
1279 if (pSemaphore) {
1280 pSemaphore->in_use.fetch_sub(1);
1281 }
1282 }
1283
1284 for (auto &semaphore : submission.externalSemaphores) {
1285 auto pSemaphore = GetSemaphoreState(semaphore);
1286 if (pSemaphore) {
1287 pSemaphore->in_use.fetch_sub(1);
1288 }
1289 }
1290
1291 for (auto cb : submission.cbs) {
1292 auto cb_node = GetCBState(cb);
1293 if (!cb_node) {
1294 continue;
1295 }
1296 // First perform decrement on general case bound objects
1297 DecrementBoundResources(cb_node);
1298 for (auto event : cb_node->writeEventsBeforeWait) {
1299 auto eventNode = eventMap.find(event);
1300 if (eventNode != eventMap.end()) {
1301 eventNode->second.write_in_use--;
1302 }
1303 }
Jeff Bolz310775c2019-10-09 00:46:33 -05001304 QueryMap localQueryToStateMap;
1305 for (auto &function : cb_node->queryUpdates) {
1306 function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
1307 }
1308
1309 for (auto queryStatePair : localQueryToStateMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001310 if (queryStatePair.second == QUERYSTATE_ENDED) {
1311 queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001312
1313 const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryStatePair.first.pool);
                    if (qp_state && qp_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
                        queryPassToStateMap[QueryObjectPass(queryStatePair.first, submission.perf_submit_pass)] =
                            QUERYSTATE_AVAILABLE;
                    }
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001317 }
locke-lunargd556cc32019-09-17 01:21:23 -06001318 }
locke-lunargd556cc32019-09-17 01:21:23 -06001319 cb_node->in_use.fetch_sub(1);
1320 }
1321
1322 auto pFence = GetFenceState(submission.fence);
1323 if (pFence && pFence->scope == kSyncScopeInternal) {
1324 pFence->state = FENCE_RETIRED;
1325 }
1326
1327 pQueue->submissions.pop_front();
1328 pQueue->seq++;
1329 }
1330
1331 // Roll other queues forward to the highest seq we saw a wait for
1332 for (auto qs : otherQueueSeqs) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001333 RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001334 }
1335}
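
// Illustrative walkthrough of the retirement logic above (hypothetical queues and sequence numbers):
// if queue A's pending submission waited on a semaphore that queue B signals at B's seq 7, retiring
// that submission records otherQueueSeqs[B] = 7, and the loop at the end recursively calls
// RetireWorkOnQueue(GetQueueState(B), 7) -- A's completed wait is proof that B progressed to seq 7.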
1336
1337// Submit a fence to a queue, delimiting previous fences and previous untracked
1338// work by it.
1339static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
1340 pFence->state = FENCE_INFLIGHT;
1341 pFence->signaler.first = pQueue->queue;
1342 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
1343}
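
// Worked example of the signaler bookkeeping above (hypothetical values): with pQueue->seq == 10,
// two submissions already pending (pQueue->submissions.size() == 2), and submitCount == 3, the fence
// gets signaler.second = 10 + 2 + 3 = 15, i.e. it is considered signaled once RetireWorkOnQueue has
// rolled this queue past the last of the three new submissions.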
1344
1345void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
1346 VkFence fence, VkResult result) {
1347 uint64_t early_retire_seq = 0;
1348 auto pQueue = GetQueueState(queue);
1349 auto pFence = GetFenceState(fence);
1350
1351 if (pFence) {
1352 if (pFence->scope == kSyncScopeInternal) {
1353 // Mark fence in use
1354 SubmitFence(pQueue, pFence, std::max(1u, submitCount));
1355 if (!submitCount) {
1356 // If no submissions, but just dropping a fence on the end of the queue,
1357 // record an empty submission with just the fence, so we can determine
1358 // its completion.
1359 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001360 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001361 }
1362 } else {
            // Retire work up until this fence early; we will not see the wait that corresponds to this signal
1364 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1365 }
1366 }
1367
1368 // Now process each individual submit
1369 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1370 std::vector<VkCommandBuffer> cbs;
1371 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1372 vector<SEMAPHORE_WAIT> semaphore_waits;
1373 vector<VkSemaphore> semaphore_signals;
1374 vector<VkSemaphore> semaphore_externals;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001375 auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06001376 for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
1377 VkSemaphore semaphore = submit->pWaitSemaphores[i];
1378 auto pSemaphore = GetSemaphoreState(semaphore);
1379 if (pSemaphore) {
1380 if (pSemaphore->scope == kSyncScopeInternal) {
1381 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1382 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1383 pSemaphore->in_use.fetch_add(1);
1384 }
1385 pSemaphore->signaler.first = VK_NULL_HANDLE;
1386 pSemaphore->signaled = false;
1387 } else {
1388 semaphore_externals.push_back(semaphore);
1389 pSemaphore->in_use.fetch_add(1);
1390 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1391 pSemaphore->scope = kSyncScopeInternal;
1392 }
1393 }
1394 }
1395 }
1396 for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
1397 VkSemaphore semaphore = submit->pSignalSemaphores[i];
1398 auto pSemaphore = GetSemaphoreState(semaphore);
1399 if (pSemaphore) {
1400 if (pSemaphore->scope == kSyncScopeInternal) {
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001401 if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
1402 pSemaphore->signaler.first = queue;
1403 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1404 pSemaphore->signaled = true;
1405 } else {
1406 pSemaphore->payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
1407 }
locke-lunargd556cc32019-09-17 01:21:23 -06001408 pSemaphore->in_use.fetch_add(1);
1409 semaphore_signals.push_back(semaphore);
1410 } else {
                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1412 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1413 }
1414 }
1415 }
1416 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
1417 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
1418 if (cb_node) {
1419 cbs.push_back(submit->pCommandBuffers[i]);
1420 for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
1421 cbs.push_back(secondaryCmdBuffer->commandBuffer);
1422 IncrementResources(secondaryCmdBuffer);
1423 }
1424 IncrementResources(cb_node);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001425
1426 QueryMap localQueryToStateMap;
1427 for (auto &function : cb_node->queryUpdates) {
1428 function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
1429 }
1430
1431 for (auto queryStatePair : localQueryToStateMap) {
1432 queryToStateMap[queryStatePair.first] = queryStatePair.second;
1433 }
1434
1435 EventToStageMap localEventToStageMap;
1436 for (auto &function : cb_node->eventUpdates) {
1437 function(nullptr, /*do_validate*/ false, &localEventToStageMap);
1438 }
1439
1440 for (auto eventStagePair : localEventToStageMap) {
1441 eventMap[eventStagePair.first].stageMask = eventStagePair.second;
1442 }
locke-lunargd556cc32019-09-17 01:21:23 -06001443 }
1444 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001445
1446 const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
1447
locke-lunargd556cc32019-09-17 01:21:23 -06001448 pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001449 submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
1450 perf_submit ? perf_submit->counterPassIndex : 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001451 }
1452
1453 if (early_retire_seq) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001454 RetireWorkOnQueue(pQueue, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001455 }
1456}
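
// Hedged application-side sketch (hypothetical handles/values) of the pNext chain consumed above:
// when a timeline semaphore appears in pSignalSemaphores, pSemaphore->payload is taken from the
// matching entry of VkTimelineSemaphoreSubmitInfoKHR::pSignalSemaphoreValues.
//
//     uint64_t signal_value = 42;                                   // hypothetical timeline value
//     VkTimelineSemaphoreSubmitInfoKHR timeline_info = {};
//     timeline_info.sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR;
//     timeline_info.signalSemaphoreValueCount = 1;
//     timeline_info.pSignalSemaphoreValues = &signal_value;
//
//     VkSubmitInfo submit = {};
//     submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
//     submit.pNext = &timeline_info;
//     submit.signalSemaphoreCount = 1;
//     submit.pSignalSemaphores = &timeline_semaphore;               // hypothetical VkSemaphore
//     vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);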
1457
1458void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
1459 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
1460 VkResult result) {
1461 if (VK_SUCCESS == result) {
1462 AddMemObjInfo(device, *pMemory, pAllocateInfo);
1463 }
1464 return;
1465}
1466
1467void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
1468 if (!mem) return;
1469 DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
1470 const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
1471
1472 // Clear mem binding for any bound objects
1473 for (const auto &obj : mem_info->obj_bindings) {
1474 BINDABLE *bindable_state = nullptr;
1475 switch (obj.type) {
1476 case kVulkanObjectTypeImage:
1477 bindable_state = GetImageState(obj.Cast<VkImage>());
1478 break;
1479 case kVulkanObjectTypeBuffer:
1480 bindable_state = GetBufferState(obj.Cast<VkBuffer>());
1481 break;
1482 case kVulkanObjectTypeAccelerationStructureNV:
1483 bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
1484 break;
1485
1486 default:
1487 // Should only have acceleration structure, buffer, or image objects bound to memory
1488 assert(0);
1489 }
1490
1491 if (bindable_state) {
1492 bindable_state->binding.mem = MEMORY_UNBOUND;
1493 bindable_state->UpdateBoundMemorySet();
1494 }
1495 }
1496 // Any bound cmd buffers are now invalid
1497 InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
1498 RemoveAliasingImages(mem_info->bound_images);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001499 mem_info->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001500 memObjMap.erase(mem);
1501}
1502
1503void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
1504 VkFence fence, VkResult result) {
1505 if (result != VK_SUCCESS) return;
1506 uint64_t early_retire_seq = 0;
1507 auto pFence = GetFenceState(fence);
1508 auto pQueue = GetQueueState(queue);
1509
1510 if (pFence) {
1511 if (pFence->scope == kSyncScopeInternal) {
1512 SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
1513 if (!bindInfoCount) {
1514 // No work to do, just dropping a fence in the queue by itself.
1515 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001516 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001517 }
1518 } else {
            // Retire work up until this fence early; we will not see the wait that corresponds to this signal
1520 early_retire_seq = pQueue->seq + pQueue->submissions.size();
1521 }
1522 }
1523
1524 for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
1525 const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
1526 // Track objects tied to memory
1527 for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
1528 for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
1529 auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
1530 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1531 VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
1532 }
1533 }
1534 for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
1535 for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
1536 auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
1537 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
1538 VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
1539 }
1540 }
1541 for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
1542 for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
1543 auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
1544 // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
1545 VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
1546 SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
1547 VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
1548 }
1549 }
1550
1551 std::vector<SEMAPHORE_WAIT> semaphore_waits;
1552 std::vector<VkSemaphore> semaphore_signals;
1553 std::vector<VkSemaphore> semaphore_externals;
1554 for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
1555 VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
1556 auto pSemaphore = GetSemaphoreState(semaphore);
1557 if (pSemaphore) {
1558 if (pSemaphore->scope == kSyncScopeInternal) {
1559 if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
1560 semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
1561 pSemaphore->in_use.fetch_add(1);
1562 }
1563 pSemaphore->signaler.first = VK_NULL_HANDLE;
1564 pSemaphore->signaled = false;
1565 } else {
1566 semaphore_externals.push_back(semaphore);
1567 pSemaphore->in_use.fetch_add(1);
1568 if (pSemaphore->scope == kSyncScopeExternalTemporary) {
1569 pSemaphore->scope = kSyncScopeInternal;
1570 }
1571 }
1572 }
1573 }
1574 for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
1575 VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
1576 auto pSemaphore = GetSemaphoreState(semaphore);
1577 if (pSemaphore) {
1578 if (pSemaphore->scope == kSyncScopeInternal) {
1579 pSemaphore->signaler.first = queue;
1580 pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
1581 pSemaphore->signaled = true;
1582 pSemaphore->in_use.fetch_add(1);
1583 semaphore_signals.push_back(semaphore);
1584 } else {
                    // Retire work up until this submit early; we will not see the wait that corresponds to this signal
1586 early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
1587 }
1588 }
1589 }
1590
1591 pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001592 bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
locke-lunargd556cc32019-09-17 01:21:23 -06001593 }
1594
1595 if (early_retire_seq) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001596 RetireWorkOnQueue(pQueue, early_retire_seq);
locke-lunargd556cc32019-09-17 01:21:23 -06001597 }
1598}
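
// Hedged application-side sketch (hypothetical handles/sizes) of a buffer bind that the loop above
// records through SetSparseMemBinding:
//
//     VkSparseMemoryBind bind = {};
//     bind.resourceOffset = 0;
//     bind.size = 65536;
//     bind.memory = memory;                        // hypothetical VkDeviceMemory
//     bind.memoryOffset = 0;
//
//     VkSparseBufferMemoryBindInfo buffer_bind = {};
//     buffer_bind.buffer = sparse_buffer;          // hypothetical VkBuffer created for sparse binding
//     buffer_bind.bindCount = 1;
//     buffer_bind.pBinds = &bind;
//
//     VkBindSparseInfo bind_info = {};
//     bind_info.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
//     bind_info.bufferBindCount = 1;
//     bind_info.pBufferBinds = &buffer_bind;
//     vkQueueBindSparse(queue, 1, &bind_info, VK_NULL_HANDLE);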
1599
1600void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1601 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
1602 VkResult result) {
1603 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001604 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06001605 semaphore_state->signaler.first = VK_NULL_HANDLE;
1606 semaphore_state->signaler.second = 0;
1607 semaphore_state->signaled = false;
1608 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001609 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
1610 semaphore_state->payload = 0;
1611 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
1612 if (semaphore_type_create_info) {
1613 semaphore_state->type = semaphore_type_create_info->semaphoreType;
1614 semaphore_state->payload = semaphore_type_create_info->initialValue;
1615 }
locke-lunargd556cc32019-09-17 01:21:23 -06001616 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
1617}
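
// Hedged application-side sketch (hypothetical device handle): creating the timeline semaphore whose
// semaphoreType/initialValue the code above records from VkSemaphoreTypeCreateInfoKHR:
//
//     VkSemaphoreTypeCreateInfoKHR type_info = {};
//     type_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 0;
//
//     VkSemaphoreCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
//     create_info.pNext = &type_info;
//     VkSemaphore semaphore = VK_NULL_HANDLE;
//     vkCreateSemaphore(device, &create_info, nullptr, &semaphore);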
1618
1619void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
1620 VkSemaphoreImportFlagsKHR flags) {
1621 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
1622 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || (flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR)) &&
1624 sema_node->scope == kSyncScopeInternal) {
1625 sema_node->scope = kSyncScopeExternalTemporary;
1626 } else {
1627 sema_node->scope = kSyncScopeExternalPermanent;
1628 }
1629 }
1630}
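
// Hedged application-side sketch (POSIX fd path, hypothetical handles): a temporary import that takes
// the kSyncScopeExternalTemporary branch above, later reverting to internal scope on a queue wait:
//
//     VkImportSemaphoreFdInfoKHR import_info = {};
//     import_info.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
//     import_info.semaphore = semaphore;                            // hypothetical VkSemaphore
//     import_info.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
//     import_info.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
//     import_info.fd = fd;                                          // hypothetical fd, e.g. from vkGetSemaphoreFdKHR
//     vkImportSemaphoreFdKHR(device, &import_info);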
1631
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001632void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
1633 VkResult result) {
    if (VK_SUCCESS != result) return;
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    if (pSemaphore) {
        pSemaphore->payload = pSignalInfo->value;
    }
1636}
1637
locke-lunargd556cc32019-09-17 01:21:23 -06001638void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
1639 auto mem_info = GetDevMemState(mem);
1640 if (mem_info) {
1641 mem_info->mapped_range.offset = offset;
1642 mem_info->mapped_range.size = size;
1643 mem_info->p_driver_data = *ppData;
1644 }
1645}
1646
1647void ValidationStateTracker::RetireFence(VkFence fence) {
1648 auto pFence = GetFenceState(fence);
1649 if (pFence && pFence->scope == kSyncScopeInternal) {
1650 if (pFence->signaler.first != VK_NULL_HANDLE) {
1651 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001652 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06001653 } else {
1654 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
1655 // the fence as retired.
1656 pFence->state = FENCE_RETIRED;
1657 }
1658 }
1659}
1660
1661void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1662 VkBool32 waitAll, uint64_t timeout, VkResult result) {
1663 if (VK_SUCCESS != result) return;
1664
1665 // When we know that all fences are complete we can clean/remove their CBs
1666 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
1667 for (uint32_t i = 0; i < fenceCount; i++) {
1668 RetireFence(pFences[i]);
1669 }
1670 }
    // NOTE : The alternate case, where only some fences have completed, is not handled here. In
    // that case the app must call vkGetFenceStatus() to determine which fences completed, at
    // which point we'll clean/remove their CBs if complete.
1674}
1675
1676void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
1677 if (VK_SUCCESS != result) return;
1678 RetireFence(fence);
1679}
1680
1681void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
1682 // Add queue to tracking set only if it is new
1683 auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
1685 QUEUE_STATE *queue_state = &queueMap[queue];
1686 queue_state->queue = queue;
1687 queue_state->queueFamilyIndex = queue_family_index;
1688 queue_state->seq = 0;
1689 }
1690}
1691
1692void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
1693 VkQueue *pQueue) {
1694 RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
1695}
1696
1697void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
1698 RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
1699}
1700
1701void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
1702 if (VK_SUCCESS != result) return;
1703 QUEUE_STATE *queue_state = GetQueueState(queue);
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001704 RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06001705}
1706
1707void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
1708 if (VK_SUCCESS != result) return;
1709 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05001710 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06001711 }
1712}
1713
1714void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
1715 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001716 auto fence_state = GetFenceState(fence);
1717 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001718 fenceMap.erase(fence);
1719}
1720
1721void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
1722 const VkAllocationCallbacks *pAllocator) {
1723 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001724 auto semaphore_state = GetSemaphoreState(semaphore);
1725 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001726 semaphoreMap.erase(semaphore);
1727}
1728
1729void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
1730 if (!event) return;
1731 EVENT_STATE *event_state = GetEventState(event);
1732 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
1733 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
1734 eventMap.erase(event);
1735}
1736
1737void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
1738 const VkAllocationCallbacks *pAllocator) {
1739 if (!queryPool) return;
1740 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
1741 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
1742 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001743 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001744 queryPoolMap.erase(queryPool);
1745}
1746
// Object with given handle is being bound to memory with the given mem_info struct.
// Track the binding by inserting the handle into the appropriate bound-object set on mem_info.
// Note that this state-tracking function only records the binding; it does not itself scan previous
// ranges or flag linear/non-linear overlap errors, so memoryOffset, memRequirements, and is_linear
// are currently unused here.
// is_linear indicates a buffer or linear image
1754void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
1755 VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
1756 if (typed_handle.type == kVulkanObjectTypeImage) {
1757 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
1758 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1759 mem_info->bound_buffers.insert(typed_handle.handle);
1760 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1761 mem_info->bound_acceleration_structures.insert(typed_handle.handle);
1762 } else {
1763 // Unsupported object type
1764 assert(false);
1765 }
1766}
1767
1768void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1769 VkMemoryRequirements mem_reqs, bool is_linear) {
1770 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
1771}
1772
1773void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
1774 const VkMemoryRequirements &mem_reqs) {
1775 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
1776}
1777
1778void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
1779 VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
1780 InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
1781}
1782
// This function removes the handle from the appropriate bound-object set on mem_info.
1784static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
1785 if (typed_handle.type == kVulkanObjectTypeImage) {
1786 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
1787 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
1788 mem_info->bound_buffers.erase(typed_handle.handle);
1789 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
1790 mem_info->bound_acceleration_structures.erase(typed_handle.handle);
1791 } else {
1792 // Unsupported object type
1793 assert(false);
1794 }
1795}
1796
1797void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
1798 RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
1799}
1800
1801void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
1802 RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
1803}
1804
1805void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
1806 RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
1807}
1808
1809void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
1810 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1811 if (buffer_state) {
1812 // Track bound memory range information
1813 auto mem_info = GetDevMemState(mem);
1814 if (mem_info) {
1815 InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
1816 }
1817 // Track objects tied to memory
1818 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
1819 }
1820}
1821
1822void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
1823 VkDeviceSize memoryOffset, VkResult result) {
1824 if (VK_SUCCESS != result) return;
1825 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
1826}
1827
1828void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
1829 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1830 for (uint32_t i = 0; i < bindInfoCount; i++) {
1831 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1832 }
1833}
1834
1835void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
1836 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
1837 for (uint32_t i = 0; i < bindInfoCount; i++) {
1838 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
1839 }
1840}
1841
1842void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
1843 BUFFER_STATE *buffer_state = GetBufferState(buffer);
1844 if (buffer_state) {
1845 buffer_state->requirements = *pMemoryRequirements;
1846 buffer_state->memory_requirements_checked = true;
1847 }
1848}
1849
1850void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
1851 VkMemoryRequirements *pMemoryRequirements) {
1852 RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
1853}
1854
1855void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
1856 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1857 VkMemoryRequirements2KHR *pMemoryRequirements) {
1858 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1859}
1860
1861void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
1862 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
1863 VkMemoryRequirements2KHR *pMemoryRequirements) {
1864 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
1865}
1866
void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
1868 IMAGE_STATE *image_state = GetImageState(image);
1869 if (image_state) {
1870 image_state->requirements = *pMemoryRequirements;
1871 image_state->memory_requirements_checked = true;
1872 }
1873}
1874
1875void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
1876 VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, pMemoryRequirements);
1878}
1879
1880void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
1881 VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1883}
1884
1885void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
1886 const VkImageMemoryRequirementsInfo2 *pInfo,
1887 VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
1889}
1890
1891static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
1892 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
1893 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
1894 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
1895 image_state->sparse_metadata_required = true;
1896 }
1897}
1898
1899void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
1900 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
1901 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
1902 auto image_state = GetImageState(image);
1903 image_state->get_sparse_reqs_called = true;
1904 if (!pSparseMemoryRequirements) return;
1905 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1906 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
1907 }
1908}
1909
1910void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
1911 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1912 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1913 auto image_state = GetImageState(pInfo->image);
1914 image_state->get_sparse_reqs_called = true;
1915 if (!pSparseMemoryRequirements) return;
1916 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1917 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1918 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1919 }
1920}
1921
1922void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
1923 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
1924 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
1925 auto image_state = GetImageState(pInfo->image);
1926 image_state->get_sparse_reqs_called = true;
1927 if (!pSparseMemoryRequirements) return;
1928 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
1929 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
1930 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
1931 }
1932}
1933
1934void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1935 const VkAllocationCallbacks *pAllocator) {
1936 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001937 auto shader_module_state = GetShaderModuleState(shaderModule);
1938 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001939 shaderModuleMap.erase(shaderModule);
1940}
1941
1942void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
1943 const VkAllocationCallbacks *pAllocator) {
1944 if (!pipeline) return;
1945 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
1946 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
1947 // Any bound cmd buffers are now invalid
1948 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001949 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001950 pipelineMap.erase(pipeline);
1951}
1952
1953void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1954 const VkAllocationCallbacks *pAllocator) {
1955 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001956 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
1957 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001958 pipelineLayoutMap.erase(pipelineLayout);
1959}
1960
1961void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
1962 const VkAllocationCallbacks *pAllocator) {
1963 if (!sampler) return;
1964 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
1965 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
1966 // Any bound cmd buffers are now invalid
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
        sampler_state->destroyed = true;
    }
locke-lunargd556cc32019-09-17 01:21:23 -06001971 samplerMap.erase(sampler);
1972}
1973
1974void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1975 const VkAllocationCallbacks *pAllocator) {
1976 if (!descriptorSetLayout) return;
1977 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
1978 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05001979 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001980 descriptorSetLayoutMap.erase(layout_it);
1981 }
1982}
1983
1984void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1985 const VkAllocationCallbacks *pAllocator) {
1986 if (!descriptorPool) return;
1987 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
1988 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
1989 if (desc_pool_state) {
1990 // Any bound cmd buffers are now invalid
1991 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
1992 // Free sets that were in this pool
1993 for (auto ds : desc_pool_state->sets) {
1994 FreeDescriptorSet(ds);
1995 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001996 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06001997 descriptorPoolMap.erase(descriptorPool);
1998 }
1999}
2000
2001// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
2002void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2003 const VkCommandBuffer *command_buffers) {
2004 for (uint32_t i = 0; i < command_buffer_count; i++) {
2005 auto cb_state = GetCBState(command_buffers[i]);
2006 // Remove references to command buffer's state and delete
2007 if (cb_state) {
2008 // reset prior to delete, removing various references to it.
2009 // TODO: fix this, it's insane.
2010 ResetCommandBufferState(cb_state->commandBuffer);
2011 // Remove the cb_state's references from COMMAND_POOL_STATEs
2012 pool_state->commandBuffers.erase(command_buffers[i]);
2013 // Remove the cb debug labels
2014 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
2015 // Remove CBState from CB map
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002016 cb_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002017 commandBufferMap.erase(cb_state->commandBuffer);
2018 }
2019 }
2020}
2021
2022void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2023 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2024 auto pPool = GetCommandPoolState(commandPool);
2025 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2026}
2027
2028void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2029 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2030 VkResult result) {
2031 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002032 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002033 cmd_pool_state->createFlags = pCreateInfo->flags;
2034 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
2035 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
2036}
2037
2038void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2039 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2040 VkResult result) {
2041 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002042 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002043 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002044 query_pool_state->pool = *pQueryPool;
2045 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2046 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
2047 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2048
2049 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2050 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2051 switch (counter.scope) {
2052 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2053 query_pool_state->has_perf_scope_command_buffer = true;
2054 break;
2055 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2056 query_pool_state->has_perf_scope_render_pass = true;
2057 break;
2058 default:
2059 break;
2060 }
2061 }
2062
2063 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2064 &query_pool_state->n_performance_passes);
2065 }
2066
locke-lunargd556cc32019-09-17 01:21:23 -06002067 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2068
2069 QueryObject query_obj{*pQueryPool, 0u};
2070 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2071 query_obj.query = i;
2072 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2073 }
2074}
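
// Hedged application-side sketch (hypothetical counter indices) of the performance-query chain the
// code above consumes to set has_perf_scope_* and n_performance_passes:
//
//     uint32_t counter_indices[2] = {0, 1};  // hypothetical, from
//                                            // vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
//     VkQueryPoolPerformanceCreateInfoKHR perf_info = {};
//     perf_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
//     perf_info.queueFamilyIndex = 0;
//     perf_info.counterIndexCount = 2;
//     perf_info.pCounterIndices = counter_indices;
//
//     VkQueryPoolCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
//     create_info.pNext = &perf_info;
//     create_info.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
//     create_info.queryCount = 1;
//     VkQueryPool pool = VK_NULL_HANDLE;
//     vkCreateQueryPool(device, &create_info, nullptr, &pool);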
2075
2076void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2077 const VkAllocationCallbacks *pAllocator) {
2078 if (!commandPool) return;
2079 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2080 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2081 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2082 if (cp_state) {
2083 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2084 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2085 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002086 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002087 commandPoolMap.erase(commandPool);
2088 }
2089}
2090
2091void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2092 VkCommandPoolResetFlags flags, VkResult result) {
2093 if (VK_SUCCESS != result) return;
2094 // Reset all of the CBs allocated from this pool
2095 auto command_pool_state = GetCommandPoolState(commandPool);
2096 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2097 ResetCommandBufferState(cmdBuffer);
2098 }
2099}
2100
2101void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2102 VkResult result) {
2103 for (uint32_t i = 0; i < fenceCount; ++i) {
2104 auto pFence = GetFenceState(pFences[i]);
2105 if (pFence) {
2106 if (pFence->scope == kSyncScopeInternal) {
2107 pFence->state = FENCE_UNSIGNALED;
2108 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2109 pFence->scope = kSyncScopeInternal;
2110 }
2111 }
2112 }
2113}
2114
Jeff Bolzadbfa852019-10-04 13:53:30 -05002115// For given cb_nodes, invalidate them and track object causing invalidation.
2116// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
2117// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
2118// can also unlink objects from command buffers.
2119void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
2120 const VulkanTypedHandle &obj, bool unlink) {
2121 for (const auto &cb_node_pair : cb_nodes) {
2122 auto &cb_node = cb_node_pair.first;
2123 if (cb_node->state == CB_RECORDING) {
2124 cb_node->state = CB_INVALID_INCOMPLETE;
2125 } else if (cb_node->state == CB_RECORDED) {
2126 cb_node->state = CB_INVALID_COMPLETE;
2127 }
2128 cb_node->broken_bindings.push_back(obj);
2129
2130 // if secondary, then propagate the invalidation to the primaries that will call us.
2131 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2132 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
2133 }
2134 if (unlink) {
2135 int index = cb_node_pair.second;
2136 assert(cb_node->object_bindings[index] == obj);
2137 cb_node->object_bindings[index] = VulkanTypedHandle();
2138 }
2139 }
2140 if (unlink) {
2141 cb_nodes.clear();
2142 }
2143}
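
// Illustrative walkthrough (hypothetical index): if a sampler being destroyed sits at
// object_bindings[3] of a recorded command buffer, that buffer's entry in cb_nodes carries index 3;
// the loop above marks the buffer CB_INVALID_COMPLETE, appends the sampler's handle to
// broken_bindings, and, when unlink is true, clears object_bindings[3] to an empty VulkanTypedHandle
// so destroying other objects later does not revisit the stale slot.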
2144
2145void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2146 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002147 for (auto cb_node : cb_nodes) {
2148 if (cb_node->state == CB_RECORDING) {
2149 cb_node->state = CB_INVALID_INCOMPLETE;
2150 } else if (cb_node->state == CB_RECORDED) {
2151 cb_node->state = CB_INVALID_COMPLETE;
2152 }
2153 cb_node->broken_bindings.push_back(obj);
2154
2155 // if secondary, then propagate the invalidation to the primaries that will call us.
2156 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002157 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002158 }
2159 }
2160}
2161
2162void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2163 const VkAllocationCallbacks *pAllocator) {
2164 if (!framebuffer) return;
2165 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2166 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2167 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002168 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002169 frameBufferMap.erase(framebuffer);
2170}
2171
2172void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2173 const VkAllocationCallbacks *pAllocator) {
2174 if (!renderPass) return;
2175 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2176 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2177 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002178 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002179 renderPassMap.erase(renderPass);
2180}
2181
2182void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2183 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2184 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002185 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002186 fence_state->fence = *pFence;
2187 fence_state->createInfo = *pCreateInfo;
2188 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2189 fenceMap[*pFence] = std::move(fence_state);
2190}
2191
2192bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2193 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2194 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002195 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002196 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2197 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2198 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2199 cgpl_state->pipe_state.reserve(count);
2200 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002201 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002202 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002203 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002204 }
2205 return false;
2206}
2207
2208void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2209 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2210 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2211 VkResult result, void *cgpl_state_data) {
2212 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2213 // This API may create pipelines regardless of the return value
2214 for (uint32_t i = 0; i < count; i++) {
2215 if (pPipelines[i] != VK_NULL_HANDLE) {
2216 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2217 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2218 }
2219 }
2220 cgpl_state->pipe_state.clear();
2221}
2222
2223bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2224 const VkComputePipelineCreateInfo *pCreateInfos,
2225 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002226 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002227 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2228 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2229 ccpl_state->pipe_state.reserve(count);
2230 for (uint32_t i = 0; i < count; i++) {
2231 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002232 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002233 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002234 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002235 }
2236 return false;
2237}
2238
2239void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2240 const VkComputePipelineCreateInfo *pCreateInfos,
2241 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2242 VkResult result, void *ccpl_state_data) {
2243 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2244
2245 // This API may create pipelines regardless of the return value
2246 for (uint32_t i = 0; i < count; i++) {
2247 if (pPipelines[i] != VK_NULL_HANDLE) {
2248 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2249 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2250 }
2251 }
2252 ccpl_state->pipe_state.clear();
2253}
2254
2255bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2256 uint32_t count,
2257 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2258 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002259 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002260 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2261 crtpl_state->pipe_state.reserve(count);
2262 for (uint32_t i = 0; i < count; i++) {
2263 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002264 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002265 crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002266 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002267 }
2268 return false;
2269}
2270
2271void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2272 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2273 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2274 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2275 // This API may create pipelines regardless of the return value
2276 for (uint32_t i = 0; i < count; i++) {
2277 if (pPipelines[i] != VK_NULL_HANDLE) {
2278 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2279 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2280 }
2281 }
2282 crtpl_state->pipe_state.clear();
2283}
2284
2285void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2286 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2287 VkResult result) {
    if (VK_SUCCESS != result) return;
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002289}
2290
2291void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2292 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2293 const VkAllocationCallbacks *pAllocator,
2294 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2295 if (VK_SUCCESS != result) return;
2296 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2297}
2298
2299// For repeatable sorting, not very useful for "memory in range" search
2300struct PushConstantRangeCompare {
2301 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2302 if (lhs->offset == rhs->offset) {
2303 if (lhs->size == rhs->size) {
2304 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2305 return lhs->stageFlags < rhs->stageFlags;
2306 }
2307 // If the offsets are the same then sorting by the end of range is useful for validation
2308 return lhs->size < rhs->size;
2309 }
2310 return lhs->offset < rhs->offset;
2311 }
2312};
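// For example (a sketch): under this comparator the ranges {offset=0,size=4}, {offset=0,size=16},
// {offset=16,size=4} sort in exactly that order -- by offset first, then size, then stageFlags -- so any
// two VkPipelineLayoutCreateInfos listing the same ranges in different orders canonicalize identically.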

static PushConstantRangesDict push_constant_ranges_dict;

PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
    if (!info->pPushConstantRanges) {
        // Hand back the empty entry (creating as needed)...
        return push_constant_ranges_dict.look_up(PushConstantRanges());
    }

    // Sort the input ranges to ensure equivalent ranges map to the same id
    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
        sorted.insert(info->pPushConstantRanges + i);
    }

    PushConstantRanges ranges;
    ranges.reserve(sorted.size());
    for (const auto range : sorted) {
        ranges.emplace_back(*range);
    }
    return push_constant_ranges_dict.look_up(std::move(ranges));
}

// Dictionary of canonical form of a pipeline layout's list of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}

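// Canonicalizing push-constant ranges and set layouts into interned IDs means that later "pipeline layout
// compatibility" checks (e.g., at vkCmdBindDescriptorSets time) reduce to integer comparisons of
// compat_for_set entries instead of deep structural comparisons. A minimal sketch of the consumer side
// (CompatForSet is the real helper used below; the surrounding names here are illustrative):
//
//     bool SetIsStillBound(uint32_t set, const LAST_BOUND_STATE &last_bound,
//                          const PIPELINE_LAYOUT_STATE *layout) {
//         return set < last_bound.per_set.size() &&
//                last_bound.per_set[set].compat_id_for_set == layout->compat_for_set[set];
//     }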
void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
    pipeline_layout_state->layout = *pPipelineLayout;
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkDescriptorPool *pDescriptorPool, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                               VkDescriptorPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
    // TODO: validate flags
    // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
    for (auto ds : pPool->sets) {
        FreeDescriptorSet(ds);
    }
    pPool->sets.clear();
    // Reset available count for each type and available sets for this pool
    for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
        pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
    }
    pPool->availableSets = pPool->maxSets;
}

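// The ads_state_data block is allocated by the chassis and shared between this PreCallValidate hook and
// the PostCallRecord hook below, so the per-binding counts computed here don't need to be recomputed
// after the driver call succeeds.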
bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
                                                                   const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                   VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
    // Always update common data
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);

    return false;
}

// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                  VkDescriptorSet *pDescriptorSets, VkResult result,
                                                                  void *ads_state_data) {
    if (VK_SUCCESS != result) return;
    // All the updates are contained in a single cvdescriptorset function
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
}

void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
                                                             const VkDescriptorSet *pDescriptorSets) {
    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
    // Update available descriptor sets in pool
    pool_state->availableSets += count;

    // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
    for (uint32_t i = 0; i < count; ++i) {
        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
            auto descriptor_set = setMap[pDescriptorSets[i]].get();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            FreeDescriptorSet(descriptor_set);
            pool_state->sets.erase(descriptor_set);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}

void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
                                                                  VkCommandBuffer *pCommandBuffer, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
    if (pPool) {
        for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
            // Add command buffer to its commandPool map
            pPool->commandBuffers.insert(pCommandBuffer[i]);
            auto pCB = std::make_shared<CMD_BUFFER_STATE>();
            pCB->createInfo = *pCreateInfo;
            pCB->device = device;
            pCB->command_pool = pPool;
            // Add command buffer to map
            commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
            ResetCommandBufferState(pCommandBuffer[i]);
        }
    }
}

// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
    AddCommandBufferBinding(fb_state->cb_bindings,
                            VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state), cb_state);
    // If imageless fb, skip fb binding
    if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
    const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
        auto view_state = GetAttachmentImageViewState(fb_state, attachment);
        if (view_state) {
            AddCommandBufferBindingImageView(cb_state, view_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command buffer inheriting render pass state, update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
        }
    }

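    // VkDeviceGroupCommandBufferBeginInfo restricts which physical devices in the device group may
    // execute the commands; absent that struct, default to a mask with one bit set per physical device.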
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}

void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    // Cached descriptor set validation applies only to this particular recording of this command buffer.
    for (auto descriptor_set : cb_state->validated_descriptor_sets) {
        descriptor_set->ClearCachedValidation(cb_state);
    }
    cb_state->validated_descriptor_sets.clear();
    if (VK_SUCCESS == result) {
        cb_state->state = CB_RECORDED;
    }
}

void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
                                                              VkResult result) {
    if (VK_SUCCESS == result) {
        ResetCommandBufferState(commandBuffer);
    }
}

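// Given the pipeline's dynamic-state create info, return the set of CBSTATUS_* bits that remain *static*
// (i.e., baked into the pipeline rather than supplied by vkCmdSet* calls). For example (a sketch): a
// pipeline whose pDynamicStates lists only VK_DYNAMIC_STATE_VIEWPORT yields CBSTATUS_ALL_STATE_SET with
// just CBSTATUS_VIEWPORT_SET cleared, so binding it marks everything but the viewport as already set.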
CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                default:
                    break;
            }
        }
    }

    return flags;
}

// Validation cache:
// CV is the bottommost implementor of this extension. Don't pass calls down.

// Utility function to set collective state for pipeline
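// blendConstantsEnabled is recorded so that draw-time validation can require blend constants to have been
// supplied (statically or via vkCmdSetBlendConstants) whenever a bound attachment blends against the
// constant color/alpha factors.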
void SetPipelineState(PIPELINE_STATE *pPipe) {
    // If any attachment used by this pipeline blends against a constant blend factor, set the top-level
    // blendConstantsEnabled flag
    if (pPipe->graphicsPipelineCI.pColorBlendState) {
        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
                    pPipe->blendConstantsEnabled = true;
                }
            }
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
    }
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}

void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                         uint32_t viewportCount, const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_VIEWPORT_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
                                                                   uint32_t exclusiveScissorCount,
                                                                   const VkRect2D *pExclusiveScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
                                                                    VkImageLayout imageLayout) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    if (imageView != VK_NULL_HANDLE) {
        auto view_state = GetImageViewState(imageView);
        AddCommandBufferBindingImageView(cb_state, view_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                             uint32_t viewportCount,
                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
    VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
    ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
    if (as_state != nullptr) {
        if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
            as_state->memory_requirements = *pMemoryRequirements;
            as_state->memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
            as_state->build_scratch_memory_requirements = *pMemoryRequirements;
            as_state->build_scratch_memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
            as_state->update_scratch_memory_requirements = *pMemoryRequirements;
            as_state->update_scratch_memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
                                                       as_state->requirements);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            if (enabled.gpu_validation) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                          VkAccelerationStructureNV dst,
                                                                          VkAccelerationStructureNV src,
                                                                          VkCopyAccelerationStructureModeNV mode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info = src_as_state->build_info;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            auto mem_info = GetDevMemState(mem_binding);
            if (mem_info) {
                RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
            }
        }
        ClearMemoryObjectBindings(obj_struct);
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                   uint32_t viewportCount,
                                                                   const VkViewportWScalingNV *pViewportWScalings) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                                               uint16_t lineStipplePattern) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                                        const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
    cb_state->status |= CBSTATUS_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                                                            float maxDepthBounds) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                   uint32_t compareMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t writeMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t reference) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}

// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
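// For example (a sketch of the disturbance rules): if sets 0..2 are bound and CmdBindDescriptorSets then
// rebinds set 1 with a layout whose compat_for_set[1] differs, set 1 is overwritten and set 2 becomes
// "disturbed" -- its bound_descriptor_set is dropped, so later draws must rebind it -- while set 0, which
// still matches its compat ID, is left intact.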
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}

// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}

void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this, report_data));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}

void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this commandbuffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add binding for this vertex buffer to this commandbuffer
        AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

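// Event state can't be resolved while recording (the command buffer may be submitted many times), so
// CmdSetEvent/CmdResetEvent queue lambdas in cb_state->eventUpdates that are replayed against a
// submit-local EventToStageMap when the buffer is actually submitted.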
bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
                                                QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = {queryPool, firstQuery + i};
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

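// Look up a query's state, preferring the per-submit local map over the device-global queryToStateMap:
// a query touched by the work currently being recorded/submitted shadows whatever the global map last saw.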
QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool,
                                                 uint32_t queryIndex) const {
    QueryObject query = {queryPool, queryIndex};

    const std::array<const decltype(queryToStateMap) *, 2> map_list = {localQueryToStateMap, &queryToStateMap};

    for (const auto map : map_list) {
        auto query_data = map->find(query);
        if (query_data != map->end()) {
            return query_data->second;
        }
    }
    return QUERYSTATE_UNKNOWN;
}

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            SetQueryState(query_obj, QUERYSTATE_RUNNING, localQueryToStateMap);
            return false;
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled.query_validation) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query_obj, QUERYSTATE_ENDED, localQueryToStateMap);
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled.query_validation) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back(
        [query](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query, QUERYSTATE_ENDED, localQueryToStateMap);
        });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

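    // NOTE: For non-imageless framebuffers this loop only looks up each attachment's view state; no
    // per-view state is recorded here (the views are bound to command buffers later, in
    // AddFramebufferBinding).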
    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

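// Build the subpass dependency DAG: each subpass gets a node with prev/next edge lists, and
// self-dependencies are recorded separately by dependency index. For example (a sketch): a dependency
// {srcSubpass=0, dstSubpass=1} adds 1 to node 0's "next" list and 0 to node 1's "prev" list, while
// {srcSubpass=1, dstSubpass=1} lands in self_dependencies[1]. VK_SUBPASS_EXTERNAL edges are not put in
// the graph.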
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}

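// Record whether an attachment's first use in the render pass reads or writes it; only the first subpass
// to reference the attachment sets the entry (color/resolve/depth uses count as writes, input attachment
// uses as reads).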
3292static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
3293 if (index == VK_ATTACHMENT_UNUSED) return;
3294
3295 if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
3296}
3297
3298void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
3299 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
3300 VkRenderPass *pRenderPass) {
3301 render_pass->renderPass = *pRenderPass;
3302 auto create_info = render_pass->createInfo.ptr();
3303
3304 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
3305
3306 for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
3307 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
3308 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
3309 MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
3310
3311 // resolve attachments are considered to be written
3312 if (subpass.pResolveAttachments) {
3313 MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
3314 }
3315 }
3316 if (subpass.pDepthStencilAttachment) {
3317 MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
3318 }
3319 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
3320 MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
3321 }
3322 }
3323
3324 // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
3325 renderPassMap[*pRenderPass] = std::move(render_pass);
3326}
3327
3328// Style note:
3329// Use of rvalue reference exceeds reccommended usage of rvalue refs in google style guide, but intentionally forces caller to move
3330// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
3331// construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // This is a shallow copy as that is all that is needed for now
        cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

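// Record state for vkCmdExecuteCommands: link each secondary command buffer to the primary, and propagate the secondaries'
// image layout state and deferred query/submit callbacks into the primary's state.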
void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non-const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

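// Common handler for vkBindImageMemory and vkBindImageMemory2/2KHR: an image is bound either to a swapchain image
// (VkBindImageMemorySwapchainInfoKHR in the pNext chain) or to a device memory range, and aliasing is tracked for
// images created with VK_IMAGE_CREATE_ALIAS_BIT.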
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
                                       image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
            AddAliasingImage(image_state);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

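// Common swapchain-creation record: on success the new SWAPCHAIN_NODE is installed for the surface; per the spec,
// oldSwapchain is retired even when vkCreateSwapchainKHR fails.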
void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed, but if the app does that, it's
        // confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

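// Record state for a successful vkAcquireNextImage*: the fence is treated as in-flight and the semaphore as signaled
// (ANI is not queued work, so neither can participate in a completion proof), and the image is marked as acquired.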
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties and the 2/2KHR versions
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}
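
// The two-call idiom the QUERY_COUNT/QUERY_DETAILS states track, from the application's side (illustrative only):
//     uint32_t count = 0;
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);       // recorded as QUERY_COUNT
//     std::vector<VkQueueFamilyProperties> props(count);
//     vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, props.data());  // recorded as QUERY_DETAILS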

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

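// Record each physical device that appears in any enumerated device group, initializing its feature state the same way
// vkEnumeratePhysicalDevices recording does.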
void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

bool ValidationStateTracker::PreCallValidateReleaseProfilingLockKHR(VkDevice device) const {
    bool skip = false;

    if (!performance_lock_acquired) {
        skip |= log_msg(
            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
            "VUID-vkReleaseProfilingLockKHR-device-03235",
            "The profiling lock of device must have been held via a previous successful call to vkAcquireProfilingLockKHR.");
    }

    return skip;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.host_query_reset_features.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    QueryObjectPass query_pass_obj{query_obj, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        auto query_it = queryToStateMap.find(query_obj);
        if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_pass_obj.perf_pass = passIndex;
                auto query_perf_it = queryPassToStateMap.find(query_pass_obj);
                if (query_perf_it != queryPassToStateMap.end()) query_perf_it->second = QUERYSTATE_RESET;
            }
        }
    }
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this, report_data);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    cb_state->hasDrawCmd = true;
}

4435void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
4436 uint32_t firstVertex, uint32_t firstInstance) {
4437 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4438 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4439}
4440
4441void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
4442 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
4443 uint32_t firstInstance) {
4444 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4445 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4446}
4447
4448void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4449 uint32_t count, uint32_t stride) {
4450 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4451 BUFFER_STATE *buffer_state = GetBufferState(buffer);
4452 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4453 AddCommandBufferBindingBuffer(cb_state, buffer_state);
4454}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    // Unlike the non-mesh indirect hooks above, guard against an unknown buffer handle before binding
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    // A valid SPIR-V binary always begins with the SPIR-V magic number (0x07230203)
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in the stage state being valid if the SPIR-V isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in the stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline; each use is keyed by a (set, binding) pair
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders, capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (const auto &push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}

void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this image

            // Add an imageMap entry for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;                    // set from the swapchain's pNext chain below
            image_ci.flags = VK_IMAGE_CREATE_ALIAS_BIT;  // swapchain-derived flag bits are OR'd in below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}