/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR
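
// Illustrative sketch (an assumption, not taken from this layer or any real app): the two Android
// structures the Record*ANDROID hooks above look for both ride on the VkImageCreateInfo pNext
// chain. Guarded out of the build; shown only to make the recorded state concrete.
#if 0
VkExternalFormatANDROID external_format = {VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID};
external_format.externalFormat = ahb_props.externalFormat;  // hypothetical value from vkGetAndroidHardwareBufferPropertiesANDROID

VkExternalMemoryImageCreateInfo external_memory = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO};
external_memory.pNext = &external_format;
external_memory.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

VkImageCreateInfo image_ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
image_ci.pNext = &external_memory;  // RecordCreateImageANDROID() sets imported_ahb and has_ahb_format from this chain
#endif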

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->has_ahb_format) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
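
// Minimal sketch of the pNext walk that lvl_find_in_chain<>() performs above (an assumption
// about its shape, not the helper's actual source): chase pNext links, matching on sType,
// until the requested structure type is found or the chain ends.
#if 0
template <typename T>
const T *FindInChainSketch(const void *chain, VkStructureType wanted) {
    for (auto current = reinterpret_cast<const VkBaseInStructure *>(chain); current; current = current->pNext) {
        if (current->sType == wanted) return reinterpret_cast<const T *>(current);
    }
    return nullptr;  // structure not present in this chain
}
#endif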

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Fetch the memory requirements now, in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear: because bound_images contains the cross references, a single
    // pass over the set drops every aliasing reference. No two-way clear is needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}
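
// Standalone sketch of the aliasing bookkeeping above (hypothetical AliasSketch type, for
// illustration only): AddAliasingImage inserts links pairwise, so RemoveAliasingImage only
// has to erase itself from each peer, and RemoveAliasingImages can clear each member of a
// bound_images set in a single one-way pass.
#if 0
struct AliasSketch {
    VkImage handle;
    std::unordered_set<VkImage> aliasing_images;
};
void LinkAliases(AliasSketch *a, AliasSketch *b) {
    if (a->aliasing_images.emplace(b->handle).second) {
        b->aliasing_images.emplace(a->handle);  // mirror the link, exactly as AddAliasingImage does
    }
}
#endif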

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
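
// GetObjectMemBindingImpl above is the usual trick for sharing one lookup body between const
// and non-const accessors: instantiate the template once with const pointer types and once
// with mutable ones. A generic sketch of the same pattern (hypothetical names):
#if 0
template <typename MapPtr, typename ValPtr>
static ValPtr FindOrNullSketch(MapPtr map, uint64_t key) {
    auto it = map->find(key);
    return (it == map->end()) ? nullptr : &it->second;
}
// const caller:     FindOrNullSketch<const MyMap *, const MyVal *>(&my_map, key)
// non-const caller: FindOrNullSketch<MyMap *, MyVal *>(&my_map, key)
#endif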

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}
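
// Illustrative app-side chain that AddMemObjInfo() above records (a sketch, not from any real
// application): a dedicated allocation names exactly one image or buffer on the
// VkMemoryAllocateInfo pNext chain.
#if 0
VkMemoryDedicatedAllocateInfo dedicated = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
dedicated.image = image;  // hypothetical VkImage; dedicated.buffer stays VK_NULL_HANDLE

VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
alloc_info.pNext = &dedicated;  // AddMemObjInfo() will set is_dedicated and record the handles
#endif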

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    AddCommandBufferBinding(pMemInfo->cb_bindings,
                                            VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
                }
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}

// For the NULL mem case, clear any previous binding. Otherwise:
// make sure the given object is in its object map,
// if a previous binding existed, update the binding,
// add a reference from the objectInfo to the memoryInfo, and
// add a reference off of the object's binding info.
// Returns "skip" (currently always false).
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = false;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}
662
locke-lunargd556cc32019-09-17 01:21:23 -0600663void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
664 auto &state = cb_state->lastBound[bind_point];
665 PIPELINE_STATE *pPipe = state.pipeline_state;
666 if (VK_NULL_HANDLE != state.pipeline_layout) {
667 for (const auto &set_binding_pair : pPipe->active_slots) {
668 uint32_t setIndex = set_binding_pair.first;
669 // Pull the set node
670 cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
671 if (!descriptor_set->IsPushDescriptor()) {
672 // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
673
674 // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
675 // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
676 cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
677 const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);
678
679 if (reduced_map.IsManyDescriptors()) {
680 // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
681 descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
682 }
683
684 // We can skip updating the state if "nothing" has changed since the last validation.
685 // See CoreChecks::ValidateCmdBufDrawState for more details.
Jeff Bolz56308942019-10-06 22:05:23 -0500686 bool descriptor_set_changed =
locke-lunargd556cc32019-09-17 01:21:23 -0600687 !reduced_map.IsManyDescriptors() ||
688 // Update if descriptor set (or contents) has changed
689 state.per_set[setIndex].validated_set != descriptor_set ||
690 state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
691 (!disabled.image_layout_validation &&
Jeff Bolz56308942019-10-06 22:05:23 -0500692 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
693 bool need_update = descriptor_set_changed ||
694 // Update if previous bindingReqMap doesn't include new bindingReqMap
695 !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
696 state.per_set[setIndex].validated_set_binding_req_map.end(),
697 binding_req_map.begin(), binding_req_map.end());
locke-lunargd556cc32019-09-17 01:21:23 -0600698
699 if (need_update) {
700 // Bind this set and its active descriptor resources to the command buffer
Jeff Bolz56308942019-10-06 22:05:23 -0500701 if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
702 // Only record the bindings that haven't already been recorded
703 BindingReqMap delta_reqs;
704 std::set_difference(binding_req_map.begin(), binding_req_map.end(),
705 state.per_set[setIndex].validated_set_binding_req_map.begin(),
706 state.per_set[setIndex].validated_set_binding_req_map.end(),
707 std::inserter(delta_reqs, delta_reqs.begin()));
locke-gb3ce08f2019-09-30 12:30:56 -0600708 descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
Jeff Bolz56308942019-10-06 22:05:23 -0500709 } else {
locke-gb3ce08f2019-09-30 12:30:56 -0600710 descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
Jeff Bolz56308942019-10-06 22:05:23 -0500711 }
locke-lunargd556cc32019-09-17 01:21:23 -0600712
713 state.per_set[setIndex].validated_set = descriptor_set;
714 state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
715 state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
716 if (reduced_map.IsManyDescriptors()) {
717 // Check whether old == new before assigning, the equality check is much cheaper than
718 // freeing and reallocating the map.
719 if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
720 state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
721 }
722 } else {
723 state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
724 }
725 }
726 }
727 }
728 }
729 if (!pPipe->vertex_binding_descriptions_.empty()) {
730 cb_state->vertex_buffer_used = true;
731 }
732}
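
// The need_update computation above leans on two ordered-range algorithms; both require sorted
// input, which the ordered BindingReqMap provides. A toy sketch with std::map standing in for
// the real map type (illustrative values only):
#if 0
std::map<uint32_t, int> validated = {{0, 0}, {1, 0}};          // what was last recorded
std::map<uint32_t, int> requested = {{0, 0}, {1, 0}, {2, 0}};  // what this draw needs
// Subset test: false here because binding 2 was never validated, so an update is needed.
bool covered = std::includes(validated.begin(), validated.end(), requested.begin(), requested.end());
// Delta extraction: record only the not-yet-recorded entries; delta == {{2, 0}}.
std::map<uint32_t, int> delta;
std::set_difference(requested.begin(), requested.end(), validated.begin(), validated.end(),
                    std::inserter(delta, delta.begin()));
#endif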

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}
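
// Sketch of the two-sided bookkeeping above with standard containers (illustrative; the
// layer's small_unordered_map just optimizes the common few-bindings case): each object maps
// a bound command buffer to the index of its handle inside that command buffer's
// object_bindings vector, so the reverse link can later be located without a scan.
#if 0
std::unordered_map<CMD_BUFFER_STATE *, int> bindings;  // lives on the state object
auto inserted = bindings.insert({cb_node, -1});
if (inserted.second) {                                 // first binding of this object to cb_node
    cb_node->object_bindings.push_back(obj);
    inserted.first->second = static_cast<int>(cb_node->object_bindings.size()) - 1;
}
#endif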

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
949
950void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
951 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
952 VkResult result) {
953 if (VK_SUCCESS != result) return;
954
955 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
956 if (nullptr == enabled_features_found) {
957 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
958 if (features2) {
959 enabled_features_found = &(features2->features);
960 }
961 }
962
963 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
964 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
965 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
966
967 if (nullptr == enabled_features_found) {
968 state_tracker->enabled_features.core = {};
969 } else {
970 state_tracker->enabled_features.core = *enabled_features_found;
971 }
972
973 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
974 // previously set them through an explicit API call.
975 uint32_t count;
976 auto pd_state = GetPhysicalDeviceState(gpu);
977 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
978 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
979 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
980 // Save local link to this device's physical device state
981 state_tracker->physical_device_state = pd_state;
982
983 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
984 state_tracker->physical_device_count =
985 device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;
986
987 const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
988 if (descriptor_indexing_features) {
989 state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
990 }
991
992 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
993 if (eight_bit_storage_features) {
994 state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
995 }
996
997 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
998 if (exclusive_scissor_features) {
999 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1000 }
1001
1002 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1003 if (shading_rate_image_features) {
1004 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1005 }
1006
1007 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1008 if (mesh_shader_features) {
1009 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1010 }
1011
1012 const auto *inline_uniform_block_features =
1013 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1014 if (inline_uniform_block_features) {
1015 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1016 }
1017
1018 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1019 if (transform_feedback_features) {
1020 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1021 }
1022
1023 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
1024 if (float16_int8_features) {
1025 state_tracker->enabled_features.float16_int8 = *float16_int8_features;
1026 }
1027
1028 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1029 if (vtx_attrib_div_features) {
1030 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1031 }
1032
1033 const auto *uniform_buffer_standard_layout_features =
1034 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
1035 if (uniform_buffer_standard_layout_features) {
1036 state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
1037 }
1038
1039 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
1040 if (scalar_block_layout_features) {
1041 state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
1042 }
1043
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001044 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR>(pCreateInfo->pNext);
1045 if (buffer_device_address) {
1046 state_tracker->enabled_features.buffer_device_address = *buffer_device_address;
1047 }
1048
1049 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1050 if (buffer_device_address_ext) {
1051 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001052 }
1053
1054 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1055 if (cooperative_matrix_features) {
1056 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1057 }
1058
locke-lunargd556cc32019-09-17 01:21:23 -06001059 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
1060 if (host_query_reset_features) {
1061 state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
1062 }
1063
1064 const auto *compute_shader_derivatives_features =
1065 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1066 if (compute_shader_derivatives_features) {
1067 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1068 }
1069
1070 const auto *fragment_shader_barycentric_features =
1071 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1072 if (fragment_shader_barycentric_features) {
1073 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1074 }
1075
1076 const auto *shader_image_footprint_features =
1077 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1078 if (shader_image_footprint_features) {
1079 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1080 }
1081
1082 const auto *fragment_shader_interlock_features =
1083 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1084 if (fragment_shader_interlock_features) {
1085 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1086 }
1087
1088 const auto *demote_to_helper_invocation_features =
1089 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1090 if (demote_to_helper_invocation_features) {
1091 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1092 }
1093
1094 const auto *texel_buffer_alignment_features =
1095 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1096 if (texel_buffer_alignment_features) {
1097 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1098 }
1099
1100 const auto *imageless_framebuffer_features =
1101 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
1102 if (imageless_framebuffer_features) {
1103 state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
1104 }
1105
1106 const auto *pipeline_exe_props_features =
1107 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1108 if (pipeline_exe_props_features) {
1109 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1110 }
1111
Jeff Bolz82f854d2019-09-17 14:56:47 -05001112 const auto *dedicated_allocation_image_aliasing_features =
1113 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1114 if (dedicated_allocation_image_aliasing_features) {
1115 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1116 *dedicated_allocation_image_aliasing_features;
1117 }
1118
Jeff Bolz526f2d52019-09-18 13:18:08 -05001119 const auto *subgroup_extended_types_features =
1120 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(pCreateInfo->pNext);
1121 if (subgroup_extended_types_features) {
1122 state_tracker->enabled_features.subgroup_extended_types_features = *subgroup_extended_types_features;
1123 }
1124
Piers Daniell9af77cd2019-10-16 13:54:12 -06001125 const auto *separate_depth_stencil_layouts_features =
1126 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>(pCreateInfo->pNext);
1127 if (separate_depth_stencil_layouts_features) {
1128 state_tracker->enabled_features.separate_depth_stencil_layouts_features = *separate_depth_stencil_layouts_features;
1129 }
1130
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001131 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1132 if (performance_query_features) {
1133 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1134 }
1135
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001136 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR>(pCreateInfo->pNext);
1137 if (timeline_semaphore_features) {
1138 state_tracker->enabled_features.timeline_semaphore_features = *timeline_semaphore_features;
1139 }
1140
Tobias Hector782bcde2019-11-28 16:19:42 +00001141 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1142 if (device_coherent_memory_features) {
1143 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1144 }
1145
locke-lunargd556cc32019-09-17 01:21:23 -06001146 // Store physical device properties and physical device mem limits into CoreChecks structs
1147 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1148 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
1149
    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &phys_dev_props->timeline_semaphore_props);
    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}
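
// The feature-chain pattern recorded above mirrors what applications do at device
// creation time. A minimal, illustrative sketch (not taken from this file; names such
// as `physical_device` are assumptions) of enabling timeline semaphores:
//
//     VkPhysicalDeviceTimelineSemaphoreFeaturesKHR timeline_features = {};
//     timeline_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;
//     timeline_features.timelineSemaphore = VK_TRUE;
//
//     VkDeviceCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
//     create_info.pNext = &timeline_features;  // lvl_find_in_chain() above walks this list
//     // ... queue create infos, extensions ...
//     vkCreateDevice(physical_device, &create_info, nullptr, &device);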

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}
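
// Added commentary: the reset pass above runs before the object maps are cleared
// because ResetCommandBufferState() unlinks each command buffer from the state objects
// it references (per the original comment); emptying the maps first would prevent
// those cross-references from being cleanly undone.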

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO: We should be able to remove the NULL look-up checks from the code below once
    // all the corresponding cases are verified to put the command buffer into a CB_INVALID
    // state, which should then be flagged before this function is called.
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &semaphore : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;

                    // Guard against a destroyed query pool before dereferencing its state
                    const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryStatePair.first.pool);
                    if (qp_state && qp_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR)
                        queryPassToStateMap[QueryObjectPass(queryStatePair.first, submission.perf_submit_pass)] =
                            QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}
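
// Added commentary, a worked example of the roll-forward above (values assumed for
// illustration): suppose queue Q has seq == 3 with two pending submissions S4 and S5,
// and S5 waited on a semaphore signaled by queue P at P's seq 7. Calling
// RetireWorkOnQueue(Q, 5) pops S4 then S5, releasing their in_use references, and
// records {P: 7} in otherQueueSeqs; the final loop then retires P's work up to seq 7,
// since that work must have completed for the semaphore wait to have been satisfied.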

// Submit a fence to a queue, delimiting previous fences and previously untracked
// work by this fence.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}
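
// Added commentary, with assumed example values: if the queue has retired up to
// seq == 10 (pQueue->seq), has 2 submissions still pending, and this vkQueueSubmit
// adds 3 more, the fence's signaling sequence number is 10 + 2 + 3 == 15, i.e. the
// fence is considered signaled once the queue's retired seq reaches 15.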

void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, since we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<VkSemaphore> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                        pSemaphore->signaled = true;
                    } else {
                        pSemaphore->payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(semaphore);
                } else {
                    // Retire work up until this submit early, since we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }
        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                EventToStageMap localEventToStageMap;
                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
                                         perf_submit ? perf_submit->counterPassIndex : 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
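
// Added commentary: the pSignalSemaphoreValues lookup above corresponds to an app-side
// submit such as the following illustrative sketch (variable names are assumptions):
//
//     uint64_t signal_value = 2;
//     VkTimelineSemaphoreSubmitInfoKHR timeline_info = {};
//     timeline_info.sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR;
//     timeline_info.signalSemaphoreValueCount = 1;
//     timeline_info.pSignalSemaphoreValues = &signal_value;
//
//     VkSubmitInfo submit_info = {};
//     submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
//     submit_info.pNext = &timeline_info;  // found by lvl_find_in_chain above
//     submit_info.signalSemaphoreCount = 1;
//     submit_info.pSignalSemaphores = &timeline_semaphore;
//     vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);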

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            bindable_state->binding.mem = MEMORY_UNBOUND;
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, since we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size computation is broken for non-opaque bindings; it assumes a tightly
                // packed 4-byte texel (e.g. a 64x64x1 region is recorded as 64*64*1*4 = 16384 bytes),
                // which is wrong for other texel sizes and ignores the image's tiling layout. Update
                // to comprehend full sparse binding data.
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<VkSemaphore> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(semaphore);
                } else {
                    // Retire work up until this submit early, since we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
    semaphore_state->payload = 0;
    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
    if (semaphore_type_create_info) {
        semaphore_state->type = semaphore_type_create_info->semaphoreType;
        semaphore_state->payload = semaphore_type_create_info->initialValue;
    }
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}
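
// Added commentary: the VkSemaphoreTypeCreateInfoKHR chain handled above is populated
// by app code along these lines (an illustrative sketch; variable names are assumptions):
//
//     VkSemaphoreTypeCreateInfoKHR type_info = {};
//     type_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 0;  // recorded as semaphore_state->payload
//
//     VkSemaphoreCreateInfo create_info = {};
//     create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
//     create_info.pNext = &type_info;
//     vkCreateSemaphore(device, &create_info, nullptr, &timeline_semaphore);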

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}
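
// Added commentary: sync-fd imports are forced to temporary scope because the Vulkan
// spec defines VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT imports as having
// temporary permanence; after the imported payload is consumed by a wait, the
// semaphore's prior state is restored, which is why the submit paths above demote
// kSyncScopeExternalTemporary back to kSyncScopeInternal once the wait is processed.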

void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
                                                              VkResult result) {
    if (VK_SUCCESS != result) return;
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    if (pSemaphore) {
        pSemaphore->payload = pSignalInfo->value;
    }
}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

void ValidationStateTracker::RetireFence(VkFence fence) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaler is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
        } else {
            // Fence signaler is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            pFence->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled
    // here. For the app to guarantee which fences completed, it will have to call
    // vkGetFenceStatus(), at which point we'll clean/remove their CBs if complete.
}

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with given handle is being bound to memory w/ given mem_info struct.
// The state tracker only records which objects are bound to a given allocation;
// the memoryOffset, memRequirements, and is_linear parameters are currently unused
// here, and this function performs no overlap validation and returns nothing.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.handle);
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.handle);
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
                                                    VkMemoryRequirements mem_reqs, bool is_linear) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
                                                     const VkMemoryRequirements &mem_reqs) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
}

// Remove the handle from the appropriate bound-object set on the allocation.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.handle);
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.handle);
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track bound memory range information
        auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
        }
        // Track objects tied to memory
        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->requirements = *pMemoryRequirements;
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        image_state->requirements = *pMemoryRequirements;
        image_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->destroyed = true;
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
    pipeline_state->destroyed = true;
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->destroyed = true;
    pipelineLayoutMap.erase(pipelineLayout);
}

void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid; keep the destroyed-flag write inside the
    // null check so an unknown handle cannot cause a null dereference.
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
        sampler_state->destroyed = true;
    }
    samplerMap.erase(sampler);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second->destroyed = true;
        descriptorSetLayoutMap.erase(layout_it);
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
    if (desc_pool_state) {
        // Any bound cmd buffers are now invalid
        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
        // Free sets that were in this pool
        for (auto ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->destroyed = true;
        descriptorPoolMap.erase(descriptorPool);
    }
}

// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}

void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pPool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
}

void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
    cmd_pool_state->createFlags = pCreateInfo->flags;
    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
    commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
}

void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];

        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
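
// Added commentary: n_performance_passes recorded above is the number of times the app
// must re-submit the same workload to collect all requested counters; the per-pass
// index travels with each submission via VkPerformanceQuerySubmitInfoKHR's
// counterPassIndex (see PostCallRecordQueueSubmit), which is how queryPassToStateMap
// entries are keyed per pass.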

void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->destroyed = true;
        commandPoolMap.erase(commandPool);
    }
}

void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
                                                            VkCommandPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    // Reset all of the CBs allocated from this pool
    auto command_pool_state = GetCommandPoolState(commandPool);
    for (auto cmdBuffer : command_pool_state->commandBuffers) {
        ResetCommandBufferState(cmdBuffer);
    }
}

void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                       VkResult result) {
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto pFence = GetFenceState(pFences[i]);
        if (pFence) {
            if (pFence->scope == kSyncScopeInternal) {
                pFence->state = FENCE_UNSIGNALED;
            } else if (pFence->scope == kSyncScopeExternalTemporary) {
                pFence->scope = kSyncScopeInternal;
            }
        }
    }
}

// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}
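
// Added commentary, a concrete scenario for the invalidation above: if an app records
// vkCmdBindPipeline(cb, ...) into a command buffer and then destroys that pipeline,
// PreCallRecordDestroyPipeline calls InvalidateCommandBuffers with the pipeline's
// handle; the command buffer moves to CB_INVALID_COMPLETE (or CB_INVALID_INCOMPLETE if
// still recording), and a later submit of that command buffer can be flagged against
// the broken_bindings list recorded here.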

void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
                                                            const VulkanTypedHandle &obj) {
    for (auto cb_node : cb_nodes) {
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!framebuffer) return;
    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
    InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
    framebuffer_state->destroyed = true;
    frameBufferMap.erase(framebuffer);
}

void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!renderPass) return;
    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
    InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
    rp_state->destroyed = true;
    renderPassMap.erase(renderPass);
}

void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto fence_state = std::make_shared<FENCE_STATE>();
    fence_state->fence = *pFence;
    fence_state->createInfo = *pCreateInfo;
    fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
    fenceMap[*pFence] = std::move(fence_state);
}

bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   VkResult result, void *cgpl_state_data) {
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
        }
    }
    cgpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   void *ccpl_state_data) const {
    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    ccpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                  VkResult result, void *ccpl_state_data) {
    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);

    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
        }
    }
    ccpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
                                                                        uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
                                                         VkResult result) {
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}
2303
2304// For repeatable sorting, not very useful for "memory in range" search
2305struct PushConstantRangeCompare {
2306 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2307 if (lhs->offset == rhs->offset) {
2308 if (lhs->size == rhs->size) {
2309 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2310 return lhs->stageFlags < rhs->stageFlags;
2311 }
2312 // If the offsets are the same then sorting by the end of range is useful for validation
2313 return lhs->size < rhs->size;
2314 }
2315 return lhs->offset < rhs->offset;
2316 }
2317};
2318
2319static PushConstantRangesDict push_constant_ranges_dict;
2320
2321PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2322 if (!info->pPushConstantRanges) {
2323 // Hand back the empty entry (creating as needed)...
2324 return push_constant_ranges_dict.look_up(PushConstantRanges());
2325 }
2326
2327 // Sort the input ranges to ensure equivalent ranges map to the same id
2328 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2329 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2330 sorted.insert(info->pPushConstantRanges + i);
2331 }
2332
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002333 PushConstantRanges ranges;
2334 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002335 for (const auto range : sorted) {
2336 ranges.emplace_back(*range);
2337 }
2338 return push_constant_ranges_dict.look_up(std::move(ranges));
2339}
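
// Illustrative sketch (hypothetical values): because the ranges are sorted
// before canonicalization, two layouts declaring the same ranges in different
// orders map to the same id, so later compatibility checks reduce to an id
// comparison:
//
//   VkPushConstantRange a[] = {{VK_SHADER_STAGE_VERTEX_BIT, 0, 16},
//                              {VK_SHADER_STAGE_FRAGMENT_BIT, 16, 16}};
//   VkPushConstantRange b[] = {{VK_SHADER_STAGE_FRAGMENT_BIT, 16, 16},
//                              {VK_SHADER_STAGE_VERTEX_BIT, 0, 16}};
//   // GetCanonicalId on create infos using a and b yields the same
//   // PushConstantRangesId.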

// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}

void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
    pipeline_layout_state->layout = *pPipelineLayout;
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}
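
// Illustrative note: Vulkan's "compatible for set N" rules require matching
// push constant ranges and matching descriptor set layouts for sets 0..N.
// Since both pieces are canonicalized above, checking whether two layouts are
// compatible for set N collapses to comparing their compat_for_set[N] ids
// instead of deep-comparing the layouts on every bind.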

void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkDescriptorPool *pDescriptorPool, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                               VkDescriptorPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
    // TODO: validate flags
    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
    for (auto ds : pPool->sets) {
        FreeDescriptorSet(ds);
    }
    pPool->sets.clear();
    // Reset available count for each type and available sets for this pool
    for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
        pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
    }
    pPool->availableSets = pPool->maxSets;
}

bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
                                                                   const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                   VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
    // Always update common data
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);

    return false;
}

// Allocation state was good and the down-chain call was made, so update state based on the allocated descriptor sets
void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                  VkDescriptorSet *pDescriptorSets, VkResult result,
                                                                  void *ads_state_data) {
    if (VK_SUCCESS != result) return;
    // All the updates are contained in a single cvdescriptorset function
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
}

void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
                                                             const VkDescriptorSet *pDescriptorSets) {
    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
    // Update available descriptor sets in pool
    pool_state->availableSets += count;

    // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and setMap
    for (uint32_t i = 0; i < count; ++i) {
        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
            auto descriptor_set = setMap[pDescriptorSets[i]].get();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            FreeDescriptorSet(descriptor_set);
            pool_state->sets.erase(descriptor_set);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}

void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
                                                                  VkCommandBuffer *pCommandBuffer, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
    if (pPool) {
        for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
            // Add command buffer to its commandPool map
            pPool->commandBuffers.insert(pCommandBuffer[i]);
            auto pCB = std::make_shared<CMD_BUFFER_STATE>();
            pCB->createInfo = *pCreateInfo;
            pCB->device = device;
            pCB->command_pool = pPool;
            // Add command buffer to map
            commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
            ResetCommandBufferState(pCommandBuffer[i]);
        }
    }
}

// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
    AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
                            cb_state);
    // If imageless fb, skip fb binding
    if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
    const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
        auto view_state = GetAttachmentImageViewState(fb_state, attachment);
        if (view_state) {
            AddCommandBufferBindingImageView(cb_state, view_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command buffer and inheriting, update the items we should inherit
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
        }
    }

    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        // No device group info chained: default to all physical devices in the group
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}

void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    // Cached validation is specific to a specific recording of a specific command buffer.
    for (auto descriptor_set : cb_state->validated_descriptor_sets) {
        descriptor_set->ClearCachedValidation(cb_state);
    }
    cb_state->validated_descriptor_sets.clear();
    if (VK_SUCCESS == result) {
        cb_state->state = CB_RECORDED;
    }
}

void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
                                                              VkResult result) {
    if (VK_SUCCESS == result) {
        ResetCommandBufferState(commandBuffer);
    }
}

CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // Initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                default:
                    break;
            }
        }
    }

    return flags;
}
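
// Illustrative example: for a pipeline created with
//   VkDynamicState dyn[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
// the mask returned above has CBSTATUS_VIEWPORT_SET and CBSTATUS_SCISSOR_SET
// cleared, so those bits only turn on again once vkCmdSetViewport and
// vkCmdSetScissor are recorded; all other state is treated as baked into the
// pipeline (static).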

// Validation cache:
// CV is the bottommost implementor of this extension. Don't pass calls down.

// Utility function to set collective state for pipeline
void SetPipelineState(PIPELINE_STATE *pPipe) {
    // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
    if (pPipe->graphicsPipelineCI.pColorBlendState) {
        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
                // The four constant-color blend factors are contiguous in the VkBlendFactor enum, so range checks suffice
                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
                    pPipe->blendConstantsEnabled = true;
                }
            }
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
    }
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}

void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                         uint32_t viewportCount, const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_VIEWPORT_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
                                                                   uint32_t exclusiveScissorCount,
                                                                   const VkRect2D *pExclusiveScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
                                                                    VkImageLayout imageLayout) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    if (imageView != VK_NULL_HANDLE) {
        auto view_state = GetImageViewState(imageView);
        AddCommandBufferBindingImageView(cb_state, view_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                             uint32_t viewportCount,
                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
    VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
    ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
    if (as_state != nullptr) {
        if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
            as_state->memory_requirements = *pMemoryRequirements;
            as_state->memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
            as_state->build_scratch_memory_requirements = *pMemoryRequirements;
            as_state->build_scratch_memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
            as_state->update_scratch_memory_requirements = *pMemoryRequirements;
            as_state->update_scratch_memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
                                                       as_state->requirements);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));

            // GPU validation of top level acceleration structure building needs acceleration structure handles.
            // The handle is an 8-byte opaque value, fetched here so it is available at validation time.
            if (enabled.gpu_validation) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                          VkAccelerationStructureNV dst,
                                                                          VkAccelerationStructureNV src,
                                                                          VkCopyAccelerationStructureModeNV mode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info = src_as_state->build_info;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            auto mem_info = GetDevMemState(mem_binding);
            if (mem_info) {
                RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
            }
        }
        ClearMemoryObjectBindings(obj_struct);
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                   uint32_t viewportCount,
                                                                   const VkViewportWScalingNV *pViewportWScalings) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                                               uint16_t lineStipplePattern) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                                        const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
    cb_state->status |= CBSTATUS_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                                                            float maxDepthBounds) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                   uint32_t compareMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t writeMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t reference) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}

// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" sets before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
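
// Illustrative note: binding set 1 with a layout whose compat id for set 1
// differs from what was previously bound "disturbs" the higher-numbered sets,
// which are dropped by the resize above, and any lower-numbered set with a
// mismatched compat id is invalidated; a subsequent draw that uses those sets
// must rebind them.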

// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}

void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}

void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this command buffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add binding for this vertex buffer to this command buffer
        AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
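
// Note: the lambda captured above is not evaluated at record time. The
// eventUpdates entries are replayed against a per-submission event-to-stage
// map when the command buffer is submitted, so event state is resolved in
// submission order rather than in recording order.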

void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
                                                QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = {queryPool, firstQuery + i};
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool,
                                                 uint32_t queryIndex) const {
    QueryObject query = {queryPool, queryIndex};

    const std::array<const decltype(queryToStateMap) *, 2> map_list = {localQueryToStateMap, &queryToStateMap};

    for (const auto map : map_list) {
        auto query_data = map->find(query);
        if (query_data != map->end()) {
            return query_data->second;
        }
    }
    return QUERYSTATE_UNKNOWN;
}
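
// Note: the local (per-submission) map is consulted before the device-level
// queryToStateMap, so query state produced earlier in the same submission
// shadows whatever the tracker last recorded globally.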

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            SetQueryState(query_obj, QUERYSTATE_RUNNING, localQueryToStateMap);
            return false;
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled.query_validation) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query_obj, QUERYSTATE_ENDED, localQueryToStateMap);
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled.query_validation) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back(
        [query](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query, QUERYSTATE_ENDED, localQueryToStateMap);
        });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}
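
// Illustrative example: for a render pass with pDependencies describing
// subpass 0 -> subpass 1 and a self-dependency 1 -> 1, the DAG built above is
//   subpass_to_node[0].next == {1}, subpass_to_node[1].prev == {0}
//   self_dependencies[1] == {i}   // i = index of the 1 -> 1 dependency entry
// External dependencies (VK_SUBPASS_EXTERNAL) are excluded from the graph.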

static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
    if (index == VK_ATTACHMENT_UNUSED) return;

    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(rp_version, create_info, render_pass.get());

    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);

            // resolve attachments are considered to be written
            if (subpass.pResolveAttachments) {
                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
            }
        }
        if (subpass.pDepthStencilAttachment) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
        }
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
        }
    }

    // render_pass is a mutable reference to a shared_ptr, so std::move is still required for move assignment to be invoked
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Having the caller build the state into a shared_ptr that the callee moves from exceeds recommended usage of mutable reference
// parameters in the google style guide, but intentionally forces the caller to hand off ownership. This is clearer than passing a
// pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}
3352
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // This is a shallow copy as that is all that is needed for now
        cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

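// Executing secondary command buffers mutates primary command buffer state: a secondary recorded without
// SIMULTANEOUS_USE clears that flag on the primary, and image layouts plus deferred query/submit-time work propagate
// from each secondary into the primary.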
void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

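// Common handler for vkBindImageMemory and vkBindImageMemory2(KHR): an image is bound either to a swapchain image
// (via VkBindImageMemorySwapchainInfoKHR) or to device memory, and aliased images are tracked in both cases.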
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
                                       image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
            AddAliasingImage(image_state);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

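// Fence import semantics: sync FD handle types (copy transference) and VK_FENCE_IMPORT_TEMPORARY_BIT_KHR only affect
// the fence temporarily; any other import permanently moves the fence to external scope.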
void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

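// Common state update for the swapchain creation entry points; also retires the old swapchain, which the spec requires
// even when creation fails.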
void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
}

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

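// Present unsignals the wait semaphores and marks each image whose present succeeded (VK_SUCCESS or VK_SUBOPTIMAL_KHR)
// as released back to the WSI.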
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed; but if the app does
        // that, it's confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

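// Shared handler for vkAcquireNextImageKHR and vkAcquireNextImage2KHR: the fence and semaphore handed to acquire may
// be waited on immediately, so treat them as in-flight/signaled, and mark the returned image as acquired.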
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;  // Match the other capability queries: don't record state on failure
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

bool ValidationStateTracker::PreCallValidateReleaseProfilingLockKHR(VkDevice device) const {
    bool skip = false;

    if (!performance_lock_acquired) {
        skip |= log_msg(
            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
            "VUID-vkReleaseProfilingLockKHR-device-03235",
            "The profiling lock of device must have been held via a previous successful call to vkAcquireProfilingLockKHR.");
    }

    return skip;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

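// Shared handler for vkUpdateDescriptorSetWithTemplate(KHR): decode the template and apply it as ordinary descriptor
// writes. Push-descriptor templates are handled at command-record time in PreCallRecordCmdPushDescriptorSetWithTemplateKHR.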
void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

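// vkResetQueryPoolEXT resets queries from the host: mark each query in the range (and, for performance query pools,
// each per-pass entry) as QUERYSTATE_RESET.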
4359void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4360 uint32_t queryCount) {
4361 // Do nothing if the feature is not enabled.
4362 if (!enabled_features.host_query_reset_features.hostQueryReset) return;
4363
4364 // Do nothing if the query pool has been destroyed.
4365 auto query_pool_state = GetQueryPoolState(queryPool);
4366 if (!query_pool_state) return;
4367
4368 // Reset the state of existing entries.
4369 QueryObject query_obj{queryPool, 0};
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004370 QueryObjectPass query_pass_obj{query_obj, 0};
locke-lunargd556cc32019-09-17 01:21:23 -06004371 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
4372 for (uint32_t i = 0; i < max_query_count; ++i) {
4373 query_obj.query = firstQuery + i;
4374 auto query_it = queryToStateMap.find(query_obj);
4375 if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004376 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
4377 for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
4378 query_pass_obj.perf_pass = passIndex;
4379 auto query_perf_it = queryPassToStateMap.find(query_pass_obj);
4380 if (query_perf_it != queryPassToStateMap.end()) query_perf_it->second = QUERYSTATE_RESET;
4381 }
4382 }
locke-lunargd556cc32019-09-17 01:21:23 -06004383 }
4384}
4385
4386void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
4387 const TEMPLATE_STATE *template_state, const void *pData) {
4388 // Translate the templated update into a normal update for validation...
4389 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
4390 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
4391 decoded_update.desc_writes.data(), 0, NULL);
4392}
4393
4394// Update the common AllocateDescriptorSetsData
4395void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05004396 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06004397 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05004398 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004399 if (layout) {
4400 ds_data->layout_nodes[i] = layout;
4401 // Count total descriptors required per type
4402 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
4403 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
4404 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
4405 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
4406 }
4407 }
4408 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
4409 }
4410}
4411
4412// Decrement allocated sets from the pool and insert new sets into set_map
4413void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
4414 const VkDescriptorSet *descriptor_sets,
4415 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
4416 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
4417 // Account for sets and individual descriptors allocated from pool
4418 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
4419 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
4420 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
4421 }
4422
4423 const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
4424 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
4425
4426 // Create tracking object for each descriptor set; insert into global map and the pool's set.
4427 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
4428 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
4429
Jeff Bolz41a1ced2019-10-11 11:40:49 -05004430 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
John Zulaufd2c3dae2019-12-12 11:02:17 -07004431 variable_count, this);
locke-lunargd556cc32019-09-17 01:21:23 -06004432 pool_state->sets.insert(new_ds.get());
4433 new_ds->in_use.store(0);
4434 setMap[descriptor_sets[i]] = std::move(new_ds);
4435 }
4436}
4437
4438// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
4439void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
4440 UpdateDrawState(cb_state, bind_point);
4441 cb_state->hasDispatchCmd = true;
4442}
4443
locke-lunargd556cc32019-09-17 01:21:23 -06004444// Generic function to handle state update for all CmdDraw* type functions
4445void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
4446 UpdateStateCmdDrawDispatchType(cb_state, bind_point);
locke-lunargd556cc32019-09-17 01:21:23 -06004447 cb_state->hasDrawCmd = true;
4448}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}
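
// Illustrative note (not part of the layer): AddCommandBufferBindingBuffer() links the indirect
// parameter buffer to the command buffer's state so that destroying the buffer later can
// invalidate the recorded command buffer. A rough sketch of the relationship:
//   cb_state <---binding---> buffer_state   // destroying either side updates the other's tracking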

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}
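
// Note: the *IndirectCountKHR variants above track two buffers: the indirect parameter buffer and
// the separate count buffer read at countBufferOffset. Both are bound to the command buffer so
// that either one being destroyed invalidates the recording.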

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}
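
// Note: unlike the KHR indirect-count paths above, the mesh-task paths null-check the
// BUFFER_STATE pointers before binding them; GetBufferState() returns null for handles it does
// not know about, so these records stay safe even for an invalid buffer handle.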

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
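
// Illustrative note (not part of the layer): pCode[0] == spv::MagicNumber (0x07230203) is how the
// tracker distinguishes real SPIR-V from other payloads (e.g. GLSL passed via VK_NV_glsl_shader).
// Non-SPIR-V modules get a default-constructed SHADER_MODULE_STATE whose has_valid_spirv is
// false, which RecordPipelineShaderStage() below uses to bail out early.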

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the SPIR-V isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders, capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}
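
// Illustrative note (not part of the layer): descriptor_uses is keyed by a (set, binding) pair,
// so the loop above effectively accumulates:
//   pipeline->active_slots[set][binding] |= requirements derived from the descriptor's SPIR-V type
// max_active_slot then records the highest descriptor set index the pipeline's shaders touch.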

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
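
// Worked example (illustrative only): for push constant ranges { offset = 0, size = 16 } and
// { offset = 16, size = 48 }, size_needed = max(0 + 16, 16 + 48) = 64, so push_constant_data is
// resized to 64 zeroed bytes whenever the bound layout's ranges change.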

void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;                    // to be set later
            image_ci.flags = VK_IMAGE_CREATE_ALIAS_BIT;  // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
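
// Illustrative note (not part of the layer): this record handles Vulkan's usual two-call idiom.
// A sketch of the app-side sequence being tracked, assuming a valid swapchain:
//   uint32_t count = 0;
//   vkGetSwapchainImagesKHR(device, swapchain, &count, nullptr);        // tracked as QUERY_COUNT
//   std::vector<VkImage> images(count);
//   vkGetSwapchainImagesKHR(device, swapchain, &count, images.data());  // tracked as QUERY_DETAILS
// The synthesized VkImageCreateInfo above lets the rest of the layer validate swapchain images
// through the same code paths as app-created images.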