/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?
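// Illustrative example (not from this file): the kind of application-side pNext chain that the
// lvl_find_in_chain lookups below detect when an Android hardware buffer is imported:
//
//     VkExternalMemoryImageCreateInfo emici = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO};
//     emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
//     VkImageCreateInfo ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
//     ci.pNext = &emici;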

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->imported_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR

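// Hook naming convention used throughout this file: PostCallRecord* hooks run after the driver call
// returns and early-out unless the call succeeded, so tracker state is only created for objects that
// really exist; PreCallRecord* hooks run before the driver call, capturing or tearing down state that
// the call is about to invalidate.
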
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage,
                                                       VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->external_format_android) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveImageMemoryRange(image, mem_info);
        }
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

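// The PreCallRecordCmd* transfer hooks below only refresh the command-buffer <-> resource binding
// links; the actual parameter validation for these commands is assumed to live in the separate
// validation objects (e.g. core checks), not in this state tracker.
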
void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage,
                                                       VkImageLayout dstImageLayout, uint32_t regionCount,
                                                       const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage,
                                                       VkImageLayout dstImageLayout, uint32_t regionCount,
                                                       const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO: This doesn't make a deep copy of pQueueFamilyIndices, so that needs fixing if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Fetch the memory requirements up front, in case the app never queries them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        auto mem_info = GetDevMemState(mem_binding);
        if (mem_info) {
            RemoveBufferMemoryRange(buffer, mem_info);
        }
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

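// Images created with VK_IMAGE_CREATE_ALIAS_BIT that share backing memory (or the same swapchain
// image slot) are tracked as mutual aliases. The links are symmetric: when a new image is linked to
// a compatible existing one below, each image records the other in its aliasing_images set.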
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        auto mem_state = GetDevMemState(image_state->binding.mem);
        if (mem_state) {
            bound_images = &mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // This is a one-way clear. Because bound_images contains the cross references, clearing every
    // image's aliasing_images set in a single pass removes all of the references; no two-way erase
    // is needed.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

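// The Get*State accessors below come in const and non-const pairs so that read-only validation paths
// and mutating record paths can share one lookup idiom: find the handle in the corresponding map and
// return a pointer to the tracked state, or nullptr for an unknown handle.
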
const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}
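
// Illustrative use of the typed-handle dispatch above (hypothetical caller):
//     VulkanTypedHandle typed(buffer, kVulkanObjectTypeBuffer);
//     BINDABLE *bindable = GetObjectMemBinding(typed);  // resolves to GetBufferState(buffer)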

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}

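// The AddCommandBufferBinding* helpers below all follow the same pattern: link the resource to the
// command buffer (so the command buffer can be invalidated if the resource is destroyed), then link
// any backing device memory the same way. Each helper early-outs when command_buffer_state tracking
// is disabled.
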
// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
                if (pMemInfo) {
                    // Now update CBInfo's Mem reference list
                    AddCommandBufferBinding(pMemInfo->cb_bindings,
                                            VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
                }
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
            if (pMemInfo) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(pMemInfo->cb_bindings,
                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
            }
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);
    mem_binding->binding.mem = mem;
    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
    mem_binding->binding.offset = memory_offset;
    mem_binding->binding.size = mem_binding->requirements.size;

    if (mem != VK_NULL_HANDLE) {
        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
        if (mem_info) {
            mem_info->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
        }
    }
}

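// Illustrative call path for the non-sparse binding above (handles are hypothetical): a successful
// vkBindImageMemory(device, image, mem, offset) would be recorded roughly as
//     SetMemBinding(mem, GetImageState(image), offset, VulkanTypedHandle(image, kVulkanObjectTypeImage));
// after which GetBoundMemory() on the image reports 'mem'.
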
// For the NULL mem case, clear any previous binding; otherwise:
// Make sure the given object is in its object map
// IF a previous binding existed, update the binding
// Add a reference from the objectInfo to the memoryInfo
// Add a reference off of the object's binding info
// Returns 'skip', which is currently always false; invalid handles are reported by the object tracker instead
bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
    bool skip = false;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (binding.mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
            if (mem_info) {
                mem_info->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

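// Sketch of the delta computation above (values assumed for illustration): if the previously
// validated binding-req map covered bindings {0, 1} and the current draw needs {0, 1, 3}, then
// std::set_difference yields delta_reqs = {3}, so only binding 3 is re-recorded instead of the
// full map.
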
// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
//  Add object_binding to cmd buffer
//  Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}

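// Example of the bookkeeping above (assumed state): binding objects A then B to one command buffer
// leaves object_bindings = [A, B], with A's cb_bindings mapping {cb_node -> 0} and B's mapping
// {cb_node -> 1}, so a later removal can find its vector slot without a linear search.
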
// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

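// Device creation harvests every recognized feature struct from the VkDeviceCreateInfo pNext chain
// into enabled_features, then caches physical-device properties and queue-family data so later
// validation does not have to re-query the driver. A typical application-side chain (illustrative):
//
//     VkPhysicalDeviceDescriptorIndexingFeaturesEXT di = {
//         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT};
//     VkPhysicalDeviceFeatures2 features2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, &di};
//     VkDeviceCreateInfo ci = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO};
//     ci.pNext = &features2;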
void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
    if (descriptor_indexing_features) {
        state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
    }

    const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
    if (eight_bit_storage_features) {
        state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
    }

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
    if (float16_int8_features) {
        state_tracker->enabled_features.float16_int8 = *float16_int8_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *uniform_buffer_standard_layout_features =
        lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
    if (uniform_buffer_standard_layout_features) {
        state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
    }

    const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
    if (scalar_block_layout_features) {
        state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
    }

    const auto *buffer_address = lvl_find_in_chain<VkPhysicalDeviceBufferAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_address) {
        state_tracker->enabled_features.buffer_address = *buffer_address;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
    if (host_query_reset_features) {
        state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *imageless_framebuffer_features =
        lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
    if (imageless_framebuffer_features) {
        state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *subgroup_extended_types_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(pCreateInfo->pNext);
    if (subgroup_extended_types_features) {
        state_tracker->enabled_features.subgroup_extended_types_features = *subgroup_extended_types_features;
    }

    const auto *separate_depth_stencil_layouts_features =
        lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>(pCreateInfo->pNext);
    if (separate_depth_stencil_layouts_features) {
        state_tracker->enabled_features.separate_depth_stencil_layouts_features = *separate_depth_stencil_layouts_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}

1187void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
1188 if (!device) return;
1189
locke-lunargd556cc32019-09-17 01:21:23 -06001190 // Reset all command buffers before destroying them, to unlink object_bindings.
1191 for (auto &commandBuffer : commandBufferMap) {
1192 ResetCommandBufferState(commandBuffer.first);
1193 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001194 pipelineMap.clear();
1195 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06001196 commandBufferMap.clear();
1197
1198 // This will also delete all sets in the pool & remove them from setMap
1199 DeleteDescriptorSetPools();
1200 // All sets should be removed
1201 assert(setMap.empty());
1202 descriptorSetLayoutMap.clear();
1203 imageViewMap.clear();
1204 imageMap.clear();
1205 bufferViewMap.clear();
1206 bufferMap.clear();
1207 // Queues persist until device is destroyed
1208 queueMap.clear();
1209}
1210
1211// Loop through bound objects and increment their in_use counts.
1212void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
1213 for (auto obj : cb_node->object_bindings) {
1214 auto base_obj = GetStateStructPtrFromObject(obj);
1215 if (base_obj) {
1216 base_obj->in_use.fetch_add(1);
1217 }
1218 }
1219}
1220
1221// Track which resources are in-flight by atomically incrementing their "in_use" count
1222void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
1223 cb_node->submitCount++;
1224 cb_node->in_use.fetch_add(1);
1225
1226 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
1227 IncrementBoundObjects(cb_node);
1228 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
1229 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
1230 // should then be flagged prior to calling this function
1231 for (auto event : cb_node->writeEventsBeforeWait) {
1232 auto event_state = GetEventState(event);
1233 if (event_state) event_state->write_in_use++;
1234 }
1235}
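// Illustrative note (editor sketch, not upstream behavior): submitting a primary command buffer that binds
// one pipeline and one descriptor set bumps in_use on the command buffer itself, the pipeline, and the set;
// RetireWorkOnQueue() performs the matching fetch_sub(1) calls once the submission retires, so a nonzero
// in_use count means "still referenced by in-flight work".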

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &semaphore : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;

                    const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryStatePair.first.pool);
                    if (qp_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
                        queryPassToStateMap[QueryObjectPass(queryStatePair.first, submission.perf_submit_pass)] =
                            QUERYSTATE_AVAILABLE;
                    }
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}
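// Worked example (editor sketch): if pQueue->seq == 5 with three pending submissions and a fence signals at
// seq 7, RetireWorkOnQueue(pQueue, 7) pops the first two submissions, releases their semaphore and command
// buffer references, marks their fences FENCE_RETIRED, and leaves the third submission pending with
// pQueue->seq == 7. Any cross-queue semaphore waits seen along the way retire the signaling queue up to the
// seq recorded in otherQueueSeqs.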

// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}
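// Example (editor sketch): for a queue with seq == 10 and 2 already-queued submissions, submitting one more
// batch guarded by a fence records signaler = {queue, 10 + 2 + 1}, i.e. the fence is considered signaled
// once the queue has retired through seq 13.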

void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, since we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<VkSemaphore> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(semaphore);
                } else {
                    // Retire work up until this submit early, since we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }
        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                EventToStageMap localEventToStageMap;
                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
                                         perf_submit ? perf_submit->counterPassIndex : 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
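// Example flow (editor sketch): a vkQueueSubmit with two VkSubmitInfo batches and a fence appends two
// submission records; only the last carries the fence handle. Each batch snapshots its wait list as
// {semaphore, signaling queue, signaling seq} triples so RetireWorkOnQueue() can later roll the signaling
// queues forward to the seq that satisfied the wait.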

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            bindable_state->binding.mem = MEMORY_UNBOUND;
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, since we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<VkSemaphore> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(semaphore);
                } else {
                    // Retire work up until this submit early, since we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}
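// Scope semantics (editor sketch): a temporary import (a sync-fd handle or VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR)
// marks the semaphore kSyncScopeExternalTemporary, and the first subsequent queue wait on it flips the scope
// back to kSyncScopeInternal (see the wait-semaphore handling in the submit paths above), while permanent
// imports stay kSyncScopeExternalPermanent for the life of the semaphore.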

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

void ValidationStateTracker::RetireFence(VkFence fence) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaler is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
        } else {
            // Fence signaler is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            pFence->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE : The alternate case, where only some of the fences have completed, is not handled here. In that case,
    // for the app to guarantee which fences completed, it will have to call vkGetFenceStatus(), at which point
    // we'll clean/remove their CBs if complete.
}
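// Example (editor sketch): vkWaitForFences(dev, 2, fences, VK_FALSE, timeout) returning VK_SUCCESS only
// proves that at least one fence signaled, so nothing is retired here; with waitAll == VK_TRUE (or a single
// fence) every listed fence is known complete and RetireFence() can roll its signaling queue forward.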

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with given handle is being bound to memory with the given mem_info struct.
// Track the binding by inserting the handle into the memory object's bound-object set.
// The memoryOffset/memRequirements/is_linear parameters are accepted for interface
// symmetry with the validation path, but are not used by the state tracker itself.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.handle);
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.handle);
    } else {
        // Unsupported object type
        assert(false);
    }
}
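// Note (editor sketch): bound_buffers and bound_acceleration_structures store raw uint64_t handles while
// bound_images stores VkImage, which is why the image path uses typed_handle.Cast<VkImage>() and the other
// two paths use typed_handle.handle directly.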

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
                                                    VkMemoryRequirements mem_reqs, bool is_linear) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
                                                     const VkMemoryRequirements &mem_reqs) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
}

// Remove the handle from the appropriate bound-object set on the memory object.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.handle);
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.handle);
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track bound memory range information
        auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
        }
        // Track objects tied to memory
        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}

void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        buffer_state->requirements = *pMemoryRequirements;
        buffer_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        image_state->requirements = *pMemoryRequirements;
        image_state->memory_requirements_checked = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
}

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->destroyed = true;
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
    pipeline_state->destroyed = true;
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->destroyed = true;
    pipelineLayoutMap.erase(pipelineLayout);
}

void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid; guard the whole update so a failed lookup can't be dereferenced
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
        sampler_state->destroyed = true;
    }
    samplerMap.erase(sampler);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second->destroyed = true;
        descriptorSetLayoutMap.erase(layout_it);
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
    if (desc_pool_state) {
        // Any bound cmd buffers are now invalid
        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
        // Free sets that were in this pool
        for (auto ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->destroyed = true;
        descriptorPoolMap.erase(descriptorPool);
    }
}

// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}

void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pPool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
}

void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
    cmd_pool_state->createFlags = pCreateInfo->flags;
    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
    commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
}

void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];

        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}
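// Example (editor sketch): a performance query pool whose counter selection needs n_performance_passes == 3
// has to be replayed three times, each submit carrying a VkPerformanceQuerySubmitInfoKHR with
// counterPassIndex 0, 1, then 2; queryPassToStateMap is keyed by (query, pass) so RetireWorkOnQueue() can
// track availability per pass.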

void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->destroyed = true;
        commandPoolMap.erase(commandPool);
    }
}

void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
                                                            VkCommandPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    // Reset all of the CBs allocated from this pool
    auto command_pool_state = GetCommandPoolState(commandPool);
    for (auto cmdBuffer : command_pool_state->commandBuffers) {
        ResetCommandBufferState(cmdBuffer);
    }
}

void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                       VkResult result) {
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto pFence = GetFenceState(pFences[i]);
        if (pFence) {
            if (pFence->scope == kSyncScopeInternal) {
                pFence->state = FENCE_UNSIGNALED;
            } else if (pFence->scope == kSyncScopeExternalTemporary) {
                pFence->scope = kSyncScopeInternal;
            }
        }
    }
}

// For the given cb_nodes, invalidate them and track the object causing the invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially the same,
// except one takes a map and one takes a set, and InvalidateCommandBuffers can also
// unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}
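// Example (editor sketch): destroying a VkSampler referenced by a recorded secondary command buffer marks
// that secondary CB_INVALID_COMPLETE, records the sampler handle in broken_bindings (so a later submit-time
// error can name the culprit), and propagates the invalid state to any primaries that executed it.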

void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
                                                            const VulkanTypedHandle &obj) {
    for (auto cb_node : cb_nodes) {
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!framebuffer) return;
    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
    InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
    framebuffer_state->destroyed = true;
    frameBufferMap.erase(framebuffer);
}

void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!renderPass) return;
    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
    InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
    rp_state->destroyed = true;
    renderPassMap.erase(renderPass);
}

void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto fence_state = std::make_shared<FENCE_STATE>();
    fence_state->fence = *pFence;
    fence_state->createInfo = *pCreateInfo;
    fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
    fenceMap[*pFence] = std::move(fence_state);
}

bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   VkResult result, void *cgpl_state_data) {
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
        }
    }
    cgpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   void *ccpl_state_data) const {
    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    ccpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                  VkResult result, void *ccpl_state_data) {
    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);

    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
        }
    }
    ccpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
                                                                        uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;  // Match the other Create* record hooks; don't track a failed create
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}

// For repeatable sorting, not very useful for "memory in range" search
struct PushConstantRangeCompare {
    bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
        if (lhs->offset == rhs->offset) {
            if (lhs->size == rhs->size) {
                // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
                return lhs->stageFlags < rhs->stageFlags;
            }
            // If the offsets are the same then sorting by the end of range is useful for validation
            return lhs->size < rhs->size;
        }
        return lhs->offset < rhs->offset;
    }
};
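// Ordering example (editor sketch): ranges {offset 0, size 16}, {offset 0, size 64}, and {offset 32, size 16}
// sort as (0,16) < (0,64) < (32,16): offset first, then size (so equal-offset ranges order by end of range),
// with stageFlags as the final tie-breaker purely to keep distinct ranges from comparing equivalent.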

static PushConstantRangesDict push_constant_ranges_dict;

PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
    if (!info->pPushConstantRanges) {
        // Hand back the empty entry (creating as needed)...
        return push_constant_ranges_dict.look_up(PushConstantRanges());
    }

    // Sort the input ranges to ensure equivalent ranges map to the same id
    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
        sorted.insert(info->pPushConstantRanges + i);
    }

    // Reserve (rather than size-construct) so the canonical form holds exactly the sorted ranges,
    // with no default-initialized entries padding the front of the vector.
    PushConstantRanges ranges;
    ranges.reserve(sorted.size());
    for (const auto *range : sorted) {
        ranges.emplace_back(*range);
    }
    return push_constant_ranges_dict.look_up(std::move(ranges));
}
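// Usage note (editor sketch): two pipeline layouts created with the same push-constant ranges listed in
// different orders hash to the same PushConstantRangesId here, which is what lets "compatible for set"
// checks compare canonical ids instead of deep-comparing range arrays.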

// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}
2314
2315void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2316 const VkAllocationCallbacks *pAllocator,
2317 VkPipelineLayout *pPipelineLayout, VkResult result) {
2318 if (VK_SUCCESS != result) return;
2319
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002320 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002321 pipeline_layout_state->layout = *pPipelineLayout;
2322 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2323 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2324 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002325 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002326 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2327 }
2328
2329 // Get canonical form IDs for the "compatible for set" contents
2330 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2331 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2332 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2333
2334 // Create table of "compatible for set N" cannonical forms for trivial accept validation
2335 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2336 pipeline_layout_state->compat_for_set.emplace_back(
2337 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2338 }
2339 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2340}
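
// Sketch of why the per-set canonical ids make bind-time checks cheap (hypothetical names):
// if layout_a and layout_b were created with identical push constant ranges and an identical
// descriptor set layout at set 0, then
//   layout_a_state->compat_for_set[0] == layout_b_state->compat_for_set[0]
// holds by id equality alone; no deep comparison of the layouts is needed at bind time.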

void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkDescriptorPool *pDescriptorPool, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                               VkDescriptorPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
    // TODO: validate flags
    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
    for (auto ds : pPool->sets) {
        FreeDescriptorSet(ds);
    }
    pPool->sets.clear();
    // Reset the available count for each type, and the available sets, for this pool
    for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
        pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
    }
    pPool->availableSets = pPool->maxSets;
}
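
// Accounting sketch (hypothetical numbers): a pool created with maxSets = 4 and a pool size
// of 8 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER descriptors, from which three sets consuming five
// uniform-buffer descriptors were allocated (the decrements happen at allocation time,
// outside this function), would show availableSets = 1 and
// availableDescriptorTypeCount[UNIFORM_BUFFER] = 3; the reset above restores them to 4 and 8
// without any driver round trip.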

bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
                                                                   const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                   VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
    // Always update common data
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);

    return false;
}

// Allocation state was good and the call down the chain was made, so update state based on allocating descriptor sets
void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                  VkDescriptorSet *pDescriptorSets, VkResult result,
                                                                  void *ads_state_data) {
    if (VK_SUCCESS != result) return;
    // All the updates are contained in a single cvdescriptorset function
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
}

void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
                                                             const VkDescriptorSet *pDescriptorSets) {
    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
    // Update available descriptor sets in pool
    pool_state->availableSets += count;

    // For each freed descriptor set, add its resources back into the pool as available, and remove it from the pool and setMap
    for (uint32_t i = 0; i < count; ++i) {
        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
            auto descriptor_set = setMap[pDescriptorSets[i]].get();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            FreeDescriptorSet(descriptor_set);
            pool_state->sets.erase(descriptor_set);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}

void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
                                                                  VkCommandBuffer *pCommandBuffer, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
    if (pPool) {
        for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
            // Add command buffer to its command pool's map
            pPool->commandBuffers.insert(pCommandBuffer[i]);
            auto pCB = std::make_shared<CMD_BUFFER_STATE>();
            pCB->createInfo = *pCreateInfo;
            pCB->device = device;
            pCB->command_pool = pPool;
            // Add command buffer to map
            commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
            ResetCommandBufferState(pCommandBuffer[i]);
        }
    }
}

// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
    AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
                            cb_state);

    const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
        auto view_state = GetAttachmentImageViewState(fb_state, attachment);
        if (view_state) {
            AddCommandBufferBindingImageView(cb_state, view_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary command buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case an implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command buffer that is inheriting render pass state, update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
        }
    }

    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}

void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    // Cached validation is specific to a particular recording of a particular command buffer.
    for (auto descriptor_set : cb_state->validated_descriptor_sets) {
        descriptor_set->ClearCachedValidation(cb_state);
    }
    cb_state->validated_descriptor_sets.clear();
    if (VK_SUCCESS == result) {
        cb_state->state = CB_RECORDED;
    }
}

void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
                                                              VkResult result) {
    if (VK_SUCCESS == result) {
        ResetCommandBufferState(commandBuffer);
    }
}

CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // Initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                default:
                    break;
            }
        }
    }

    return flags;
}
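
// Illustrative sketch: a graphics pipeline created with pDynamicStates =
// {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR} yields
//   CBSTATUS_ALL_STATE_SET & ~(CBSTATUS_VIEWPORT_SET | CBSTATUS_SCISSOR_SET)
// i.e. everything else is baked into the pipeline as static state, and only viewport and
// scissor must still be set dynamically (vkCmdSetViewport / vkCmdSetScissor) before drawing.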

// Validation cache:
// CV is the bottommost implementor of this extension. Don't pass calls down.

// Utility function to set collective state for a pipeline
void SetPipelineState(PIPELINE_STATE *pPipe) {
    // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
    if (pPipe->graphicsPipelineCI.pColorBlendState) {
        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
                    pPipe->blendConstantsEnabled = true;
                }
            }
        }
    }
}
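
// Example: an attachment with blendEnable = VK_TRUE and srcColorBlendFactor =
// VK_BLEND_FACTOR_CONSTANT_COLOR falls in the range tested above, so blendConstantsEnabled
// is set and the application is expected to supply constants via vkCmdSetBlendConstants
// (unless the pipeline bakes them in as static state).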

void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
    }
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}

void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                         uint32_t viewportCount, const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_VIEWPORT_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
                                                                   uint32_t exclusiveScissorCount,
                                                                   const VkRect2D *pExclusiveScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
                                                                    VkImageLayout imageLayout) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    if (imageView != VK_NULL_HANDLE) {
        auto view_state = GetImageViewState(imageView);
        AddCommandBufferBindingImageView(cb_state, view_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                             uint32_t viewportCount,
                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}
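
// Usage sketch of the VK_NV_ray_tracing flow this mirrors (hypothetical handles):
//   vkCreateAccelerationStructureNV(device, &create_info, nullptr, &as);
//   vkGetAccelerationStructureMemoryRequirementsNV(device, &req_info, &reqs);  // optional for the app
//   vkBindAccelerationStructureMemoryNV(device, 1, &bind_info);
// Querying all three requirement types eagerly here means later bind/build validation has
// the sizes on hand even when the application skipped its own queries.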

void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
    VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
    ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
    if (as_state != nullptr) {
        if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
            as_state->memory_requirements = *pMemoryRequirements;
            as_state->memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
            as_state->build_scratch_memory_requirements = *pMemoryRequirements;
            as_state->build_scratch_memory_requirements_checked = true;
        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
            as_state->update_scratch_memory_requirements = *pMemoryRequirements;
            as_state->update_scratch_memory_requirements_checked = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];

        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
        if (as_state) {
            // Track bound memory range information
            auto mem_info = GetDevMemState(info.memory);
            if (mem_info) {
                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
                                                       as_state->requirements);
            }
            // Track objects tied to memory
            SetMemBinding(info.memory, as_state, info.memoryOffset,
                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));

            // GPU validation of top-level acceleration structure building needs acceleration structure handles.
            if (enabled.gpu_validation) {
                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                          VkAccelerationStructureNV dst,
                                                                          VkAccelerationStructureNV src,
                                                                          VkCopyAccelerationStructureModeNV mode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info = src_as_state->build_info;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            auto mem_info = GetDevMemState(mem_binding);
            if (mem_info) {
                RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
            }
        }
        ClearMemoryObjectBindings(obj_struct);
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                   uint32_t viewportCount,
                                                                   const VkViewportWScalingNV *pViewportWScalings) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                                               uint16_t lineStipplePattern) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                                        const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
    cb_state->status |= CBSTATUS_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                                                            float maxDepthBounds) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                   uint32_t compareMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t writeMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t reference) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}

// Update pipeline_layout bind points, applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the bindings "disturbed" before and after the range being set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last; we'll shrink below, but first need to check for and clean up the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, we need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is the index within the layout, input_idx the index within the input sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only clean up the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in the caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
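
// Compatibility sketch (hypothetical layouts): suppose layout_a and layout_b share push
// constant ranges and the descriptor set layouts for sets 0..1 but differ at set 2. After
//   vkCmdBindDescriptorSets(cb, GRAPHICS, layout_a, /*firstSet*/ 0, /*setCount*/ 3, sets, 0, nullptr);
//   vkCmdBindDescriptorSets(cb, GRAPHICS, layout_b, /*firstSet*/ 2, /*setCount*/ 1, &other, 0, nullptr);
// sets 0 and 1 stay bound because their compat_id_for_set entries match, while set 2 is
// replaced; had the layouts also diverged at set 1, the invalidation loop above would have
// nulled set 1's binding as "disturbed".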

// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}

void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short-circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this, report_data));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}
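
// Shadow-buffer sketch: after
//   vkCmdPushConstants(cb, layout, VK_SHADER_STAGE_VERTEX_BIT, /*offset*/ 4, /*size*/ 8, data);
// bytes [4, 12) of cb_state->push_constant_data hold a copy of the application data, letting
// later validation reason about exactly which constants were written during this recording.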

void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add a binding for this index buffer to this command buffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add a binding for this vertex buffer to this command buffer
        AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between the buffer and the cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
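
// Note on the deferred update above: the recorded lambda does not modify global event state
// at record time; it is replayed against a per-submit localEventToStageMap when the command
// buffer is actually submitted, so re-submitting the same command buffer reapplies the
// event transitions in submission order.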

void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
                                                QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = {queryPool, firstQuery + i};
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool,
                                                 uint32_t queryIndex) const {
    QueryObject query = {queryPool, queryIndex};

    const std::array<const decltype(queryToStateMap) *, 2> map_list = {localQueryToStateMap, &queryToStateMap};

    for (const auto map : map_list) {
        auto query_data = map->find(query);
        if (query_data != map->end()) {
            return query_data->second;
        }
    }
    return QUERYSTATE_UNKNOWN;
}
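
// Lookup-order sketch: an entry written to localQueryToStateMap during the current submit
// (e.g. QUERYSTATE_RUNNING) shadows an older entry in the device-wide queryToStateMap
// (e.g. QUERYSTATE_ENDED); only when neither map contains the query does the state resolve
// to QUERYSTATE_UNKNOWN.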

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            SetQueryState(query_obj, QUERYSTATE_RUNNING, localQueryToStateMap);
            return false;
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled.query_validation) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query_obj, QUERYSTATE_ENDED, localQueryToStateMap);
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled.query_validation) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back(
        [query](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query, QUERYSTATE_ENDED, localQueryToStateMap);
        });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}
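
// DAG sketch (hypothetical render pass): given dependencies {0 -> 1, 1 -> 1, EXTERNAL -> 0},
// the loop above records subpass_to_node[1].prev = {0} and subpass_to_node[0].next = {1},
// stores the index of the 1 -> 1 dependency in self_dependencies[1], and skips the EXTERNAL
// edge entirely since it is not a subpass-to-subpass dependency.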
3258
3259static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
3260 if (index == VK_ATTACHMENT_UNUSED) return;
3261
3262 if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
3263}
3264
3265void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
3266 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
3267 VkRenderPass *pRenderPass) {
3268 render_pass->renderPass = *pRenderPass;
3269 auto create_info = render_pass->createInfo.ptr();
3270
3271 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
3272
3273 for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
3274 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
3275 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
3276 MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
3277
3278 // resolve attachments are considered to be written
3279 if (subpass.pResolveAttachments) {
3280 MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
3281 }
3282 }
3283 if (subpass.pDepthStencilAttachment) {
3284 MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
3285 }
3286 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
3287 MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
3288 }
3289 }
3290
3291 // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
3292 renderPassMap[*pRenderPass] = std::move(render_pass);
3293}
3294
3295// Style note:
3296// Use of rvalue reference exceeds reccommended usage of rvalue refs in google style guide, but intentionally forces caller to move
3297// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
3298// construction or assignment.
3299void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3300 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3301 VkResult result) {
3302 if (VK_SUCCESS != result) return;
3303 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3304 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
3305}
3306
3307void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3308 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3309 VkResult result) {
3310 if (VK_SUCCESS != result) return;
3311 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3312 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
3313}
3314
3315void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3316 const VkRenderPassBeginInfo *pRenderPassBegin,
3317 const VkSubpassContents contents) {
3318 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3319 auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
3320 auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
3321
3322 if (render_pass_state) {
3323 cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
3324 cb_state->activeRenderPass = render_pass_state;
3325 // This is a shallow copy as that is all that is needed for now
3326 cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
3327 cb_state->activeSubpass = 0;
3328 cb_state->activeSubpassContents = contents;
3329 cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
3330 // Connect this framebuffer and its children to this cmdBuffer
3331 AddFramebufferBinding(cb_state, framebuffer);
3332 // Connect this RP to cmdBuffer
Jeff Bolzadbfa852019-10-04 13:53:30 -05003333 AddCommandBufferBinding(render_pass_state->cb_bindings,
3334 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
3335 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003336
3337 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
3338 if (chained_device_group_struct) {
3339 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3340 } else {
3341 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3342 }
3343 }
3344}
3345
3346void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3347 const VkRenderPassBeginInfo *pRenderPassBegin,
3348 VkSubpassContents contents) {
3349 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3350}
3351
3352void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3353 const VkRenderPassBeginInfo *pRenderPassBegin,
3354 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
3355 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3356}
3357
3358void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3359 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3360 cb_state->activeSubpass++;
3361 cb_state->activeSubpassContents = contents;
3362}
3363
3364void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3365 RecordCmdNextSubpass(commandBuffer, contents);
3366}
3367
3368void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
3369 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
3370 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3371 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3372}
3373
void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non-const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

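        // Cross-link the primary and secondary command buffers so that invalidation of one can be
        // propagated to the other, and hoist the secondary's deferred query and queue-submit updates
        // into the primary so they run when the primary is submitted.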
        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

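// Common handler for the vkBindImageMemory family. Swapchain-backed binds (via
// VkBindImageMemorySwapchainInfoKHR) are tracked against the swapchain image; ordinary binds are
// tracked as a memory range plus an object-to-memory binding.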
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
                                       image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
            AddAliasingImage(image_state);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
    auto event_state = GetEventState(event);
    if (event_state) {
        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
    }
}

void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

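// A fence import with copy transference (Sync FD handle types, or the TEMPORARY flag) only borrows
// the external payload, so the fence moves to temporary external scope; reference transference
// permanently hands the payload over, after which the layer can no longer track the fence state.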
void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

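// Common handler for swapchain creation. On success this also flags shared-presentable swapchains
// (the SHARED_DEMAND/CONTINUOUS_REFRESH present modes), which QueuePresent later uses to lock the
// layout of images that remain acquired by the presentation engine.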
void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
    return;
}

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed -- but if the app does that,
        // it's confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

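// vkAcquireNextImage* signals its fence and semaphore from the presentation engine rather than a
// queue, so neither gets a queue signaler; the acquired image is flagged so present-time checks can
// distinguish it from images still owned by the WSI.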
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS) || (result == VK_INCOMPLETE))) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);

    if (!pQueueFamilyProperties) {
        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
    } else {  // Save queue family properties
        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;

        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
        for (uint32_t i = 0; i < count; ++i) {
            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

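// The vkGetPhysicalDeviceFeatures{,2,2KHR} entry points all funnel their results into the cached
// features2 struct and mark the query as QUERY_DETAILS for later validation of device creation.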
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

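// The surface present-mode and format queries follow the standard two-call idiom: a first call with
// a null array only returns the count (QUERY_COUNT); a second call fills the details (QUERY_DETAILS).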
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

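// Device groups surface every member physical device, so each one gets a state entry and a features
// snapshot, exactly as in PostCallRecordEnumeratePhysicalDevices.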
void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

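// Cache the performance counters exposed by VK_KHR_performance_query per queue family, so later
// performance-query validation can look them up.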
void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

bool ValidationStateTracker::PreCallValidateReleaseProfilingLockKHR(VkDevice device) const {
    bool skip = false;

    if (!performance_lock_acquired) {
        skip |= log_msg(
            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
            "VUID-vkReleaseProfilingLockKHR-device-03235",
            "The profiling lock of device must have been held via a previous successful call to vkAcquireProfilingLockKHR.");
    }

    return skip;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

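// Descriptor update templates are tracked via a deep copy of their create info, since the template's
// update entries are needed again whenever the template is applied to a set.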
void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

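// Display-plane property queries use the same two-call count/details idiom; only the plane count is
// cached, for bounds checking of later per-plane calls.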
void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

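// vkResetQueryPoolEXT resets queries from the host. Clamp the range to the pool's size, reset each
// query's state, and for performance query pools also reset every per-pass slot.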
4306void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
4307 uint32_t queryCount) {
4308 // Do nothing if the feature is not enabled.
4309 if (!enabled_features.host_query_reset_features.hostQueryReset) return;
4310
4311 // Do nothing if the query pool has been destroyed.
4312 auto query_pool_state = GetQueryPoolState(queryPool);
4313 if (!query_pool_state) return;
4314
4315 // Reset the state of existing entries.
4316 QueryObject query_obj{queryPool, 0};
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004317 QueryObjectPass query_pass_obj{query_obj, 0};
locke-lunargd556cc32019-09-17 01:21:23 -06004318 const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
4319 for (uint32_t i = 0; i < max_query_count; ++i) {
4320 query_obj.query = firstQuery + i;
4321 auto query_it = queryToStateMap.find(query_obj);
4322 if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01004323 if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
4324 for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
4325 query_pass_obj.perf_pass = passIndex;
4326 auto query_perf_it = queryPassToStateMap.find(query_pass_obj);
4327 if (query_perf_it != queryPassToStateMap.end()) query_perf_it->second = QUERYSTATE_RESET;
4328 }
4329 }
locke-lunargd556cc32019-09-17 01:21:23 -06004330 }
4331}
4332
4333void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
4334 const TEMPLATE_STATE *template_state, const void *pData) {
4335 // Translate the templated update into a normal update for validation...
4336 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
4337 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
4338 decoded_update.desc_writes.data(), 0, NULL);
4339}
4340
4341// Update the common AllocateDescriptorSetsData
4342void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05004343 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06004344 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05004345 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004346 if (layout) {
4347 ds_data->layout_nodes[i] = layout;
4348 // Count total descriptors required per type
4349 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
4350 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
4351 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
4352 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
4353 }
4354 }
4355 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
4356 }
4357}
4358
4359// Decrement allocated sets from the pool and insert new sets into set_map
4360void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
4361 const VkDescriptorSet *descriptor_sets,
4362 const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
4363 auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
4364 // Account for sets and individual descriptors allocated from pool
4365 pool_state->availableSets -= p_alloc_info->descriptorSetCount;
4366 for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
4367 pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
4368 }
4369
4370 const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
4371 bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
4372
4373 // Create tracking object for each descriptor set; insert into global map and the pool's set.
4374 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
4375 uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
4376
Jeff Bolz41a1ced2019-10-11 11:40:49 -05004377 auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
Jeff Bolz252d2532019-10-15 22:06:39 -05004378 variable_count, this, report_data);
locke-lunargd556cc32019-09-17 01:21:23 -06004379 pool_state->sets.insert(new_ds.get());
4380 new_ds->in_use.store(0);
4381 setMap[descriptor_sets[i]] = std::move(new_ds);
4382 }
4383}
4384
4385// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
4386void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
4387 UpdateDrawState(cb_state, bind_point);
4388 cb_state->hasDispatchCmd = true;
4389}
4390
locke-lunargd556cc32019-09-17 01:21:23 -06004391// Generic function to handle state update for all CmdDraw* type functions
4392void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
4393 UpdateStateCmdDrawDispatchType(cb_state, bind_point);
locke-lunargd556cc32019-09-17 01:21:23 -06004394 cb_state->hasDrawCmd = true;
4395}
4396
4397void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
4398 uint32_t firstVertex, uint32_t firstInstance) {
4399 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4400 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4401}
4402
4403void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
4404 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
4405 uint32_t firstInstance) {
4406 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4407 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4408}
4409
4410void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4411 uint32_t count, uint32_t stride) {
4412 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4413 BUFFER_STATE *buffer_state = GetBufferState(buffer);
4414 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4415 AddCommandBufferBindingBuffer(cb_state, buffer_state);
4416}
4417
4418void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
4419 VkDeviceSize offset, uint32_t count, uint32_t stride) {
4420 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4421 BUFFER_STATE *buffer_state = GetBufferState(buffer);
4422 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4423 AddCommandBufferBindingBuffer(cb_state, buffer_state);
4424}
4425
4426void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
4427 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4428 UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
4429}
4430
4431void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
4432 VkDeviceSize offset) {
4433 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4434 UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
4435 BUFFER_STATE *buffer_state = GetBufferState(buffer);
4436 AddCommandBufferBindingBuffer(cb_state, buffer_state);
4437}
4438
4439void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
4440 VkDeviceSize offset, VkBuffer countBuffer,
4441 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
4442 uint32_t stride) {
4443 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4444 BUFFER_STATE *buffer_state = GetBufferState(buffer);
4445 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
4446 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4447 AddCommandBufferBindingBuffer(cb_state, buffer_state);
4448 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
4449}
4450
4451void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
4452 VkDeviceSize offset, VkBuffer countBuffer,
4453 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
4454 uint32_t stride) {
4455 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4456 BUFFER_STATE *buffer_state = GetBufferState(buffer);
4457 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
4458 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4459 AddCommandBufferBindingBuffer(cb_state, buffer_state);
4460 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
4461}
4462
4463void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
4464 uint32_t firstTask) {
4465 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4466 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4467}
4468
4469void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
4470 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
4471 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4472 UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
4473 BUFFER_STATE *buffer_state = GetBufferState(buffer);
4474 if (buffer_state) {
4475 AddCommandBufferBindingBuffer(cb_state, buffer_state);
4476 }
4477}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}
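
// Design note: the buffer-state lookups are guarded because GetBufferState returns null for a
// handle the tracker has never seen (or has already destroyed); skipping the binding is safer
// than dereferencing a null pointer while recording state.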

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}
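
// A minimal sketch of the is_spirv test above (helper name hypothetical): spv::MagicNumber is
// the SPIR-V magic word 0x07230203, stored as the first 32-bit word of every valid SPIR-V binary:
//
//     bool LooksLikeSpirv(const uint32_t *code, size_t word_count) {
//         return (word_count > 0) && (code[0] == 0x07230203u);
//     }
//
// Non-SPIR-V payloads (e.g. GLSL accepted via VK_NV_glsl_shader) get an empty placeholder state.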

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the SPIR-V isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module || !module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline: record which (set, binding) slots this stage touches
    for (auto use : stage_state->descriptor_uses) {
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}
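
// Worked example of the accumulation above (values hypothetical): a use keyed by
// (set = 1, binding = 3) ORs its descriptor requirement bits into active_slots[1][3] and
// raises max_active_slot to at least 1; repeated uses of the same slot across stages
// accumulate through the bitwise OR rather than overwriting each other.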

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
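
// Worked example of the resize above (ranges hypothetical): with push-constant ranges
// {offset = 0, size = 16} and {offset = 16, size = 32}, size_needed = max(16, 48) = 48, so
// push_constant_data becomes a 48-byte zero-filled shadow covering the largest extent any
// range touches.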

void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);
    if (!swapchain_state) return;

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this image

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;                    // set from the swapchain's pNext chain below
            image_ci.flags = VK_IMAGE_CREATE_ALIAS_BIT;  // updated from the swapchain's flags below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
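
// Illustrative two-call usage that drives the QUERY_COUNT / QUERY_DETAILS transitions above
// (variable names hypothetical):
//
//     uint32_t count = 0;
//     vkGetSwapchainImagesKHR(device, swapchain, &count, nullptr);        // records QUERY_COUNT
//     std::vector<VkImage> images(count);
//     vkGetSwapchainImagesKHR(device, swapchain, &count, images.data());  // records QUERY_DETAILS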