/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
    }
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
    }
}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
}

#else

void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {}

void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {}

#endif  // VK_USE_PLATFORM_ANDROID_KHR


std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                     uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) == 0) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes every format has at least one plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
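
// Illustrative sketch (assumption, not part of this layer): the per-plane queries above mirror
// what an application does for a disjoint multi-planar image, e.g. a 2-plane format such as
// VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
//
//     VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;  // then PLANE_1_BIT, ...
//     VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info, image};
//     VkMemoryRequirements2 reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//     vkGetImageMemoryRequirements2(device, &info, &reqs);
//
// Each plane can then be bound to its own memory via a VkBindImagePlaneMemoryInfo chained into
// VkBindImageMemoryInfo.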

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create a deep copy of pQueueFamilyIndices, so fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Record the memory requirements in case the app does not query them
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image view state for a given framebuffer attachment
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer);
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(index < framebuffer->createInfo.attachmentCount);
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image view state for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer);
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(index < framebuffer->createInfo.attachmentCount);
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}
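
// Illustrative note (assumption): for a framebuffer created with
// VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR the create-time pAttachments array is ignored; the
// actual views are supplied at render-pass begin time through a VkRenderPassAttachmentBeginInfo
// chained into VkRenderPassBeginInfo::pNext, which is why there is no attachment view state to
// return here.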

void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // A one-way clear is sufficient: because aliasing_images entries are cross-referenced,
    // a single pass over bound_images clears every reference between these images.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}
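
// Illustrative sketch (assumption, app-side): aliasing arises when two compatible images are
// bound to overlapping ranges of one VkDeviceMemory:
//
//     vkBindImageMemory(device, image_a, memory, /*memoryOffset*/ 0);
//     vkBindImageMemory(device, image_b, memory, /*memoryOffset*/ 0);
//
// After the second bind, AddAliasingImage() cross-links the two handles in each other's
// aliasing_images set (when IsCompatibleAliasing() holds), so state tracked through one handle
// can be invalidated through the other.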

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}
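
// Illustrative note (assumption): the template above lets the const and non-const
// GetObjectMemBinding overloads below share a single switch. Instantiated as
//     GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, handle)
// it resolves to the const accessors; the mutable instantiation resolves to the non-const
// ones, avoiding a const_cast or a duplicated function body.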

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}
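
// Illustrative sketch (assumption, app-side): a dedicated allocation that the pNext walk above
// records, using the core Vulkan 1.1 struct:
//
//     VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
//     dedicated_info.image = image;  // exactly one of image/buffer is non-null
//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, &dedicated_info};
//     alloc_info.allocationSize = reqs.size;      // from vkGetImageMemoryRequirements
//     alloc_info.memoryTypeIndex = type_index;    // chosen from reqs.memoryTypeBits
//     vkAllocateMemory(device, &alloc_info, nullptr, &memory);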

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(mem_binding->cb_bindings,
                                        VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
            }
        }
    }
}

// SetMemBinding is used to establish an immutable, non-sparse binding between a single image/buffer object and a memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}
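
// Illustrative sketch (assumption, app-side): the non-sparse path above is driven by an
// ordinary bind call, after which the binding is immutable for the life of the resource:
//
//     VkMemoryRequirements reqs;
//     vkGetBufferMemoryRequirements(device, buffer, &reqs);
//     vkBindBufferMemory(device, buffer, memory, /*memoryOffset*/ 0);
//
// The tracker then records {memory, offset, requirements.size} in the object's MEM_BINDING and
// inserts the object's typed handle into the DEVICE_MEMORY_STATE's obj_bindings set.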

// For the NULL mem case, clear any previous binding. Otherwise:
// Make sure the given object is in its object map
// IF a previous binding existed, update the binding
// Add a reference from objectInfo to memoryInfo
// Add a reference off of the object's binding info
// Return VK_TRUE if the addition is successful, VK_FALSE otherwise
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}
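
// Illustrative sketch (assumption, app-side): sparse bindings arrive through a queue operation
// rather than vkBind*Memory, one VkSparseMemoryBind per bound range:
//
//     VkSparseMemoryBind bind = {};
//     bind.resourceOffset = 0;
//     bind.size = reqs.size;
//     bind.memory = memory;
//     bind.memoryOffset = 0;
//     VkSparseBufferMemoryBindInfo buffer_bind = {buffer, 1, &bind};
//     VkBindSparseInfo bind_info = {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO};
//     bind_info.bufferBindCount = 1;
//     bind_info.pBufferBinds = &buffer_bind;
//     vkQueueBindSparse(queue, 1, &bind_info, VK_NULL_HANDLE);
//
// Each bound range becomes one MEM_BINDING in the object's sparse_bindings set above.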

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
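
// Illustrative sketch (assumption): the delta logic above leans on the standard set algorithms
// over sorted ranges. A minimal standalone analogue:
//
//     #include <algorithm>
//     #include <iterator>
//     #include <set>
//     std::set<int> validated = {1, 2, 3};  // binding requirements already recorded
//     std::set<int> required = {2, 3, 4};   // binding requirements for this draw
//     bool covered = std::includes(validated.begin(), validated.end(),
//                                  required.begin(), required.end());  // false: 4 is missing
//     std::set<int> delta;
//     std::set_difference(required.begin(), required.end(),
//                         validated.begin(), validated.end(),
//                         std::inserter(delta, delta.begin()));        // delta == {4}
//
// Only the delta ({4} here) is re-recorded, which keeps "bindless" descriptor sets with
// thousands of descriptors from being fully reprocessed on every draw.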

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}
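
// Illustrative note (assumption): this two-sided bookkeeping means each tracked object knows
// which command buffers reference it (cb_bindings) and each command buffer knows which objects
// it references (object_bindings). The stored index records where the object sits in the
// command buffer's object_bindings vector so it can later be found without a linear search,
// and the boolean return ("was this a new link?") lets callers such as
// AddCommandBufferBindingImage() skip re-walking bound memory when the link already exists.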

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
967
968void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
969 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
970 VkResult result) {
971 if (VK_SUCCESS != result) return;
972
973 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
974 if (nullptr == enabled_features_found) {
975 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
976 if (features2) {
977 enabled_features_found = &(features2->features);
978 }
979 }
980
981 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
982 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
983 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
984
985 if (nullptr == enabled_features_found) {
986 state_tracker->enabled_features.core = {};
987 } else {
988 state_tracker->enabled_features.core = *enabled_features_found;
989 }
990
991 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
992 // previously set them through an explicit API call.
993 uint32_t count;
994 auto pd_state = GetPhysicalDeviceState(gpu);
995 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
996 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
997 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
998 // Save local link to this device's physical device state
999 state_tracker->physical_device_state = pd_state;
1000
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001001 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
1002 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001003 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001004 } else {
1005 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan12Features
1006
1007 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1008 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001009 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1010 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1011 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1012 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001013 }
1014
1015 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1016 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001017 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1018 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001019 }
1020
1021 const auto *descriptor_indexing_features =
1022 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1023 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001024 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1025 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1026 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1027 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1028 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1029 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1030 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1031 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1032 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1033 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1034 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1035 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1036 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1037 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1038 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1039 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1040 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1041 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1042 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1043 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1044 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1045 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1046 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1047 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1048 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1049 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1050 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1051 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
                subgroup_extended_types_features->shaderSubgroupExtendedTypes;
        }

        const auto *separate_depth_stencil_layouts_features =
            lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.core12.separateDepthStencilLayouts =
                separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
        }

        const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
        }

        const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
        }

        const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
            state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
                buffer_device_address->bufferDeviceAddressCaptureReplay;
            state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
                buffer_device_address->bufferDeviceAddressMultiDevice;
        }
    }

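    // Vulkan 1.1 features follow the same pattern: prefer the consolidated
    // VkPhysicalDeviceVulkan11Features struct when present, otherwise fall back to the
    // individual feature structs that were promoted to core in 1.1.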
    const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
    if (vulkan_11_features) {
        state_tracker->enabled_features.core11 = *vulkan_11_features;
    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features

        const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
        if (sixteen_bit_storage_features) {
            state_tracker->enabled_features.core11.storageBuffer16BitAccess =
                sixteen_bit_storage_features->storageBuffer16BitAccess;
            state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
                sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
            state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
            state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
        }

        const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
        if (multiview_features) {
            state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
            state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
            state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
        }

        const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
        if (variable_pointers_features) {
            state_tracker->enabled_features.core11.variablePointersStorageBuffer =
                variable_pointers_features->variablePointersStorageBuffer;
            state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
        }

        const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
        if (protected_memory_features) {
            state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
        }

        const auto *ycbcr_conversion_features =
            lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
        if (ycbcr_conversion_features) {
            state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
        }

        const auto *shader_draw_parameters_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
        if (shader_draw_parameters_features) {
            state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
        }
    }

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

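    // Each extension feature struct found in the pNext chain below is snapshotted wholesale
    // into the corresponding enabled_features member.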
    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_device_address_ext) {
        state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
    if (device_coherent_memory_features) {
        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
    }

    const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
    if (ycbcr_image_array_features) {
        state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
    }

    const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
    if (ray_tracing_features) {
        state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core11);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core12);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

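    // When the device is not Vulkan 1.2, copy the promoted VK_EXT_descriptor_indexing limits
    // into phys_dev_props_core12 member-by-member so downstream checks can read one location.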
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
        state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
            descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
        state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
            descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
        state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
        state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
            descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
        state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
        state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
            depth_stencil_resolve_props.supportedStencilResolveModes;
        state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
        state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
        state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
            timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
    }

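    // Likewise, backfill the VK_KHR_shader_float_controls limits (promoted to core in 1.2)
    // into phys_dev_props_core12 when only the extension form is available.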
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
        state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
        state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
    }

    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(
            gpu, &numCooperativeMatrixProperties, state_tracker->cooperative_matrix_properties.data());
    }
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
        state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
        state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
        state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO: We should be able to remove the NULL look-up checks from the code below as long as
    // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
    // should then be flagged prior to calling this function
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

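// Roll the queue's state forward to the given sequence number: release the in-use counts held
// by each retired submission's semaphores and command buffers, promote ended queries to
// available, mark the submission's fence retired, and recursively retire other queues up to
// the highest wait seen against them.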
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}

// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}

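// Record state for vkQueueSubmit: snapshot each submit's semaphore waits/signals and command
// buffers into the queue's submission list, replay the command buffers' deferred query and
// event updates, and retire work early when a signal's matching wait will never be observed
// internally (external fences/semaphores).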
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<SEMAPHORE_SIGNAL> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
    return;
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

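// Record state for vkQueueBindSparse: track the sparse memory bindings made by each
// VkBindSparseInfo, then enqueue a submission entry carrying its semaphore waits/signals and
// the fence (attached to the last bind) so completion can be tracked like a queue submit.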
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

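// Newly created semaphores default to binary type; a VkSemaphoreTypeCreateInfoKHR in the
// pNext chain switches the tracked type and seeds the timeline payload with initialValue.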
void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
    semaphore_state->payload = 0;
    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
    if (semaphore_type_create_info) {
        semaphore_state->type = semaphore_type_create_info->semaphoreType;
        semaphore_state->payload = semaphore_type_create_info->initialValue;
    }
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
                                                              VkResult result) {
    // Only update the tracked payload when the signal actually succeeded, matching the other record hooks.
    if (VK_SUCCESS != result) return;
    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
    pSemaphore->payload = pSignalInfo->value;
}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

void ValidationStateTracker::RetireFence(VkFence fence) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
        } else {
            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            pFence->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. For the app to
    // determine which fences completed, it will have to call vkGetFenceStatus(), at which point we'll clean/remove
    // their CBs if complete.
}

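// A completed timeline-semaphore wait can retire work on any queue: for each queue, find the
// highest-seq submission that signals this semaphore with a payload <= until_payload and roll
// the queue forward to it.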
void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queueState = pair.second;
            uint64_t max_seq = 0;
            for (const auto &submission : queueState.submissions) {
                for (const auto &signalSemaphore : submission.signalSemaphores) {
                    if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
                        if (signalSemaphore.seq > max_seq) {
                            max_seq = signalSemaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queueState, max_seq);
            }
        }
    }
}

void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                  VkResult result) {
    if (VK_SUCCESS != result) return;

    for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
        RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
    }
}

void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second == true) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with given handle is being bound to memory with the given mem_info struct.
// Track the handle in the memory object's set of bound objects so the binding can be
// validated and cleaned up when either side is destroyed.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}

// Remove the handle from the appropriate bound-object set on the memory object.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

2113void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2114 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2115 if (buffer_state) {
2116 // Track bound memory range information
2117 auto mem_info = GetDevMemState(mem);
2118 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002119 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002120 }
2121 // Track objects tied to memory
2122 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2123 }
2124}
2125
2126void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2127 VkDeviceSize memoryOffset, VkResult result) {
2128 if (VK_SUCCESS != result) return;
2129 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2130}
2131
2132void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
2133 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2134 for (uint32_t i = 0; i < bindInfoCount; i++) {
2135 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2136 }
2137}
2138
2139void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
2140 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2141 for (uint32_t i = 0; i < bindInfoCount; i++) {
2142 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2143 }
2144}
2145
2146void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
2147 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2148 if (buffer_state) {
2149 buffer_state->requirements = *pMemoryRequirements;
2150 buffer_state->memory_requirements_checked = true;
2151 }
2152}
2153
2154void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2155 VkMemoryRequirements *pMemoryRequirements) {
2156 RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
2157}
2158
2159void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
2160 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2161 VkMemoryRequirements2KHR *pMemoryRequirements) {
2162 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
2163}
2164
2165void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
2166 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2167 VkMemoryRequirements2KHR *pMemoryRequirements) {
2168 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
2169}
2170
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002171void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo,
2172 VkMemoryRequirements *pMemoryRequirements) {
2173 const VkImagePlaneMemoryRequirementsInfo *plane_info =
2174 (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
2175    // TODO: Does the VkMemoryRequirements need to be saved here if PostCallRecordCreateImage tracks it regardless?
locke-lunargd556cc32019-09-17 01:21:23 -06002176 IMAGE_STATE *image_state = GetImageState(image);
2177 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002178 if (plane_info != nullptr) {
2179 // Multi-plane image
2180 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2181 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2182 image_state->plane0_memory_requirements_checked = true;
2183 image_state->plane0_requirements = *pMemoryRequirements;
2184 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2185 image_state->plane1_memory_requirements_checked = true;
2186 image_state->plane1_requirements = *pMemoryRequirements;
2187 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2188 image_state->plane2_memory_requirements_checked = true;
2189 image_state->plane2_requirements = *pMemoryRequirements;
2190 }
2191 } else {
2192 // Single Plane image
2193 image_state->requirements = *pMemoryRequirements;
2194 image_state->memory_requirements_checked = true;
2195 }
locke-lunargd556cc32019-09-17 01:21:23 -06002196 }
2197}
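// Illustrative sketch (not part of the layer): the app-side query that drives the multi-plane
// path above, assuming a hypothetical `image` created with VK_IMAGE_CREATE_DISJOINT_BIT and a
// two-plane format.
//     VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
//     VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info, image};
//     VkMemoryRequirements2 reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//     vkGetImageMemoryRequirements2(device, &info, &reqs);
//     // -> plane0_requirements is saved and plane0_memory_requirements_checked becomes true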
2198
2199void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
2200 VkMemoryRequirements *pMemoryRequirements) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002201 RecordGetImageMemoryRequirementsState(image, nullptr, pMemoryRequirements);
locke-lunargd556cc32019-09-17 01:21:23 -06002202}
2203
2204void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
2205 VkMemoryRequirements2 *pMemoryRequirements) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002206 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo, &pMemoryRequirements->memoryRequirements);
locke-lunargd556cc32019-09-17 01:21:23 -06002207}
2208
2209void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
2210 const VkImageMemoryRequirementsInfo2 *pInfo,
2211 VkMemoryRequirements2 *pMemoryRequirements) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002212 RecordGetImageMemoryRequirementsState(pInfo->image, pInfo, &pMemoryRequirements->memoryRequirements);
locke-lunargd556cc32019-09-17 01:21:23 -06002213}
2214
2215static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2216 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2217 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2218 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2219 image_state->sparse_metadata_required = true;
2220 }
2221}
2222
2223void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
2224 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
2225 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
2226 auto image_state = GetImageState(image);
2227 image_state->get_sparse_reqs_called = true;
2228 if (!pSparseMemoryRequirements) return;
2229 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2230 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
2231 }
2232}
2233
2234void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
2235 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2236 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2237 auto image_state = GetImageState(pInfo->image);
2238 image_state->get_sparse_reqs_called = true;
2239 if (!pSparseMemoryRequirements) return;
2240 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2241 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2242 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2243 }
2244}
2245
2246void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
2247 VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
2248 VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
2249 auto image_state = GetImageState(pInfo->image);
2250 image_state->get_sparse_reqs_called = true;
2251 if (!pSparseMemoryRequirements) return;
2252 for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
2253 assert(!pSparseMemoryRequirements[i].pNext); // TODO: If an extension is ever added here we need to handle it
2254 RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
2255 }
2256}
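// Illustrative sketch (not part of the layer): the usual two-call enumeration pattern that
// lands in the record functions above, with a hypothetical `image`.
//     uint32_t count = 0;
//     vkGetImageSparseMemoryRequirements(device, image, &count, nullptr);  // null array -> early return after get_sparse_reqs_called is set
//     std::vector<VkSparseImageMemoryRequirements> reqs(count);
//     vkGetImageSparseMemoryRequirements(device, image, &count, reqs.data());
//     // Each returned element is recorded; a metadata aspect sets sparse_metadata_required.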
2257
2258void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2259 const VkAllocationCallbacks *pAllocator) {
2260 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002261 auto shader_module_state = GetShaderModuleState(shaderModule);
2262 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002263 shaderModuleMap.erase(shaderModule);
2264}
2265
2266void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2267 const VkAllocationCallbacks *pAllocator) {
2268 if (!pipeline) return;
2269 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2270 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2271 // Any bound cmd buffers are now invalid
2272 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002273 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002274 pipelineMap.erase(pipeline);
2275}
2276
2277void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2278 const VkAllocationCallbacks *pAllocator) {
2279 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002280 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2281 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002282 pipelineLayoutMap.erase(pipelineLayout);
2283}
2284
2285void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
2286 const VkAllocationCallbacks *pAllocator) {
2287 if (!sampler) return;
2288 SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
2289 const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
2290 // Any bound cmd buffers are now invalid
2291    if (sampler_state) {
2292        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002293        sampler_state->destroyed = true;
2294    }
locke-lunargd556cc32019-09-17 01:21:23 -06002295 samplerMap.erase(sampler);
2296}
2297
2298void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2299 const VkAllocationCallbacks *pAllocator) {
2300 if (!descriptorSetLayout) return;
2301 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2302 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002303 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002304 descriptorSetLayoutMap.erase(layout_it);
2305 }
2306}
2307
2308void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2309 const VkAllocationCallbacks *pAllocator) {
2310 if (!descriptorPool) return;
2311 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2312 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2313 if (desc_pool_state) {
2314 // Any bound cmd buffers are now invalid
2315 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2316 // Free sets that were in this pool
2317 for (auto ds : desc_pool_state->sets) {
2318 FreeDescriptorSet(ds);
2319 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002320 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002321 descriptorPoolMap.erase(descriptorPool);
2322 }
2323}
2324
2325// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
2326void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
2327 const VkCommandBuffer *command_buffers) {
2328 for (uint32_t i = 0; i < command_buffer_count; i++) {
2329 auto cb_state = GetCBState(command_buffers[i]);
2330 // Remove references to command buffer's state and delete
2331 if (cb_state) {
2332 // reset prior to delete, removing various references to it.
2333 // TODO: fix this, it's insane.
2334 ResetCommandBufferState(cb_state->commandBuffer);
2335 // Remove the cb_state's references from COMMAND_POOL_STATEs
2336 pool_state->commandBuffers.erase(command_buffers[i]);
2337 // Remove the cb debug labels
2338 EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
2339 // Remove CBState from CB map
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002340 cb_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002341 commandBufferMap.erase(cb_state->commandBuffer);
2342 }
2343 }
2344}
2345
2346void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2347 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2348 auto pPool = GetCommandPoolState(commandPool);
2349 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2350}
2351
2352void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2353 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2354 VkResult result) {
2355 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002356 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002357 cmd_pool_state->createFlags = pCreateInfo->flags;
2358 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
2359 commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
2360}
2361
2362void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2363 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2364 VkResult result) {
2365 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002366 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002367 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002368 query_pool_state->pool = *pQueryPool;
2369 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2370 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
2371 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
2372
2373 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2374 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2375 switch (counter.scope) {
2376 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2377 query_pool_state->has_perf_scope_command_buffer = true;
2378 break;
2379 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2380 query_pool_state->has_perf_scope_render_pass = true;
2381 break;
2382 default:
2383 break;
2384 }
2385 }
2386
2387 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2388 &query_pool_state->n_performance_passes);
2389 }
2390
locke-lunargd556cc32019-09-17 01:21:23 -06002391 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2392
2393 QueryObject query_obj{*pQueryPool, 0u};
2394 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
2395 query_obj.query = i;
2396 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
2397 }
2398}
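// Illustrative sketch (not part of the layer): creating a performance query pool that trips the
// scope tracking above, assuming a hypothetical `counter_index` whose counter has
// VK_QUERY_SCOPE_COMMAND_BUFFER_KHR scope for queue family 0.
//     VkQueryPoolPerformanceCreateInfoKHR perf_info = {VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR};
//     perf_info.queueFamilyIndex = 0;
//     perf_info.counterIndexCount = 1;
//     perf_info.pCounterIndices = &counter_index;
//     VkQueryPoolCreateInfo ci = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, &perf_info};
//     ci.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
//     ci.queryCount = 1;
//     // -> query_pool_state->has_perf_scope_command_buffer becomes true after the switch above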
2399
2400void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
2401 const VkAllocationCallbacks *pAllocator) {
2402 if (!commandPool) return;
2403 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
2404 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
2405 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
2406 if (cp_state) {
2407 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
2408 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
2409 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002410 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002411 commandPoolMap.erase(commandPool);
2412 }
2413}
2414
2415void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
2416 VkCommandPoolResetFlags flags, VkResult result) {
2417 if (VK_SUCCESS != result) return;
2418 // Reset all of the CBs allocated from this pool
2419 auto command_pool_state = GetCommandPoolState(commandPool);
2420 for (auto cmdBuffer : command_pool_state->commandBuffers) {
2421 ResetCommandBufferState(cmdBuffer);
2422 }
2423}
2424
2425void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2426 VkResult result) {
2427 for (uint32_t i = 0; i < fenceCount; ++i) {
2428 auto pFence = GetFenceState(pFences[i]);
2429 if (pFence) {
2430 if (pFence->scope == kSyncScopeInternal) {
2431 pFence->state = FENCE_UNSIGNALED;
2432 } else if (pFence->scope == kSyncScopeExternalTemporary) {
2433 pFence->scope = kSyncScopeInternal;
2434 }
2435 }
2436 }
2437}
2438
Jeff Bolzadbfa852019-10-04 13:53:30 -05002439// For the given cb_nodes, invalidate them and track the object causing the invalidation.
2440// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
2441// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
2442// can also unlink objects from command buffers.
2443void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
2444 const VulkanTypedHandle &obj, bool unlink) {
2445 for (const auto &cb_node_pair : cb_nodes) {
2446 auto &cb_node = cb_node_pair.first;
2447 if (cb_node->state == CB_RECORDING) {
2448 cb_node->state = CB_INVALID_INCOMPLETE;
2449 } else if (cb_node->state == CB_RECORDED) {
2450 cb_node->state = CB_INVALID_COMPLETE;
2451 }
2452 cb_node->broken_bindings.push_back(obj);
2453
2454 // if secondary, then propagate the invalidation to the primaries that will call us.
2455 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
2456 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
2457 }
2458 if (unlink) {
2459 int index = cb_node_pair.second;
2460 assert(cb_node->object_bindings[index] == obj);
2461 cb_node->object_bindings[index] = VulkanTypedHandle();
2462 }
2463 }
2464 if (unlink) {
2465 cb_nodes.clear();
2466 }
2467}
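// Illustrative sketch (not part of the layer): destroying a sampler that a fully recorded
// command buffer still references drives the transitions above.
//     vkDestroySampler(device, sampler, nullptr);  // hypothetical handles
//     // For each CB in sampler_state->cb_bindings:
//     //     CB_RECORDED  -> CB_INVALID_COMPLETE
//     //     CB_RECORDING -> CB_INVALID_INCOMPLETE
//     // broken_bindings records the sampler's VulkanTypedHandle so later checks can name it.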
2468
2469void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
2470 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06002471 for (auto cb_node : cb_nodes) {
2472 if (cb_node->state == CB_RECORDING) {
2473 cb_node->state = CB_INVALID_INCOMPLETE;
2474 } else if (cb_node->state == CB_RECORDED) {
2475 cb_node->state = CB_INVALID_COMPLETE;
2476 }
2477 cb_node->broken_bindings.push_back(obj);
2478
2479 // if secondary, then propagate the invalidation to the primaries that will call us.
2480 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002481 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06002482 }
2483 }
2484}
2485
2486void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
2487 const VkAllocationCallbacks *pAllocator) {
2488 if (!framebuffer) return;
2489 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
2490 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
2491 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002492 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002493 frameBufferMap.erase(framebuffer);
2494}
2495
2496void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
2497 const VkAllocationCallbacks *pAllocator) {
2498 if (!renderPass) return;
2499 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
2500 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
2501 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002502 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002503 renderPassMap.erase(renderPass);
2504}
2505
2506void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
2507 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
2508 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002509 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002510 fence_state->fence = *pFence;
2511 fence_state->createInfo = *pCreateInfo;
2512 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
2513 fenceMap[*pFence] = std::move(fence_state);
2514}
2515
2516bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2517 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2518 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002519 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002520 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
2521 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2522 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2523 cgpl_state->pipe_state.reserve(count);
2524 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002525 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05002526 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002527 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002528 }
2529 return false;
2530}
2531
2532void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2533 const VkGraphicsPipelineCreateInfo *pCreateInfos,
2534 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2535 VkResult result, void *cgpl_state_data) {
2536 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
2537 // This API may create pipelines regardless of the return value
2538 for (uint32_t i = 0; i < count; i++) {
2539 if (pPipelines[i] != VK_NULL_HANDLE) {
2540 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2541 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
2542 }
2543 }
2544 cgpl_state->pipe_state.clear();
2545}
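// Example of why the loop above keys on VK_NULL_HANDLE rather than on `result`: with count == 2,
// creation of pipeline 1 can fail while pPipelines[0] still receives a valid handle, so state
// must be kept for every non-null entry even when the call as a whole returned an error.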
2546
2547bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2548 const VkComputePipelineCreateInfo *pCreateInfos,
2549 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002550 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002551 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2552 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
2553 ccpl_state->pipe_state.reserve(count);
2554 for (uint32_t i = 0; i < count; i++) {
2555 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002556 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06002557 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002558 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002559 }
2560 return false;
2561}
2562
2563void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
2564 const VkComputePipelineCreateInfo *pCreateInfos,
2565 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
2566 VkResult result, void *ccpl_state_data) {
2567 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
2568
2569 // This API may create pipelines regardless of the return value
2570 for (uint32_t i = 0; i < count; i++) {
2571 if (pPipelines[i] != VK_NULL_HANDLE) {
2572 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2573 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
2574 }
2575 }
2576 ccpl_state->pipe_state.clear();
2577}
2578
2579bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
2580 uint32_t count,
2581 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2582 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002583 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002584 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2585 crtpl_state->pipe_state.reserve(count);
2586 for (uint32_t i = 0; i < count; i++) {
2587 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002588 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002589 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002590 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002591 }
2592 return false;
2593}
2594
2595void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
2596 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
2597 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2598 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
2599 // This API may create pipelines regardless of the return value
2600 for (uint32_t i = 0; i < count; i++) {
2601 if (pPipelines[i] != VK_NULL_HANDLE) {
2602 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2603 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2604 }
2605 }
2606 crtpl_state->pipe_state.clear();
2607}
2608
Jeff Bolz443c2ca2020-03-19 12:11:51 -05002609bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
2610 uint32_t count,
2611 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2612 const VkAllocationCallbacks *pAllocator,
2613 VkPipeline *pPipelines, void *crtpl_state_data) const {
2614 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2615 crtpl_state->pipe_state.reserve(count);
2616 for (uint32_t i = 0; i < count; i++) {
2617 // Create and initialize internal tracking data structure
2618 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
2619 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
2620 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
2621 }
2622 return false;
2623}
2624
2625void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
2626 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
2627 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
2628 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
2629 // This API may create pipelines regardless of the return value
2630 for (uint32_t i = 0; i < count; i++) {
2631 if (pPipelines[i] != VK_NULL_HANDLE) {
2632 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
2633 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
2634 }
2635 }
2636 crtpl_state->pipe_state.clear();
2637}
2638
locke-lunargd556cc32019-09-17 01:21:23 -06002639void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
2640 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
2641 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002642 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002643}
2644
2645void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
2646 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
2647 const VkAllocationCallbacks *pAllocator,
2648 VkDescriptorSetLayout *pSetLayout, VkResult result) {
2649 if (VK_SUCCESS != result) return;
2650 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
2651}
2652
2653// For repeatable sorting, not very useful for "memory in range" search
2654struct PushConstantRangeCompare {
2655 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
2656 if (lhs->offset == rhs->offset) {
2657 if (lhs->size == rhs->size) {
2658 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
2659 return lhs->stageFlags < rhs->stageFlags;
2660 }
2661 // If the offsets are the same then sorting by the end of range is useful for validation
2662 return lhs->size < rhs->size;
2663 }
2664 return lhs->offset < rhs->offset;
2665 }
2666};
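// Example ordering under the comparator above (offset first, then size, then stageFlags):
//     {offset=0, size=4} < {offset=0, size=8} < {offset=16, size=4}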
2667
2668static PushConstantRangesDict push_constant_ranges_dict;
2669
2670PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
2671 if (!info->pPushConstantRanges) {
2672 // Hand back the empty entry (creating as needed)...
2673 return push_constant_ranges_dict.look_up(PushConstantRanges());
2674 }
2675
2676 // Sort the input ranges to ensure equivalent ranges map to the same id
2677 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
2678 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
2679 sorted.insert(info->pPushConstantRanges + i);
2680 }
2681
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07002682 PushConstantRanges ranges;
2683 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002684 for (const auto range : sorted) {
2685 ranges.emplace_back(*range);
2686 }
2687 return push_constant_ranges_dict.look_up(std::move(ranges));
2688}
2689
2690// Dictionary of the canonical form of a pipeline layout's list of descriptor set layouts
2691static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
2692
2693// Dictionary of canonical form of the "compatible for set" records
2694static PipelineLayoutCompatDict pipeline_layout_compat_dict;
2695
2696static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
2697 const PipelineLayoutSetLayoutsId set_layouts_id) {
2698 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
2699}
2700
2701void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
2702 const VkAllocationCallbacks *pAllocator,
2703 VkPipelineLayout *pPipelineLayout, VkResult result) {
2704 if (VK_SUCCESS != result) return;
2705
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002706 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002707 pipeline_layout_state->layout = *pPipelineLayout;
2708 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
2709 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
2710 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002711 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06002712 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
2713 }
2714
2715 // Get canonical form IDs for the "compatible for set" contents
2716 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
2717 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
2718 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
2719
2720    // Create table of "compatible for set N" canonical forms for trivial-accept validation
2721 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
2722 pipeline_layout_state->compat_for_set.emplace_back(
2723 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
2724 }
2725 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
2726}
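// Sketch of what compat_for_set buys (not part of the layer): two hypothetical pipeline layouts
// built from the same descriptor set layouts and push constant ranges receive identical
// compat_for_set[i] ids, so "compatible for set i" checks reduce to a single id comparison
// instead of a deep structural compare.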
2727
2728void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
2729 const VkAllocationCallbacks *pAllocator,
2730 VkDescriptorPool *pDescriptorPool, VkResult result) {
2731 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002732 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06002733}
2734
2735void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2736 VkDescriptorPoolResetFlags flags, VkResult result) {
2737 if (VK_SUCCESS != result) return;
2738 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
2739 // TODO: validate flags
2740    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
2741 for (auto ds : pPool->sets) {
2742 FreeDescriptorSet(ds);
2743 }
2744 pPool->sets.clear();
2745 // Reset available count for each type and available sets for this pool
2746 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
2747 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
2748 }
2749 pPool->availableSets = pPool->maxSets;
2750}
2751
2752bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
2753 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05002754 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06002755 // Always update common data
2756 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2757 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2758 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
2759
2760 return false;
2761}
2762
2763// Allocation state was good and the call down the chain was made, so update state based on the allocated descriptor sets
2764void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
2765 VkDescriptorSet *pDescriptorSets, VkResult result,
2766 void *ads_state_data) {
2767 if (VK_SUCCESS != result) return;
2768 // All the updates are contained in a single cvdescriptorset function
2769 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
2770 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
2771 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
2772}
2773
2774void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
2775 const VkDescriptorSet *pDescriptorSets) {
2776 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
2777 // Update available descriptor sets in pool
2778 pool_state->availableSets += count;
2779
2780 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
2781 for (uint32_t i = 0; i < count; ++i) {
2782 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
2783 auto descriptor_set = setMap[pDescriptorSets[i]].get();
2784 uint32_t type_index = 0, descriptor_count = 0;
2785 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
2786 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
2787 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
2788 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
2789 }
2790 FreeDescriptorSet(descriptor_set);
2791 pool_state->sets.erase(descriptor_set);
2792 }
2793 }
2794}
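// Example of the accounting above: freeing one set whose layout holds 3 UNIFORM_BUFFER
// descriptors and 1 COMBINED_IMAGE_SAMPLER descriptor adds 3 and 1 back to the matching
// availableDescriptorTypeCount entries, and availableSets grows by the freed-set count (1).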
2795
2796void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
2797 const VkWriteDescriptorSet *pDescriptorWrites,
2798 uint32_t descriptorCopyCount,
2799 const VkCopyDescriptorSet *pDescriptorCopies) {
2800 cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
2801 pDescriptorCopies);
2802}
2803
2804void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
2805 VkCommandBuffer *pCommandBuffer, VkResult result) {
2806 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002807 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06002808 if (pPool) {
2809 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
2810 // Add command buffer to its commandPool map
2811 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002812 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002813 pCB->createInfo = *pCreateInfo;
2814 pCB->device = device;
Jeff Bolz6835fda2019-10-06 00:15:34 -05002815 pCB->command_pool = pPool;
locke-lunargd556cc32019-09-17 01:21:23 -06002816 // Add command buffer to map
2817 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
2818 ResetCommandBufferState(pCommandBuffer[i]);
2819 }
2820 }
2821}
2822
2823// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
2824void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05002825 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06002826 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07002827 // If imageless fb, skip fb binding
2828 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06002829 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
2830 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
2831 auto view_state = GetAttachmentImageViewState(fb_state, attachment);
2832 if (view_state) {
2833 AddCommandBufferBindingImageView(cb_state, view_state);
2834 }
2835 }
2836}
2837
2838void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
2839 const VkCommandBufferBeginInfo *pBeginInfo) {
2840 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2841 if (!cb_state) return;
locke-lunargd556cc32019-09-17 01:21:23 -06002842 if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
2843 // Secondary Command Buffer
2844 const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
2845 if (pInfo) {
2846 if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
2847 assert(pInfo->renderPass);
2848 auto framebuffer = GetFramebufferState(pInfo->framebuffer);
2849 if (framebuffer) {
2850 // Connect this framebuffer and its children to this cmdBuffer
2851 AddFramebufferBinding(cb_state, framebuffer);
2852 }
2853 }
2854 }
2855 }
2856 if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
2857 ResetCommandBufferState(commandBuffer);
2858 }
2859 // Set updated state here in case implicit reset occurs above
2860 cb_state->state = CB_RECORDING;
2861 cb_state->beginInfo = *pBeginInfo;
2862 if (cb_state->beginInfo.pInheritanceInfo) {
2863 cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
2864 cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
2865        // If we are a secondary command buffer and inheriting, update the items we should inherit.
2866 if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
2867 (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
2868 cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
2869 cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
2870 cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
2871 cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
2872 }
2873 }
2874
2875 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
2876 if (chained_device_group_struct) {
2877 cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
2878 } else {
2879 cb_state->initial_device_mask = (1 << physical_device_count) - 1;
2880 }
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002881
2882 cb_state->performance_lock_acquired = performance_lock_acquired;
locke-lunargd556cc32019-09-17 01:21:23 -06002883}
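// Example of the device-mask default above: with physical_device_count == 2 and no
// VkDeviceGroupCommandBufferBeginInfo chained, initial_device_mask = (1 << 2) - 1 = 0b11,
// i.e. commands are assumed to target every physical device in the group.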
2884
2885void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
2886 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2887 if (!cb_state) return;
2888 // Cached validation is specific to a specific recording of a specific command buffer.
2889 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
2890 descriptor_set->ClearCachedValidation(cb_state);
2891 }
2892 cb_state->validated_descriptor_sets.clear();
2893 if (VK_SUCCESS == result) {
2894 cb_state->state = CB_RECORDED;
2895 }
2896}
2897
2898void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
2899 VkResult result) {
2900 if (VK_SUCCESS == result) {
2901 ResetCommandBufferState(commandBuffer);
2902 }
2903}
2904
2905CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
2906 // initially assume everything is static state
2907 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
2908
2909 if (ds) {
2910 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
2911 switch (ds->pDynamicStates[i]) {
2912 case VK_DYNAMIC_STATE_LINE_WIDTH:
2913 flags &= ~CBSTATUS_LINE_WIDTH_SET;
2914 break;
2915 case VK_DYNAMIC_STATE_DEPTH_BIAS:
2916 flags &= ~CBSTATUS_DEPTH_BIAS_SET;
2917 break;
2918 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
2919 flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
2920 break;
2921 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
2922 flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
2923 break;
2924 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
2925 flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
2926 break;
2927 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
2928 flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
2929 break;
2930 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
2931 flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
2932 break;
2933 case VK_DYNAMIC_STATE_SCISSOR:
2934 flags &= ~CBSTATUS_SCISSOR_SET;
2935 break;
2936 case VK_DYNAMIC_STATE_VIEWPORT:
2937 flags &= ~CBSTATUS_VIEWPORT_SET;
2938 break;
2939 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
2940 flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
2941 break;
2942 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
2943 flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
2944 break;
2945 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
2946 flags &= ~CBSTATUS_LINE_STIPPLE_SET;
2947 break;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02002948 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
2949 flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
2950 break;
locke-lunargd556cc32019-09-17 01:21:23 -06002951 default:
2952 break;
2953 }
2954 }
2955 }
2956
2957 return flags;
2958}
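// Example: a pipeline declaring VK_DYNAMIC_STATE_VIEWPORT and VK_DYNAMIC_STATE_SCISSOR yields
// CBSTATUS_ALL_STATE_SET & ~(CBSTATUS_VIEWPORT_SET | CBSTATUS_SCISSOR_SET), so the command
// buffer must record vkCmdSetViewport/vkCmdSetScissor before those status bits read as set.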
2959
2960// Validation cache:
2961// CV is the bottommost implementor of this extension. Don't pass calls down.
2962// Utility function to set collective state for a pipeline
2963void SetPipelineState(PIPELINE_STATE *pPipe) {
2964 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
2965 if (pPipe->graphicsPipelineCI.pColorBlendState) {
2966 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
2967 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
2968 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2969 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2970 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2971 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2972 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2973 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
2974 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
2975 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
2976 pPipe->blendConstantsEnabled = true;
2977 }
2978 }
2979 }
2980 }
2981}
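// Example: an attachment with blendEnable == VK_TRUE and
// srcColorBlendFactor == VK_BLEND_FACTOR_CONSTANT_COLOR falls inside the constant-factor range
// tested above, so blendConstantsEnabled becomes true and vkCmdSetBlendConstants matters.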
2982
2983void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
2984 VkPipeline pipeline) {
2985 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
2986 assert(cb_state);
2987
2988 auto pipe_state = GetPipelineState(pipeline);
2989 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
2990 cb_state->status &= ~cb_state->static_status;
2991 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
2992 cb_state->status |= cb_state->static_status;
2993 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002994 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06002995 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
2996 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05002997 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002998}
2999
3000void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3001 uint32_t viewportCount, const VkViewport *pViewports) {
3002 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3003 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3004 cb_state->status |= CBSTATUS_VIEWPORT_SET;
3005}
3006
3007void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3008 uint32_t exclusiveScissorCount,
3009 const VkRect2D *pExclusiveScissors) {
3010 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3011 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3012 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3013 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
3014}
3015
3016void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3017 VkImageLayout imageLayout) {
3018 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3019
3020 if (imageView != VK_NULL_HANDLE) {
3021 auto view_state = GetImageViewState(imageView);
3022 AddCommandBufferBindingImageView(cb_state, view_state);
3023 }
3024}
3025
3026void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3027 uint32_t viewportCount,
3028 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3029 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3030 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3031 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3032 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
3033}
3034
3035void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3036 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3037 const VkAllocationCallbacks *pAllocator,
3038 VkAccelerationStructureNV *pAccelerationStructure,
3039 VkResult result) {
3040 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003041 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003042
3043 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3044 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
3045 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3046 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3047 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3048 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3049
3050 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
3051 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3052 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3053 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3054 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3055 &as_state->build_scratch_memory_requirements);
3056
3057 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
3058 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3059 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3060 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3061 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3062 &as_state->update_scratch_memory_requirements);
3063
3064 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3065}
3066
Jeff Bolz95176d02020-04-01 00:36:16 -05003067void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3068 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3069 const VkAllocationCallbacks *pAllocator,
3070 VkAccelerationStructureKHR *pAccelerationStructure,
3071 VkResult result) {
3072 if (VK_SUCCESS != result) return;
3073 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
3074
3075 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3076 VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
3077 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3078 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
3079 as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3080 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3081 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);
3082
3083 VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
3084 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3085 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
3086 scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3087 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3088 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
3089 &as_state->build_scratch_memory_requirements);
3090
3091 VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
3092 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3093 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
3094 update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3095 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3096 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
3097 &as_state->update_scratch_memory_requirements);
3098
3099 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3100}
3101
locke-lunargd556cc32019-09-17 01:21:23 -06003102void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3103 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3104 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3105 if (as_state != nullptr) {
3106 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3107 as_state->memory_requirements = *pMemoryRequirements;
3108 as_state->memory_requirements_checked = true;
3109 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3110 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3111 as_state->build_scratch_memory_requirements_checked = true;
3112 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3113 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3114 as_state->update_scratch_memory_requirements_checked = true;
3115 }
3116 }
3117}
3118
Jeff Bolz95176d02020-04-01 00:36:16 -05003119void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
3120 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
3121 bool isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003122 if (VK_SUCCESS != result) return;
3123 for (uint32_t i = 0; i < bindInfoCount; i++) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003124 const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003125
3126 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
3127 if (as_state) {
3128 // Track bound memory range information
3129 auto mem_info = GetDevMemState(info.memory);
3130 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07003131 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06003132 }
3133 // Track objects tied to memory
3134 SetMemBinding(info.memory, as_state, info.memoryOffset,
Jeff Bolz95176d02020-04-01 00:36:16 -05003135 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));
locke-lunargd556cc32019-09-17 01:21:23 -06003136
3137 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003138 // XXX TODO: Query device address for KHR extension
3139 if (enabled.gpu_validation && isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003140 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3141 }
3142 }
3143 }
3144}
3145
Jeff Bolz95176d02020-04-01 00:36:16 -05003146void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3147 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
3148 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
3149}
3150
3151void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
3152 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
3153 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
3154}
3155
locke-lunargd556cc32019-09-17 01:21:23 -06003156void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3157 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3158 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3159 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3160 if (cb_state == nullptr) {
3161 return;
3162 }
3163
3164 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3165 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3166 if (dst_as_state != nullptr) {
3167 dst_as_state->built = true;
3168 dst_as_state->build_info.initialize(pInfo);
3169 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3170 }
3171 if (src_as_state != nullptr) {
3172 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3173 }
3174 cb_state->hasBuildAccelerationStructureCmd = true;
3175}
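
// Illustrative app-side call (hypothetical handles): the build command recorded above. An update
// build (update == VK_TRUE) names a src structure, which is why both dst and src receive
// command-buffer bindings.
//
//     vkCmdBuildAccelerationStructureNV(app_cmd, &app_build_info,
//                                       /*instanceData=*/VK_NULL_HANDLE, /*instanceOffset=*/0,
//                                       /*update=*/VK_TRUE, app_dst_as, app_src_as,
//                                       app_scratch_buffer, /*scratchOffset=*/0);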
3176
3177void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3178 VkAccelerationStructureNV dst,
3179 VkAccelerationStructureNV src,
3180 VkCopyAccelerationStructureModeNV mode) {
3181 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3182 if (cb_state) {
3183 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3184 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3185 if (dst_as_state != nullptr && src_as_state != nullptr) {
3186 dst_as_state->built = true;
3187 dst_as_state->build_info = src_as_state->build_info;
3188 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3189 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3190 }
3191 }
3192}
3193
Jeff Bolz95176d02020-04-01 00:36:16 -05003194void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3195 VkAccelerationStructureKHR accelerationStructure,
3196 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003197 if (!accelerationStructure) return;
3198 auto *as_state = GetAccelerationStructureState(accelerationStructure);
3199 if (as_state) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003200 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06003201 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
3202 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07003203 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -06003204 }
3205 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003206 as_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003207 accelerationStructureMap.erase(accelerationStructure);
3208 }
3209}
3210
Jeff Bolz95176d02020-04-01 00:36:16 -05003211void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3212 VkAccelerationStructureNV accelerationStructure,
3213 const VkAllocationCallbacks *pAllocator) {
3214 PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
3215}
3216
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003217void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3218 uint32_t viewportCount,
3219 const VkViewportWScalingNV *pViewportWScalings) {
3220 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3221 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
3222}
3223
locke-lunargd556cc32019-09-17 01:21:23 -06003224void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3225 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3226 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
3227}
3228
3229void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3230 uint16_t lineStipplePattern) {
3231 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3232 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
3233}
3234
3235void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3236 float depthBiasClamp, float depthBiasSlopeFactor) {
3237 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3238 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
3239}
3240
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003241void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3242 const VkRect2D *pScissors) {
3243 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3244 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3245 cb_state->status |= CBSTATUS_SCISSOR_SET;
3246}
3247
locke-lunargd556cc32019-09-17 01:21:23 -06003248void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3249 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3250 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
3251}
3252
3253void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3254 float maxDepthBounds) {
3255 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3256 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
3257}
3258
3259void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3260 uint32_t compareMask) {
3261 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3262 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
3263}
3264
3265void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3266 uint32_t writeMask) {
3267 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3268 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
3269}
3270
3271void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3272 uint32_t reference) {
3273 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3274 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
3275}
3276
3277// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
3278// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
3279// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
3280void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
3281 const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
3282 uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
3283 cvdescriptorset::DescriptorSet *push_descriptor_set,
3284 uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
3285 assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
3286    // Defensive: pipeline_layout should never be null here; bail out in release builds if it is
3287 assert(pipeline_layout);
3288 if (!pipeline_layout) return;
3289
3290 uint32_t required_size = first_set + set_count;
3291 const uint32_t last_binding_index = required_size - 1;
3292 assert(last_binding_index < pipeline_layout->compat_for_set.size());
3293
3294 // Some useful shorthand
3295 auto &last_bound = cb_state->lastBound[pipeline_bind_point];
3296 auto &pipe_compat_ids = pipeline_layout->compat_for_set;
3297 const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
3298
3299 // We need this three times in this function, but nowhere else
3300 auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
3301 if (ds && ds->IsPushDescriptor()) {
3302 assert(ds == last_bound.push_descriptor_set.get());
3303 last_bound.push_descriptor_set = nullptr;
3304 return true;
3305 }
3306 return false;
3307 };
3308
3309    // Clean up the "disturbed" sets before and after the range being set
3310 if (required_size < current_size) {
3311 if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
3312            // We're disturbing those past the last one; we'll shrink below, but first we need to check for and clean up the push_descriptor
3313 for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
3314 if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
3315 }
3316 } else {
3317 // We're not disturbing past last, so leave the upper binding data alone.
3318 required_size = current_size;
3319 }
3320 }
3321
3322 // We resize if we need more set entries or if those past "last" are disturbed
3323 if (required_size != current_size) {
3324 last_bound.per_set.resize(required_size);
3325 }
3326
3327    // For any previously bound sets, mark them "invalid" if they were disturbed by this update
3328 for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
3329 if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
3330 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3331 last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
3332 last_bound.per_set[set_idx].dynamicOffsets.clear();
3333 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
3334 }
3335 }
3336
3337 // Now update the bound sets with the input sets
3338 const uint32_t *input_dynamic_offsets = p_dynamic_offsets; // "read" pointer for dynamic offset data
3339 for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
3340 auto set_idx = input_idx + first_set; // set_idx is index within layout, input_idx is index within input descriptor sets
3341 cvdescriptorset::DescriptorSet *descriptor_set =
3342 push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
3343
3344 // Record binding (or push)
3345 if (descriptor_set != last_bound.push_descriptor_set.get()) {
3346 // Only cleanup the push descriptors if they aren't the currently used set.
3347 push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
3348 }
3349 last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
3350 last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx]; // compat ids are canonical *per* set index
3351
3352 if (descriptor_set) {
3353 auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
3354 // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
3355 if (set_dynamic_descriptor_count && input_dynamic_offsets) {
3356 const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
3357 last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
3358 input_dynamic_offsets = end_offset;
3359 assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
3360 } else {
3361 last_bound.per_set[set_idx].dynamicOffsets.clear();
3362 }
3363 if (!descriptor_set->IsPushDescriptor()) {
3364 // Can't cache validation of push_descriptors
3365 cb_state->validated_descriptor_sets.insert(descriptor_set);
3366 }
3367 }
3368 }
3369}
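
// Worked example of the compatibility rules above (a sketch; the layout and set names are
// hypothetical). If layout_ab = {set0: A, set1: B} and layout_ac = {set0: A, set1: C}, the two
// layouts share compat ids for set 0 but not for set 1:
//
//     vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout_ab,
//                             /*firstSet=*/0, 2, sets_ab, 0, nullptr);
//     vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout_ac,
//                             /*firstSet=*/1, 1, &set_c, 0, nullptr);
//     // Set 0 survives (matching compat_id_for_set); set 1 is rebound, and any sets past the
//     // disturbed index would be invalidated by the loop above.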
3370
3371// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3372void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3373 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3374 uint32_t firstSet, uint32_t setCount,
3375 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3376 const uint32_t *pDynamicOffsets) {
3377 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3378 auto pipeline_layout = GetPipelineLayout(layout);
3379
3380 // Resize binding arrays
3381 uint32_t last_set_index = firstSet + setCount - 1;
3382 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3383 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3384 }
3385
3386 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3387 dynamicOffsetCount, pDynamicOffsets);
3388 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3389 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3390}
3391
3392void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
3393 VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
3394 const VkWriteDescriptorSet *pDescriptorWrites) {
3395 const auto &pipeline_layout = GetPipelineLayout(layout);
3396 // Short circuit invalid updates
3397 if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
3398 !pipeline_layout->set_layouts[set]->IsPushDescriptor())
3399 return;
3400
3401 // We need a descriptor set to update the bindings with, compatible with the passed layout
3402 const auto dsl = pipeline_layout->set_layouts[set];
3403 auto &last_bound = cb_state->lastBound[pipelineBindPoint];
3404 auto &push_descriptor_set = last_bound.push_descriptor_set;
3405    // If we are disturbing the current push_descriptor_set, clear it
3406 if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
John Zulaufd2c3dae2019-12-12 11:02:17 -07003407 last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
locke-lunargd556cc32019-09-17 01:21:23 -06003408 }
3409
3410 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
3411 nullptr);
3412 last_bound.pipeline_layout = layout;
3413
3414 // Now that we have either the new or extant push_descriptor set ... do the write updates against it
Jeff Bolz41a1ced2019-10-11 11:40:49 -05003415 push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
locke-lunargd556cc32019-09-17 01:21:23 -06003416}
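
// Illustrative app-side call (hypothetical handles): a push that lands in the transient
// push_descriptor_set managed above rather than in a bound VkDescriptorSet (dstSet is ignored
// for push descriptors).
//
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.dstBinding = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     write.pBufferInfo = &app_buffer_info;
//     vkCmdPushDescriptorSetKHR(app_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, app_layout,
//                               /*set=*/0, 1, &write);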
3417
3418void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
3419 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3420 uint32_t set, uint32_t descriptorWriteCount,
3421 const VkWriteDescriptorSet *pDescriptorWrites) {
3422 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3423 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
3424}
3425
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06003426void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
3427 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
3428 const void *pValues) {
3429 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3430 if (cb_state != nullptr) {
3431 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3432
3433 auto &push_constant_data = cb_state->push_constant_data;
3434 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
3435 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
3436 }
3437}
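
// Illustrative app-side call (hypothetical names): the recorded bytes are memcpy'd into
// cb_state->push_constant_data at the given offset, so the (offset + size) assert above mirrors
// the requirement that writes stay inside the layout's push constant ranges.
//
//     float app_constants[4] = {1.0f, 0.0f, 0.0f, 1.0f};
//     vkCmdPushConstants(app_cmd, app_layout, VK_SHADER_STAGE_FRAGMENT_BIT,
//                        /*offset=*/0, sizeof(app_constants), app_constants);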
3438
locke-lunargd556cc32019-09-17 01:21:23 -06003439void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3440 VkIndexType indexType) {
3441 auto buffer_state = GetBufferState(buffer);
3442 auto cb_state = GetCBState(commandBuffer);
3443
3444 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
3445 cb_state->index_buffer_binding.buffer = buffer;
3446 cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
3447 cb_state->index_buffer_binding.offset = offset;
3448 cb_state->index_buffer_binding.index_type = indexType;
3449 // Add binding for this index buffer to this commandbuffer
3450 AddCommandBufferBindingBuffer(cb_state, buffer_state);
3451}
3452
3453void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
3454 uint32_t bindingCount, const VkBuffer *pBuffers,
3455 const VkDeviceSize *pOffsets) {
3456 auto cb_state = GetCBState(commandBuffer);
3457
3458 uint32_t end = firstBinding + bindingCount;
3459 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
3460 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
3461 }
3462
3463 for (uint32_t i = 0; i < bindingCount; ++i) {
3464 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
3465 vertex_buffer_binding.buffer = pBuffers[i];
3466 vertex_buffer_binding.offset = pOffsets[i];
3467 // Add binding for this vertex buffer to this commandbuffer
3468 AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
3469 }
3470}
3471
3472void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
3473 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
3474 auto cb_state = GetCBState(commandBuffer);
3475 auto dst_buffer_state = GetBufferState(dstBuffer);
3476
3477 // Update bindings between buffer and cmd buffer
3478 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
3479}
3480
Jeff Bolz310775c2019-10-09 00:46:33 -05003481bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
3482 EventToStageMap *localEventToStageMap) {
3483 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06003484 return false;
3485}
3486
3487void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3488 VkPipelineStageFlags stageMask) {
3489 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3490 auto event_state = GetEventState(event);
3491 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003492 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003493 }
3494 cb_state->events.push_back(event);
3495 if (!cb_state->waitedEvents.count(event)) {
3496 cb_state->writeEventsBeforeWait.push_back(event);
3497 }
Jeff Bolz310775c2019-10-09 00:46:33 -05003498 cb_state->eventUpdates.emplace_back(
3499 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
3500 return SetEventStageMask(event, stageMask, localEventToStageMap);
3501 });
locke-lunargd556cc32019-09-17 01:21:23 -06003502}
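
// The lambdas queued on cb_state->eventUpdates are not evaluated here; they are replayed later,
// during queue-submit processing, against a per-submit map. A minimal sketch of that replay,
// assuming a local EventToStageMap:
//
//     EventToStageMap local_event_map;
//     for (const auto &update : cb_state->eventUpdates) {
//         update(device_data, /*do_validate=*/false, &local_event_map);
//     }
//     // local_event_map now holds the stage mask each event would end up with.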
3503
3504void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
3505 VkPipelineStageFlags stageMask) {
3506 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3507 auto event_state = GetEventState(event);
3508 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003509 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003510 }
3511 cb_state->events.push_back(event);
3512 if (!cb_state->waitedEvents.count(event)) {
3513 cb_state->writeEventsBeforeWait.push_back(event);
3514 }
3515
3516 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05003517 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
3518 return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
3519 });
locke-lunargd556cc32019-09-17 01:21:23 -06003520}
3521
3522void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
3523 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
3524 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
3525 uint32_t bufferMemoryBarrierCount,
3526 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
3527 uint32_t imageMemoryBarrierCount,
3528 const VkImageMemoryBarrier *pImageMemoryBarriers) {
3529 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3530 for (uint32_t i = 0; i < eventCount; ++i) {
3531 auto event_state = GetEventState(pEvents[i]);
3532 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003533 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
3534 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003535 }
3536 cb_state->waitedEvents.insert(pEvents[i]);
3537 cb_state->events.push_back(pEvents[i]);
3538 }
3539}
3540
Jeff Bolz310775c2019-10-09 00:46:33 -05003541bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
3542 (*localQueryToStateMap)[object] = value;
3543 return false;
3544}
3545
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003546bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
3547 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05003548 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003549 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05003550 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06003551 }
3552 return false;
3553}
3554
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003555QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
3556 uint32_t perfPass) const {
3557 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06003558
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003559 auto iter = localQueryToStateMap->find(query);
3560 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05003561
Jeff Bolz310775c2019-10-09 00:46:33 -05003562 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06003563}
3564
3565void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003566 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003567 cb_state->activeQueries.insert(query_obj);
3568 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003569 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3570 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3571 QueryMap *localQueryToStateMap) {
3572 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
3573 return false;
3574 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003575 auto pool_state = GetQueryPoolState(query_obj.pool);
3576 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
3577 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003578}
3579
3580void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
3581 VkFlags flags) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003582 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003583 QueryObject query = {queryPool, slot};
3584 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3585 RecordCmdBeginQuery(cb_state, query);
3586}
3587
3588void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003589 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003590 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003591 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
3592 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3593 QueryMap *localQueryToStateMap) {
3594 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3595 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003596 auto pool_state = GetQueryPoolState(query_obj.pool);
3597 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
3598 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003599}
3600
3601void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003602 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003603 QueryObject query_obj = {queryPool, slot};
3604 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3605 RecordCmdEndQuery(cb_state, query_obj);
3606}
3607
3608void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3609 uint32_t firstQuery, uint32_t queryCount) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003610 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003611 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3612
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02003613 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
3614 QueryObject query = {queryPool, slot};
3615 cb_state->resetQueries.insert(query);
3616 }
3617
Jeff Bolz310775c2019-10-09 00:46:33 -05003618 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003619 bool do_validate, VkQueryPool &firstPerfQueryPool,
3620 uint32_t perfQueryPass,
3621 QueryMap *localQueryToStateMap) {
3622 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06003623 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05003624 auto pool_state = GetQueryPoolState(queryPool);
3625 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003626 cb_state);
3627}
3628
3629void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
3630 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
3631 VkDeviceSize dstOffset, VkDeviceSize stride,
3632 VkQueryResultFlags flags) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003633 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003634 auto cb_state = GetCBState(commandBuffer);
3635 auto dst_buff_state = GetBufferState(dstBuffer);
3636 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003637 auto pool_state = GetQueryPoolState(queryPool);
3638 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003639 cb_state);
3640}
3641
3642void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
3643 VkQueryPool queryPool, uint32_t slot) {
Jeff Bolz047f3012019-10-09 23:52:23 -05003644 if (disabled.query_validation) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003645 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003646 auto pool_state = GetQueryPoolState(queryPool);
3647 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003648 cb_state);
3649 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02003650 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
3651 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
3652 QueryMap *localQueryToStateMap) {
3653 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
3654 });
locke-lunargd556cc32019-09-17 01:21:23 -06003655}
3656
3657void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
3658 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
3659 VkResult result) {
3660 if (VK_SUCCESS != result) return;
3661 // Shadow create info and store in map
Jeff Bolz6ae39612019-10-11 20:57:36 -05003662 auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
locke-lunargd556cc32019-09-17 01:21:23 -06003663
3664 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
3665 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
3666 VkImageView view = pCreateInfo->pAttachments[i];
3667 auto view_state = GetImageViewState(view);
3668 if (!view_state) {
3669 continue;
3670 }
3671 }
3672 }
3673 frameBufferMap[*pFramebuffer] = std::move(fb_state);
3674}
3675
3676void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3677 RENDER_PASS_STATE *render_pass) {
3678 auto &subpass_to_node = render_pass->subpassToNode;
3679 subpass_to_node.resize(pCreateInfo->subpassCount);
3680 auto &self_dependencies = render_pass->self_dependencies;
3681 self_dependencies.resize(pCreateInfo->subpassCount);
3682
3683 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
3684 subpass_to_node[i].pass = i;
3685 self_dependencies[i].clear();
3686 }
3687 for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
3688 const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
3689 if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
3690 if (dependency.srcSubpass == dependency.dstSubpass) {
3691 self_dependencies[dependency.srcSubpass].push_back(i);
3692 } else {
3693 subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
3694 subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
3695 }
3696 }
3697 }
3698}
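
// Worked example of the DAG built above: with two subpasses and
// pDependencies = { {srcSubpass=0, dstSubpass=1}, {srcSubpass=1, dstSubpass=1} },
// the loop produces
//     subpass_to_node[0].next == {1} and subpass_to_node[1].prev == {0}
// and records the second dependency (src == dst) as self_dependencies[1] == {1}, where the
// stored value is the dependency's index. VK_SUBPASS_EXTERNAL edges are skipped entirely.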
3699
3700static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
3701 if (index == VK_ATTACHMENT_UNUSED) return;
3702
3703 if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
3704}
3705
3706void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
3707 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
3708 VkRenderPass *pRenderPass) {
3709 render_pass->renderPass = *pRenderPass;
3710 auto create_info = render_pass->createInfo.ptr();
3711
3712 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
3713
3714 for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
3715 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
3716 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
3717 MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
3718
3719 // resolve attachments are considered to be written
3720 if (subpass.pResolveAttachments) {
3721 MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
3722 }
3723 }
3724 if (subpass.pDepthStencilAttachment) {
3725 MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
3726 }
3727 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
3728 MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
3729 }
3730 }
3731
3732    // Even though render_pass arrives by reference, we must still std::move it so that move assignment (not a copy) is invoked.
3733 renderPassMap[*pRenderPass] = std::move(render_pass);
3734}
3735
3736// Style note:
3737// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces caller to move
3738// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
3739// construction or assignment.
3740void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
3741 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3742 VkResult result) {
3743 if (VK_SUCCESS != result) return;
3744 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3745 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
3746}
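
// A minimal sketch of the cost difference the style note describes (names hypothetical):
//
//     std::shared_ptr<RENDER_PASS_STATE> rp_state = std::make_shared<RENDER_PASS_STATE>(ci);
//     renderPassMap[handle] = rp_state;             // copy-assign: atomic refcount increment
//     renderPassMap[handle] = std::move(rp_state);  // move-assign: pointer steal, no atomics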
3747
Tony-LunarG977448c2019-12-02 14:52:02 -07003748void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3749 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3750 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06003751 if (VK_SUCCESS != result) return;
3752 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
3753 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
3754}
3755
Tony-LunarG977448c2019-12-02 14:52:02 -07003756void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3757 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3758 VkResult result) {
3759 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
3760}
3761
3762void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
3763 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
3764 VkResult result) {
3765 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
3766}
3767
locke-lunargd556cc32019-09-17 01:21:23 -06003768void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
3769 const VkRenderPassBeginInfo *pRenderPassBegin,
3770 const VkSubpassContents contents) {
3771 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3772 auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
3773 auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
3774
3775 if (render_pass_state) {
3776 cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
3777 cb_state->activeRenderPass = render_pass_state;
Tony-LunarG61e7c0c2020-03-03 16:09:11 -07003778 cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
locke-lunargd556cc32019-09-17 01:21:23 -06003779 cb_state->activeSubpass = 0;
3780 cb_state->activeSubpassContents = contents;
3781 cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
3782 // Connect this framebuffer and its children to this cmdBuffer
3783 AddFramebufferBinding(cb_state, framebuffer);
3784 // Connect this RP to cmdBuffer
Jeff Bolzadbfa852019-10-04 13:53:30 -05003785 AddCommandBufferBinding(render_pass_state->cb_bindings,
3786 VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
3787 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003788
3789 auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
3790 if (chained_device_group_struct) {
3791 cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
3792 } else {
3793 cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
3794 }
3795 }
3796}
3797
3798void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
3799 const VkRenderPassBeginInfo *pRenderPassBegin,
3800 VkSubpassContents contents) {
3801 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
3802}
3803
3804void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
3805 const VkRenderPassBeginInfo *pRenderPassBegin,
3806 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
3807 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3808}
3809
Tony-LunarG977448c2019-12-02 14:52:02 -07003810void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
3811 const VkRenderPassBeginInfo *pRenderPassBegin,
3812 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
3813 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
3814}
3815
locke-lunargd556cc32019-09-17 01:21:23 -06003816void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3817 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3818 cb_state->activeSubpass++;
3819 cb_state->activeSubpassContents = contents;
3820}
3821
3822void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
3823 RecordCmdNextSubpass(commandBuffer, contents);
3824}
3825
3826void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
3827 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
3828 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3829 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3830}
3831
Tony-LunarG977448c2019-12-02 14:52:02 -07003832void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
3833 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
3834 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3835 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
3836}
3837
locke-lunargd556cc32019-09-17 01:21:23 -06003838void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
3839 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3840 cb_state->activeRenderPass = nullptr;
3841 cb_state->activeSubpass = 0;
3842 cb_state->activeFramebuffer = VK_NULL_HANDLE;
3843}
3844
3845void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
3846 RecordCmdEndRenderPassState(commandBuffer);
3847}
3848
3849void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
3850 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3851 RecordCmdEndRenderPassState(commandBuffer);
3852}
3853
Tony-LunarG977448c2019-12-02 14:52:02 -07003854void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
3855 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
3856 RecordCmdEndRenderPassState(commandBuffer);
3857}

locke-lunargd556cc32019-09-17 01:21:23 -06003858void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
3859 const VkCommandBuffer *pCommandBuffers) {
3860 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3861
3862    CMD_BUFFER_STATE *sub_cb_state = nullptr;
3863 for (uint32_t i = 0; i < commandBuffersCount; i++) {
3864 sub_cb_state = GetCBState(pCommandBuffers[i]);
3865 assert(sub_cb_state);
3866 if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
3867 if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
3868 // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
3869 // from the validation step to the recording step
3870 cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
3871 }
3872 }
3873
3874        // Propagate initial layout and current layout state to the primary cmd buffer
3875        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
3876        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
3877        // for those other classes.
3878 for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
3879 const auto image = sub_layout_map_entry.first;
3880 const auto *image_state = GetImageState(image);
3881 if (!image_state) continue; // Can't set layouts of a dead image
3882
3883 auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
3884 const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
3885 assert(cb_subres_map && sub_cb_subres_map); // Non const get and map traversal should never be null
3886 cb_subres_map->UpdateFrom(*sub_cb_subres_map);
3887 }
3888
3889 sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
3890 cb_state->linkedCommandBuffers.insert(sub_cb_state);
3891 sub_cb_state->linkedCommandBuffers.insert(cb_state);
3892 for (auto &function : sub_cb_state->queryUpdates) {
3893 cb_state->queryUpdates.push_back(function);
3894 }
3895 for (auto &function : sub_cb_state->queue_submit_functions) {
3896 cb_state->queue_submit_functions.push_back(function);
3897 }
3898 }
3899}
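
// Illustrative app-side usage (hypothetical handles): executing secondaries is what triggers the
// layout-map and query/submit-function propagation above.
//
//     VkCommandBuffer secondaries[2] = {app_sec_cb0, app_sec_cb1};
//     vkCmdExecuteCommands(app_primary_cb, 2, secondaries);
//     // After this call the primary's image_layout_map reflects layouts recorded in both
//     // secondaries, and their deferred query updates run alongside the primary's.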
3900
3901void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
3902 VkFlags flags, void **ppData, VkResult result) {
3903 if (VK_SUCCESS != result) return;
3904 RecordMappedMemory(mem, offset, size, ppData);
3905}
3906
3907void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
3908 auto mem_info = GetDevMemState(mem);
3909 if (mem_info) {
3910 mem_info->mapped_range = MemRange();
3911 mem_info->p_driver_data = nullptr;
3912 }
3913}
3914
3915void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
3916 IMAGE_STATE *image_state = GetImageState(bindInfo.image);
3917 if (image_state) {
3918 const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
3919 if (swapchain_info) {
3920 auto swapchain = GetSwapchainState(swapchain_info->swapchain);
3921 if (swapchain) {
locke-lunargb3584732019-10-28 20:18:36 -06003922 swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
locke-lunargd556cc32019-09-17 01:21:23 -06003923 image_state->bind_swapchain = swapchain_info->swapchain;
3924 image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
3925 }
3926 } else {
3927 // Track bound memory range information
3928 auto mem_info = GetDevMemState(bindInfo.memory);
3929 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07003930 InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06003931 }
3932
3933 // Track objects tied to memory
3934 SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
3935 VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
3936 }
Tony-LunarG330cf4c2020-03-04 16:29:03 -07003937 if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
locke-lunargd556cc32019-09-17 01:21:23 -06003938 AddAliasingImage(image_state);
3939 }
3940 }
3941}
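
// Illustrative app-side bind (hypothetical handles) taking the swapchain branch above: chaining
// VkBindImageMemorySwapchainInfoKHR means memory is VK_NULL_HANDLE and the image aliases the
// swapchain's presentable image instead.
//
//     VkBindImageMemorySwapchainInfoKHR swap_info = {};
//     swap_info.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR;
//     swap_info.swapchain = app_swapchain;
//     swap_info.imageIndex = 0;
//     VkBindImageMemoryInfo bind = {};
//     bind.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
//     bind.pNext = &swap_info;
//     bind.image = app_image;
//     bind.memory = VK_NULL_HANDLE;
//     vkBindImageMemory2(app_device, 1, &bind);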
3942
3943void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
3944 VkDeviceSize memoryOffset, VkResult result) {
3945 if (VK_SUCCESS != result) return;
3946 VkBindImageMemoryInfo bindInfo = {};
3947 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
3948 bindInfo.image = image;
3949 bindInfo.memory = mem;
3950 bindInfo.memoryOffset = memoryOffset;
3951 UpdateBindImageMemoryState(bindInfo);
3952}
3953
3954void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
3955 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3956 if (VK_SUCCESS != result) return;
3957 for (uint32_t i = 0; i < bindInfoCount; i++) {
3958 UpdateBindImageMemoryState(pBindInfos[i]);
3959 }
3960}
3961
3962void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
3963 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
3964 if (VK_SUCCESS != result) return;
3965 for (uint32_t i = 0; i < bindInfoCount; i++) {
3966 UpdateBindImageMemoryState(pBindInfos[i]);
3967 }
3968}
3969
3970void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
3971 auto event_state = GetEventState(event);
3972 if (event_state) {
3973 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
3974 }
locke-lunargd556cc32019-09-17 01:21:23 -06003975}
3976
3977void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
3978 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
3979 VkResult result) {
3980 if (VK_SUCCESS != result) return;
3981 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
3982 pImportSemaphoreFdInfo->flags);
3983}
3984
3985void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
3986 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
3987 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
3988 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
3989 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
3990 semaphore_state->scope = kSyncScopeExternalPermanent;
3991 }
3992}
3993
3994#ifdef VK_USE_PLATFORM_WIN32_KHR
3995void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
3996 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
3997 if (VK_SUCCESS != result) return;
3998 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
3999 pImportSemaphoreWin32HandleInfo->flags);
4000}
4001
4002void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4003 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4004 HANDLE *pHandle, VkResult result) {
4005 if (VK_SUCCESS != result) return;
4006 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4007}
4008
4009void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4010 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4011 if (VK_SUCCESS != result) return;
4012 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4013 pImportFenceWin32HandleInfo->flags);
4014}
4015
4016void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4017 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4018 HANDLE *pHandle, VkResult result) {
4019 if (VK_SUCCESS != result) return;
4020 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4021}
4022#endif
4023
4024void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4025 VkResult result) {
4026 if (VK_SUCCESS != result) return;
4027 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4028}
4029
4030void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
4031 VkFenceImportFlagsKHR flags) {
4032 FENCE_STATE *fence_node = GetFenceState(fence);
4033 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
4034 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
4035 fence_node->scope == kSyncScopeInternal) {
4036 fence_node->scope = kSyncScopeExternalTemporary;
4037 } else {
4038 fence_node->scope = kSyncScopeExternalPermanent;
4039 }
4040 }
4041}
4042
4043void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4044 VkResult result) {
4045 if (VK_SUCCESS != result) return;
4046 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4047}
4048
4049void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
4050 FENCE_STATE *fence_state = GetFenceState(fence);
4051 if (fence_state) {
4052 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4053 // Export with reference transference becomes external
4054 fence_state->scope = kSyncScopeExternalPermanent;
4055 } else if (fence_state->scope == kSyncScopeInternal) {
4056 // Export with copy transference has a side effect of resetting the fence
4057 fence_state->state = FENCE_UNSIGNALED;
4058 }
4059 }
4060}
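
// Illustrative app-side export (hypothetical handles) hitting the copy-transference branch above:
// exporting a SYNC_FD payload resets the fence as a side effect.
//
//     VkFenceGetFdInfoKHR fd_info = {};
//     fd_info.sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR;
//     fd_info.fence = app_fence;
//     fd_info.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
//     int fd = -1;
//     vkGetFenceFdKHR(app_device, &fd_info, &fd);
//     // The tracker now treats app_fence as FENCE_UNSIGNALED.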
4061
4062void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4063 VkResult result) {
4064 if (VK_SUCCESS != result) return;
4065 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4066}
4067
4068void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4069 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4070 if (VK_SUCCESS != result) return;
4071 eventMap[*pEvent].write_in_use = 0;
4072 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
4073}
4074
4075void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4076 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4077 SWAPCHAIN_NODE *old_swapchain_state) {
4078 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004079 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004080 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4081 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4082 swapchain_state->shared_presentable = true;
4083 }
4084 surface_state->swapchain = swapchain_state.get();
4085 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4086 } else {
4087 surface_state->swapchain = nullptr;
4088 }
4089 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4090 if (old_swapchain_state) {
4091 old_swapchain_state->retired = true;
4092 }
4093 return;
4094}
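
// Illustrative recreation pattern (hypothetical handles): passing the old swapchain in
// oldSwapchain is what makes the retirement rule above apply even when creation fails.
//
//     VkSwapchainCreateInfoKHR ci = app_swapchain_ci;  // refreshed extent, format, etc.
//     ci.oldSwapchain = app_old_swapchain;
//     VkSwapchainKHR new_swapchain = VK_NULL_HANDLE;
//     VkResult res = vkCreateSwapchainKHR(app_device, &ci, nullptr, &new_swapchain);
//     // Whether or not res == VK_SUCCESS, app_old_swapchain is now retired.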
4095
4096void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4097 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4098 VkResult result) {
4099 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4100 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4101 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4102}
4103
4104void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
4105 const VkAllocationCallbacks *pAllocator) {
4106 if (!swapchain) return;
4107 auto swapchain_data = GetSwapchainState(swapchain);
4108 if (swapchain_data) {
4109 for (const auto &swapchain_image : swapchain_data->images) {
locke-lunargb3584732019-10-28 20:18:36 -06004110 ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
4111 imageMap.erase(swapchain_image.image);
4112 RemoveAliasingImages(swapchain_image.bound_images);
locke-lunargd556cc32019-09-17 01:21:23 -06004113 }
4114
4115 auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
4116 if (surface_state) {
4117 if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
4118 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004119 swapchain_data->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06004120 swapchainMap.erase(swapchain);
4121 }
4122}
4123
4124void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
4125 // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
4126 for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
4127 auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
4128 if (pSemaphore) {
4129 pSemaphore->signaler.first = VK_NULL_HANDLE;
4130 pSemaphore->signaled = false;
4131 }
4132 }
4133
4134 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
4135        // Note: this is imperfect, in that we can get confused about what did or didn't succeed, but if the app does that,
4136        // it's confused itself just as much.
4137 auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
4138 if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
4139 // Mark the image as having been released to the WSI
4140 auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
4141 if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
locke-lunargb3584732019-10-28 20:18:36 -06004142 auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
locke-lunargd556cc32019-09-17 01:21:23 -06004143 auto image_state = GetImageState(image);
4144 if (image_state) {
4145 image_state->acquired = false;
Jeff Bolz46c0ea02019-10-09 13:06:29 -05004146 if (image_state->shared_presentable) {
4147 image_state->layout_locked = true;
4148 }
locke-lunargd556cc32019-09-17 01:21:23 -06004149 }
4150 }
4151 }
4152 // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
4153 // its semaphore waits) /never/ participate in any completion proof.
4154}
4155
4156void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4157 const VkSwapchainCreateInfoKHR *pCreateInfos,
4158 const VkAllocationCallbacks *pAllocator,
4159 VkSwapchainKHR *pSwapchains, VkResult result) {
4160 if (pCreateInfos) {
4161 for (uint32_t i = 0; i < swapchainCount; i++) {
4162 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4163 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4164 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4165 }
4166 }
4167}
4168
4169void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4170 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
4171 auto pFence = GetFenceState(fence);
4172 if (pFence && pFence->scope == kSyncScopeInternal) {
4173 // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
4174 // import
4175 pFence->state = FENCE_INFLIGHT;
4176        pFence->signaler.first = VK_NULL_HANDLE;  // AcquireNextImage isn't on a queue, so this can't participate in a completion proof.
4177 }
4178
4179 auto pSemaphore = GetSemaphoreState(semaphore);
4180 if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
4181 // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
4182 // temporary import
4183 pSemaphore->signaled = true;
4184 pSemaphore->signaler.first = VK_NULL_HANDLE;
4185 }
4186
4187 // Mark the image as acquired.
4188 auto swapchain_data = GetSwapchainState(swapchain);
4189 if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
locke-lunargb3584732019-10-28 20:18:36 -06004190 auto image = swapchain_data->images[*pImageIndex].image;
locke-lunargd556cc32019-09-17 01:21:23 -06004191 auto image_state = GetImageState(image);
4192 if (image_state) {
4193 image_state->acquired = true;
4194 image_state->shared_presentable = swapchain_data->shared_presentable;
4195 }
4196 }
4197}
4198
4199void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4200 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4201 VkResult result) {
4202 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4203 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
4204}
4205
4206void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
4207 uint32_t *pImageIndex, VkResult result) {
4208 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4209 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
4210 pAcquireInfo->fence, pImageIndex);
4211}
4212
4213void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
4214 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
4215 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
4216 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
4217 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
4218 phys_device_state.phys_device = pPhysicalDevices[i];
4219 // Init actual features for each physical device
4220 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
4221 }
4222 }
4223}
4224
4225// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4226static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
4227 VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4228 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4229
4230 if (!pQueueFamilyProperties) {
4231 if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
4232 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
4233 } else { // Save queue family properties
4234 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
4235
4236 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4237 for (uint32_t i = 0; i < count; ++i) {
4238 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4239 }
4240 }
4241}
4242
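// The 1.0 entry point only returns VkQueueFamilyProperties, so wrap each element in a
// VkQueueFamilyProperties2KHR before handing the array to the common helper above.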
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    // On failure the output struct is undefined; skip the record step, as the KHR variants above do.
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

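// The queries below follow Vulkan's two-call idiom: the app first asks for the count (array pointer
// is null), then calls again to fill the array. The QUERY_COUNT / QUERY_DETAILS call-state tracking
// mirrors that pattern; a sketch of a typical app-side sequence:
//   uint32_t count = 0;
//   vkGetPhysicalDeviceSurfacePresentModesKHR(gpu, surface, &count, nullptr);       // -> QUERY_COUNT
//   std::vector<VkPresentModeKHR> modes(count);
//   vkGetPhysicalDeviceSurfacePresentModesKHR(gpu, surface, &count, modes.data());  // -> QUERY_DETAILS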
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

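// Device groups report their members as a flat list of physical devices; seed per-device state for
// every member, just as vkEnumeratePhysicalDevices does for ungrouped devices.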
void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

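// Cache the performance counters the driver advertises for a queue family, keyed by family index,
// for use by later performance-query validation.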
void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

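// Push descriptors have no backing descriptor set, so decode the template into equivalent
// VkWriteDescriptorSet updates against the pipeline layout's set layout before recording them.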
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

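// vkResetQueryPool resets queries from the host (hostQueryReset, promoted to core in Vulkan 1.2 from
// VK_EXT_host_query_reset). Performance queries track one state per counter pass, so every pass slot
// of each query is reset as well.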
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

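// A module whose first word is not the SPIR-V magic number (0x07230203) is not parseable SPIR-V
// (e.g. GLSL accepted via VK_NV_glsl_shader); such modules get a placeholder SHADER_MODULE_STATE.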
void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}

void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}

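// Swapchain images are created by the WSI, not by vkCreateImage, so reconstruct the
// VkImageCreateInfo the implementation effectively used from the swapchain's create info
// and register each image in imageMap.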
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}