/* Copyright (c) 2015-2020 The Khronos Group Inc.
 * Copyright (c) 2015-2020 Valve Corporation
 * Copyright (c) 2015-2020 LunarG, Inc.
 * Copyright (C) 2015-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_format_utils.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "state_tracker.h"
#include "shader_validation.h"

using std::max;
using std::string;
using std::stringstream;
using std::unique_ptr;
using std::unordered_map;
using std::unordered_set;
using std::vector;

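// Note: a device-level state tracker holds a pointer back to its instance-level counterpart so
// that device-scope calls can consult state recorded at instance scope (e.g. physical-device data).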
void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
    if (add_obj) {
        instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
        // Call base class
        ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
    }
}
54
locke-lunargd556cc32019-09-17 01:21:23 -060055#ifdef VK_USE_PLATFORM_ANDROID_KHR
56// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
57// This could also move into a seperate core_validation_android.cpp file... ?
58
59void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
60 const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
61 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
62 is_node->imported_ahb = true;
63 }
64 const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
65 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
66 is_node->has_ahb_format = true;
67 is_node->ahb_format = ext_fmt_android->externalFormat;
68 }
69}
70
71void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
72 VkSamplerYcbcrConversion ycbcr_conversion) {
73 const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
74 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
75 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
76 }
77};
78
79void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
80 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
81};
82
83#else
84
85void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
86
87void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
88 VkSamplerYcbcrConversion ycbcr_conversion){};
89
90void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
91
92#endif // VK_USE_PLATFORM_ANDROID_KHR

std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
                                                                                     uint32_t set) {
    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
    if (layout_data && (set < layout_data->set_layouts.size())) {
        dsl = layout_data->set_layouts[set];
    }
    return dsl;
}
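// Minimal usage sketch (hypothetical caller, not part of this file): look up the layout bound
// at set 0 and fall back gracefully when the set index is out of range.
//
//     const auto dsl = GetDslFromPipelineLayout(pipeline_layout_state, 0);
//     if (dsl) {
//         // e.g. inspect the bindings the pipeline expects at this set index
//     }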

void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    bool pre_fetch_memory_reqs = true;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if (is_node->external_format_android) {
        // Do not fetch requirements for external memory images
        pre_fetch_memory_reqs = false;
    }
#endif
    // Record the memory requirements in case they won't be queried
    if (pre_fetch_memory_reqs) {
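        // Each plane of a disjoint multi-planar image is bound to memory separately, so the
        // requirements must be queried per plane through VkImagePlaneMemoryRequirementsInfo.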
        if ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) == 0) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least one plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }
    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}

void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
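    // Other components may still hold a shared_ptr to this IMAGE_STATE; the destroyed flag lets
    // them detect use-after-destroy even though erase() only drops the map's reference.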
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}

void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
                                                                    VkImageLayout imageLayout,
                                                                    const VkClearDepthStencilValue *pDepthStencil,
                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    auto cb_node = GetCBState(commandBuffer);
    auto image_state = GetImageState(image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(cb_node, image_state);
    }
}

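// Note: the transfer-command record hooks below only need to link the source/destination
// resources to the command buffer; if any of those resources is later destroyed, the binding
// lets the tracker invalidate the recorded command buffer.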
void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                          VkImageLayout srcImageLayout, VkImage dstImage,
                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
                                                          const VkImageResolve *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_image_state = GetImageState(dstImage);

    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    // Query the memory requirements up front, in case the app never does
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}

void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
                                                            VkResult result) {
    if (result != VK_SUCCESS) return;
    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
                                                           VkResult result) {
    if (result != VK_SUCCESS) return;
    auto image_state = GetImageShared(pCreateInfo->image);
    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
}
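// Design note: view state holds a shared_ptr to the state of the underlying buffer/image, so the
// parent state object stays alive (flagged as destroyed) even if its handle is destroyed first.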

void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffers and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
                                                           const VkAllocationCallbacks *pAllocator) {
    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
    if (!image_view_state) return;
    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
    image_view_state->destroyed = true;
    imageViewMap.erase(imageView);
}

void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    if (!buffer) return;
    auto buffer_state = GetBufferState(buffer);
    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);

    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
    for (auto mem_binding : buffer_state->GetBoundMemory()) {
        RemoveBufferMemoryRange(buffer, mem_binding);
    }
    ClearMemoryObjectBindings(obj_struct);
    buffer_state->destroyed = true;
    bufferMap.erase(buffer_state->buffer);
}

void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!bufferView) return;
    auto buffer_view_state = GetBufferViewState(bufferView);
    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);

    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
    buffer_view_state->destroyed = true;
    bufferViewMap.erase(bufferView);
}

void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                                        VkDeviceSize size, uint32_t data) {
    auto cb_node = GetCBState(commandBuffer);
    auto buffer_state = GetBufferState(dstBuffer);
    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_node, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_image_state = GetImageState(srcImage);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer/image and cmd buffer
    AddCommandBufferBindingImage(cb_node, src_image_state);
    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
                                                               const VkBufferImageCopy *pRegions) {
    auto cb_node = GetCBState(commandBuffer);
    auto src_buffer_state = GetBufferState(srcBuffer);
    auto dst_image_state = GetImageState(dstImage);

    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
    AddCommandBufferBindingImage(cb_node, dst_image_state);
}

// Get the image viewstate for a given framebuffer attachment
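// Imageless framebuffers (VK_KHR_imageless_framebuffer) supply their attachment views at
// render-pass begin time rather than at framebuffer creation, so there is no view to return here.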
IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
    assert(framebuffer);
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(index < framebuffer->createInfo.attachmentCount);
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

// Get the image viewstate for a given framebuffer attachment
const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
                                                                            uint32_t index) const {
    assert(framebuffer);
    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
    assert(index < framebuffer->createInfo.attachmentCount);
    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
    return GetImageViewState(image_view);
}

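// Images bound to the same VkDeviceMemory (or to the same swapchain image) may alias; when two
// such images are compatible, each IMAGE_STATE records the other in its aliasing set, and the
// sets are maintained symmetrically.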
void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
    std::unordered_set<VkImage> *bound_images = nullptr;

    if (image_state->bind_swapchain) {
        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain_state) {
            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
        }
    } else {
        if (image_state->binding.mem_state) {
            bound_images = &image_state->binding.mem_state->bound_images;
        }
    }

    if (bound_images) {
        for (const auto &handle : *bound_images) {
            if (handle != image_state->image) {
                auto is = GetImageState(handle);
                if (is && is->IsCompatibleAliasing(image_state)) {
                    auto inserted = is->aliasing_images.emplace(image_state->image);
                    if (inserted.second) {
                        image_state->aliasing_images.emplace(handle);
                    }
                }
            }
        }
    }
}

void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
    for (const auto &image : image_state->aliasing_images) {
        auto is = GetImageState(image);
        if (is) {
            is->aliasing_images.erase(image_state->image);
        }
    }
    image_state->aliasing_images.clear();
}

void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
    // A one-way clear is sufficient here: bound_images already contains both sides of every
    // cross-reference, so clearing each image's aliasing set removes the whole relationship.
    for (const auto &handle : bound_images) {
        auto is = GetImageState(handle);
        if (is) {
            is->aliasing_images.clear();
        }
    }
}

const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
    auto it = eventMap.find(event);
    if (it == eventMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
    auto it = queueMap.find(queue);
    if (it == queueMap.cend()) {
        return nullptr;
    }
    return &it->second;
}

QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
    auto it = queueMap.find(queue);
    if (it == queueMap.end()) {
        return nullptr;
    }
    return &it->second;
}

const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
    auto it = phys_dev_map->find(phys);
    if (it == phys_dev_map->end()) {
        return nullptr;
    }
    return &it->second;
}

PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }

// Return ptr to memory binding for given handle of specified type
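// Templated on the tracker's const-ness so the const and non-const GetObjectMemBinding
// overloads below can share a single implementation.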
template <typename State, typename Result>
static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
    switch (typed_handle.type) {
        case kVulkanObjectTypeImage:
            return state->GetImageState(typed_handle.Cast<VkImage>());
        case kVulkanObjectTypeBuffer:
            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
        case kVulkanObjectTypeAccelerationStructureNV:
            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
        default:
            break;
    }
    return nullptr;
}

const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
}

BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
}

void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
    auto mem_info = memObjMap[mem].get();

    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }
}
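
// Design note: a dedicated allocation (VkMemoryDedicatedAllocateInfo) is tied to exactly one
// image or buffer; recording that handle here lets later vkBind*Memory calls be validated
// against it.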

// Create binding link between given sampler and command buffer node
void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    AddCommandBufferBinding(sampler_state->cb_bindings,
                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
}

// Create binding link between given image node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // Skip validation if this image was created through WSI
    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
        // First update cb binding for image
        if (AddCommandBufferBinding(image_state->cb_bindings,
                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
            // Now update CB binding in MemObj mini CB list
            for (auto mem_binding : image_state->GetBoundMemory()) {
                // Now update CBInfo's Mem reference list
                AddCommandBufferBinding(mem_binding->cb_bindings,
                                        VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
            }
        }
    }
}

// Create binding link between given image view node and its image with command buffer node
void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for imageView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
        // Only need to continue if this is a new item
        auto image_state = view_state->image_state.get();
        // Add bindings for image within imageView
        if (image_state) {
            AddCommandBufferBindingImage(cb_node, image_state);
        }
    }
}

// Create binding link between given buffer node and command buffer node
void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First update cb binding for buffer
    if (AddCommandBufferBinding(buffer_state->cb_bindings,
                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : buffer_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Create binding link between given buffer view node and its buffer with command buffer node
void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    // First add bindings for bufferView
    if (AddCommandBufferBinding(view_state->cb_bindings,
                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
        auto buffer_state = view_state->buffer_state.get();
        // Add bindings for buffer within bufferView
        if (buffer_state) {
            AddCommandBufferBindingBuffer(cb_node, buffer_state);
        }
    }
}

// Create binding link between given acceleration structure and command buffer node
void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
    if (disabled.command_buffer_state) {
        return;
    }
    if (AddCommandBufferBinding(
            as_state->cb_bindings,
            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
        // Now update CB binding in MemObj mini CB list
        for (auto mem_binding : as_state->GetBoundMemory()) {
            // Now update CBInfo's Mem reference list
            AddCommandBufferBinding(mem_binding->cb_bindings,
                                    VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
        }
    }
}

// Clear a single object binding from given memory object
void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
    if (mem_info) {
        mem_info->obj_bindings.erase(typed_handle);
    }
}

// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
    if (mem_binding) {
        if (!mem_binding->sparse) {
            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
        } else {  // Sparse, clear all bindings
            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
            }
        }
    }
}

// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
                                           const VulkanTypedHandle &typed_handle) {
    assert(mem_binding);

    if (mem != VK_NULL_HANDLE) {
        mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
        if (mem_binding->binding.mem_state) {
            mem_binding->binding.offset = memory_offset;
            mem_binding->binding.size = mem_binding->requirements.size;
            mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
            // For image objects, make sure default memory state is correctly set
            // TODO : What's the best/correct way to handle this?
            if (kVulkanObjectTypeImage == typed_handle.type) {
                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
                if (image_state) {
                    VkImageCreateInfo ici = image_state->createInfo;
                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
                        // TODO:: More memory state transition stuff.
                    }
                }
            }
            mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
        }
    }
}

// SetSparseMemBinding: for the NULL mem case, clear any previous binding. Otherwise:
// make sure the given object is in its object map;
// if a previous binding existed, update the binding;
// add a reference from the objectInfo to the memoryInfo; and
// add a reference off of the object's binding info.
// Returns VK_TRUE if the addition is successful, VK_FALSE otherwise.
bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
                                                 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
    bool skip = VK_FALSE;
    // Handle NULL case separately, just clear previous binding & decrement reference
    if (mem == VK_NULL_HANDLE) {
        // TODO : This should cause the range of the resource to be unbound according to spec
    } else {
        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
        assert(mem_binding);
        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
            assert(mem_binding->sparse);
            MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
            if (binding.mem_state) {
                binding.mem_state->obj_bindings.insert(typed_handle);
                // Need to set mem binding for this object
                mem_binding->sparse_bindings.insert(binding);
                mem_binding->UpdateBoundMemorySet();
            }
        }
    }
    return skip;
}

void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
            if (!descriptor_set->IsPushDescriptor()) {
                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
                // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

                if (reduced_map.IsManyDescriptors()) {
                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
                }

                // We can skip updating the state if "nothing" has changed since the last validation.
                // See CoreChecks::ValidateCmdBufDrawState for more details.
                bool descriptor_set_changed =
                    !reduced_map.IsManyDescriptors() ||
                    // Update if descriptor set (or contents) has changed
                    state.per_set[setIndex].validated_set != descriptor_set ||
                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                    (!disabled.image_layout_validation &&
                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
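                // Both binding-request maps iterate in sorted key order, which the
                // std::includes / std::set_difference range algorithms below require.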
                bool need_update = descriptor_set_changed ||
                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
                                                  binding_req_map.begin(), binding_req_map.end());

                if (need_update) {
                    // Bind this set and its active descriptor resources to the command buffer
                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                        // Only record the bindings that haven't already been recorded
                        BindingReqMap delta_reqs;
                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
                                            std::inserter(delta_reqs, delta_reqs.begin()));
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
                    } else {
                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
                    }

                    state.per_set[setIndex].validated_set = descriptor_set;
                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                    if (reduced_map.IsManyDescriptors()) {
                        // Check whether old == new before assigning, the equality check is much cheaper than
                        // freeing and reallocating the map.
                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                        }
                    } else {
                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                    }
                }
            }
        }
    }
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}

// Remove set from setMap and delete the set
void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
    descriptor_set->destroyed = true;
    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);

    setMap.erase(descriptor_set->GetSet());
}

// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
void ValidationStateTracker::DeleteDescriptorSetPools() {
    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
        // Remove this pool's sets from setMap and delete them
        for (auto ds : ii->second->sets) {
            FreeDescriptorSet(ds);
        }
        ii->second->sets.clear();
        ii = descriptorPoolMap.erase(ii);
    }
}

// For given object struct return a ptr of BASE_NODE type for its wrapping struct
BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
    if (object_struct.node) {
#ifdef _DEBUG
        // assert that lookup would find the same object
        VulkanTypedHandle other = object_struct;
        other.node = nullptr;
        assert(object_struct.node == GetStateStructPtrFromObject(other));
#endif
        return object_struct.node;
    }
    BASE_NODE *base_ptr = nullptr;
    switch (object_struct.type) {
        case kVulkanObjectTypeDescriptorSet: {
            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
            break;
        }
        case kVulkanObjectTypeSampler: {
            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
            break;
        }
        case kVulkanObjectTypeQueryPool: {
            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
            break;
        }
        case kVulkanObjectTypePipeline: {
            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
            break;
        }
        case kVulkanObjectTypeBuffer: {
            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
            break;
        }
        case kVulkanObjectTypeBufferView: {
            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
            break;
        }
        case kVulkanObjectTypeImage: {
            base_ptr = GetImageState(object_struct.Cast<VkImage>());
            break;
        }
        case kVulkanObjectTypeImageView: {
            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
            break;
        }
        case kVulkanObjectTypeEvent: {
            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
            break;
        }
        case kVulkanObjectTypeDescriptorPool: {
            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
            break;
        }
        case kVulkanObjectTypeCommandPool: {
            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
            break;
        }
        case kVulkanObjectTypeFramebuffer: {
            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
            break;
        }
        case kVulkanObjectTypeRenderPass: {
            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
            break;
        }
        case kVulkanObjectTypeDeviceMemory: {
            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
            break;
        }
        case kVulkanObjectTypeAccelerationStructureNV: {
            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
            break;
        }
        case kVulkanObjectTypeUnknown:
            // This can happen if an element of the object_bindings vector has been
            // zeroed out, after an object is destroyed.
            break;
        default:
            // TODO : Any other objects to be handled here?
            assert(0);
            break;
    }
    return base_ptr;
}

// Tie the VulkanTypedHandle to the cmd buffer which includes:
// Add object_binding to cmd buffer
// Add cb_binding to object
bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
    if (disabled.command_buffer_state) {
        return false;
    }
    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
    auto inserted = cb_bindings.insert({cb_node, -1});
    if (inserted.second) {
        cb_node->object_bindings.push_back(obj);
        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
        return true;
    }
    return false;
}

// For a given object, if cb_node is in that object's cb_bindings, remove cb_node
void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
    if (base_obj) base_obj->cb_bindings.erase(cb_node);
}

// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        pCB->viewportMask = 0;
        pCB->scissorMask = 0;

        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            auto fb_state = GetFramebufferState(framebuffer);
            if (fb_state) fb_state->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;
    }
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}

void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
                                                        VkResult result) {
    if (VK_SUCCESS != result) return;

    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
    if (nullptr == enabled_features_found) {
        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
        if (features2) {
            enabled_features_found = &(features2->features);
        }
    }

    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);

    if (nullptr == enabled_features_found) {
        state_tracker->enabled_features.core = {};
    } else {
        state_tracker->enabled_features.core = *enabled_features_found;
    }

    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
    // previously set them through an explicit API call.
    uint32_t count;
    auto pd_state = GetPhysicalDeviceState(gpu);
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
    // Save local link to this device's physical device state
    state_tracker->physical_device_state = pd_state;

    const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
    if (vulkan_12_features) {
        state_tracker->enabled_features.core12 = *vulkan_12_features;
    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan12Features

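        // (Vulkan 1.2 promoted the per-extension feature structs handled below into
        // VkPhysicalDeviceVulkan12Features; when the app enables features through the individual
        // structs, fold each one into the core12 aggregate so later checks consult one place.)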
1013 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1014 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001015 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1016 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1017 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1018 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001019 }
1020
1021 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1022 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001023 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1024 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001025 }
1026
1027 const auto *descriptor_indexing_features =
1028 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1029 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001030 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1031 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1032 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1033 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1034 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1035 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1036 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1037 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1038 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1039 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1040 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1041 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1042 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1043 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1044 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1045 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1046 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1047 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1048 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1049 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1050 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1051 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1052 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1053 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1054 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1055 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1056 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1057 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
                descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
            state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
                descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
            state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
                descriptor_indexing_features->descriptorBindingPartiallyBound;
            state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
                descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
            state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
        }

        const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
        if (scalar_block_layout_features) {
            state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
        }

        const auto *imageless_framebuffer_features =
            lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
        if (imageless_framebuffer_features) {
            state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
        }

        const auto *uniform_buffer_standard_layout_features =
            lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
        if (uniform_buffer_standard_layout_features) {
            state_tracker->enabled_features.core12.uniformBufferStandardLayout =
                uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
        }

        const auto *subgroup_extended_types_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
        if (subgroup_extended_types_features) {
            state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
                subgroup_extended_types_features->shaderSubgroupExtendedTypes;
        }

        const auto *separate_depth_stencil_layouts_features =
            lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
        if (separate_depth_stencil_layouts_features) {
            state_tracker->enabled_features.core12.separateDepthStencilLayouts =
                separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
        }

        const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
        if (host_query_reset_features) {
            state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
        }

        const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
        if (timeline_semaphore_features) {
            state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
        }

        const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
        if (buffer_device_address) {
            state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
            state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
                buffer_device_address->bufferDeviceAddressCaptureReplay;
            state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
                buffer_device_address->bufferDeviceAddressMultiDevice;
        }
    }

    const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
    if (vulkan_11_features) {
        state_tracker->enabled_features.core11 = *vulkan_11_features;
    } else {
        // These structs are only allowed in the pNext chain if there is no VkPhysicalDeviceVulkan11Features

        const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
        if (sixteen_bit_storage_features) {
            state_tracker->enabled_features.core11.storageBuffer16BitAccess =
                sixteen_bit_storage_features->storageBuffer16BitAccess;
            state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
                sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
            state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
            state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
        }

        const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
        if (multiview_features) {
            state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
            state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
            state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
        }

        const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
        if (variable_pointers_features) {
            state_tracker->enabled_features.core11.variablePointersStorageBuffer =
                variable_pointers_features->variablePointersStorageBuffer;
            state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
        }

        const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
        if (protected_memory_features) {
            state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
        }

        const auto *ycbcr_conversion_features =
            lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
        if (ycbcr_conversion_features) {
            state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
        }

        const auto *shader_draw_parameters_features =
            lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
        if (shader_draw_parameters_features) {
            state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
        }
    }

    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
    state_tracker->physical_device_count =
        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;

    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
    if (exclusive_scissor_features) {
        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
    }

    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
    if (shading_rate_image_features) {
        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
    }

    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
    if (mesh_shader_features) {
        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
    }

    const auto *inline_uniform_block_features =
        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
    if (inline_uniform_block_features) {
        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
    }

    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
    if (transform_feedback_features) {
        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
    }

    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
    if (vtx_attrib_div_features) {
        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
    }

    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
    if (buffer_device_address_ext) {
        state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
    }

    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
    if (cooperative_matrix_features) {
        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
    }

    const auto *compute_shader_derivatives_features =
        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
    if (compute_shader_derivatives_features) {
        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
    }

    const auto *fragment_shader_barycentric_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
    if (fragment_shader_barycentric_features) {
        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
    }

    const auto *shader_image_footprint_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
    if (shader_image_footprint_features) {
        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
    }

    const auto *fragment_shader_interlock_features =
        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
    if (fragment_shader_interlock_features) {
        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
    }

    const auto *demote_to_helper_invocation_features =
        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
    if (demote_to_helper_invocation_features) {
        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
    }

    const auto *texel_buffer_alignment_features =
        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
    if (texel_buffer_alignment_features) {
        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
    }

    const auto *pipeline_exe_props_features =
        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
    if (pipeline_exe_props_features) {
        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
    }

    const auto *dedicated_allocation_image_aliasing_features =
        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
    if (dedicated_allocation_image_aliasing_features) {
        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
            *dedicated_allocation_image_aliasing_features;
    }

    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
    if (performance_query_features) {
        state_tracker->enabled_features.performance_query_features = *performance_query_features;
    }

    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
    if (device_coherent_memory_features) {
        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
    }

    const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
    if (ycbcr_image_array_features) {
        state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
    }

    const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
    if (ray_tracing_features) {
        state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
    }

    // Store physical device properties and physical device mem limits into CoreChecks structs
    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core11);
    GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
                                   &state_tracker->phys_dev_props_core12);

    const auto &dev_ext = state_tracker->device_extensions;
    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;

    if (dev_ext.vk_khr_push_descriptor) {
        // Get the needed push_descriptor limits
        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
        state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
            descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
        state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
            descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
        state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
            descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
        state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
        state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
            descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
        state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
            descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
        state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
        state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
            depth_stencil_resolve_props.supportedStencilResolveModes;
        state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
        state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
    }

    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
        state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
            timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
    }

    if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
        state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
        state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
            float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
        state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
        state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
    }

    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
        // Get the needed cooperative_matrix properties
        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;

        uint32_t numCooperativeMatrixProperties = 0;
        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());

        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
                                                                               state_tracker->cooperative_matrix_properties.data());
    }
    if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
        // Get the needed subgroup limits
        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);

        state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
        state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
        state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
        state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
    }

    // Store queue family data
    if (pCreateInfo->pQueueCreateInfos != nullptr) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            state_tracker->queue_family_index_map.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}
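
// Illustrative sketch (not part of this file): how an application's pNext chain feeds the feature
// recording above. An app enabling timeline semaphores through the core Vulkan 1.2 struct is
// captured wholesale into enabled_features.core12; names below are hypothetical app-side code and
// queue-creation setup is omitted for brevity.
//
//     VkPhysicalDeviceVulkan12Features features12 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES};
//     features12.timelineSemaphore = VK_TRUE;
//     VkDeviceCreateInfo create_info = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO};
//     create_info.pNext = &features12;  // found by lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>
//     vkCreateDevice(gpu, &create_info, nullptr, &device);
//
// The same feature requested via the standalone VkPhysicalDeviceTimelineSemaphoreFeatures struct
// instead takes the else-branch and is copied field-by-field into core12.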

void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (!device) return;

    // Reset all command buffers before destroying them, to unlink object_bindings.
    for (auto &commandBuffer : commandBufferMap) {
        ResetCommandBufferState(commandBuffer.first);
    }
    pipelineMap.clear();
    renderPassMap.clear();
    commandBufferMap.clear();

    // This will also delete all sets in the pool & remove them from setMap
    DeleteDescriptorSetPools();
    // All sets should be removed
    assert(setMap.empty());
    descriptorSetLayoutMap.clear();
    imageViewMap.clear();
    imageMap.clear();
    bufferViewMap.clear();
    bufferMap.clear();
    // Queues persist until device is destroyed
    queueMap.clear();
}

// Loop through bound objects and increment their in_use counts.
void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
    for (auto obj : cb_node->object_bindings) {
        auto base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_add(1);
        }
    }
}

// Track which resources are in-flight by atomically incrementing their "in_use" count
void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
    cb_node->submitCount++;
    cb_node->in_use.fetch_add(1);

    // First increment for all "generic" objects bound to the cmd buffer, followed by special-case objects below
    IncrementBoundObjects(cb_node);
    // TODO: We should be able to remove the NULL look-up checks below once all the corresponding cases are
    // verified to put the command buffer into CB_INVALID state, and that state is flagged before this
    // function is called
    for (auto event : cb_node->writeEventsBeforeWait) {
        auto event_state = GetEventState(event);
        if (event_state) event_state->write_in_use++;
    }
}
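
// Minimal sketch of the in_use lifecycle assumed by the increment/decrement pairs here (values are
// illustrative, not taken from a real trace): each queue submission bumps the counters and
// retirement drops them, so at any instant in_use equals the number of still-pending submissions
// referencing the object.
//
//     IncrementResources(cb_node);        // vkQueueSubmit recorded: in_use 0 -> 1
//     ...                                 // GPU work completes, fence/semaphore observed
//     DecrementBoundResources(cb_node);   // via RetireWorkOnQueue: in_use 1 -> 0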

// Decrement in-use count for objects bound to command buffer
void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
    BASE_NODE *base_obj = nullptr;
    for (auto obj : cb_node->object_bindings) {
        base_obj = GetStateStructPtrFromObject(obj);
        if (base_obj) {
            base_obj->in_use.fetch_sub(1);
        }
    }
}

void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            auto &lastSeq = otherQueueSeqs[wait.queue];
            lastSeq = std::max(lastSeq, wait.seq);
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            QueryMap localQueryToStateMap;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
            }

            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;

                    const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryStatePair.first.pool);
                    if (qp_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR)
                        queryPassToStateMap[QueryObjectPass(queryStatePair.first, submission.perf_submit_pass)] =
                            QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
}
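
// Sketch of the sequence-number model used above (informal, numbers are examples): QUEUE_STATE::seq
// counts retired submissions and QUEUE_STATE::submissions holds the pending ones, so the i-th
// pending entry (0-based) completes when seq reaches old_seq + i + 1. With seq == 10 and 3 pending
// submissions, RetireWorkOnQueue(pQueue, 12) pops two entries and leaves seq == 12; waits recorded
// against other queues are then replayed recursively so cross-queue dependencies retire too.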

// Submit a fence to a queue, delimiting previous fences and previous untracked
// work by it.
static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
    pFence->state = FENCE_INFLIGHT;
    pFence->signaler.first = pQueue->queue;
    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
}
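
// Worked example for the signaler arithmetic above (hypothetical values): with pQueue->seq == 10,
// 2 submissions still pending, and submitCount == 3, the fence's signaler seq becomes
// 10 + 2 + 3 == 15, i.e. the fence is treated as signaled once the queue retires through seq 15.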

void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        vector<SEMAPHORE_WAIT> semaphore_waits;
        vector<SEMAPHORE_SIGNAL> semaphore_signals;
        vector<VkSemaphore> semaphore_externals;
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                EventToStageMap localEventToStageMap;
                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);

        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
                                         perf_submit ? perf_submit->counterPassIndex : 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
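
// Illustrative app-side submit that exercises the timeline path above (handle names are
// hypothetical; assumes the timelineSemaphore feature is enabled). The lvl_find_in_chain lookup
// reads pWaitSemaphoreValues[i]/pSignalSemaphoreValues[i] to fill the SEMAPHORE_WAIT and
// SEMAPHORE_SIGNAL records.
//
//     uint64_t wait_value = 1, signal_value = 2;
//     VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
//     VkTimelineSemaphoreSubmitInfoKHR timeline_info = {VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR};
//     timeline_info.waitSemaphoreValueCount = 1;
//     timeline_info.pWaitSemaphoreValues = &wait_value;
//     timeline_info.signalSemaphoreValueCount = 1;
//     timeline_info.pSignalSemaphoreValues = &signal_value;
//     VkSubmitInfo submit_info = {VK_STRUCTURE_TYPE_SUBMIT_INFO, &timeline_info};
//     submit_info.waitSemaphoreCount = 1;
//     submit_info.pWaitSemaphores = &timeline_sem;
//     submit_info.pWaitDstStageMask = &wait_stage;
//     submit_info.signalSemaphoreCount = 1;
//     submit_info.pSignalSemaphores = &timeline_sem;
//     vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);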

void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
                                                          VkResult result) {
    if (VK_SUCCESS == result) {
        AddMemObjInfo(device, *pMemory, pAllocateInfo);
    }
    return;
}

void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    mem_info->destroyed = true;
    memObjMap.erase(mem);
}

void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}

void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
    semaphore_state->signaler.first = VK_NULL_HANDLE;
    semaphore_state->signaler.second = 0;
    semaphore_state->signaled = false;
    semaphore_state->scope = kSyncScopeInternal;
    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
    semaphore_state->payload = 0;
    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
    if (semaphore_type_create_info) {
        semaphore_state->type = semaphore_type_create_info->semaphoreType;
        semaphore_state->payload = semaphore_type_create_info->initialValue;
    }
    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
}
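
// Illustrative creation call that takes the timeline branch above (assumes the timelineSemaphore
// feature was enabled at device creation; handle names are hypothetical):
//
//     VkSemaphoreTypeCreateInfoKHR type_info = {VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR};
//     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
//     type_info.initialValue = 0;
//     VkSemaphoreCreateInfo create_info = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &type_info};
//     vkCreateSemaphore(device, &create_info, nullptr, &semaphore);
//
// Without a VkSemaphoreTypeCreateInfoKHR in the chain, the defaults recorded above apply
// (binary type, payload 0).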

void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
                                                        VkSemaphoreImportFlagsKHR flags) {
    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
            sema_node->scope == kSyncScopeInternal) {
            sema_node->scope = kSyncScopeExternalTemporary;
        } else {
            sema_node->scope = kSyncScopeExternalPermanent;
        }
    }
}
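
// The scope transitions above mirror the external-semaphore rules in the spec: SYNC_FD imports and
// imports with VK_SEMAPHORE_IMPORT_TEMPORARY_BIT only override the payload until the next wait
// (kSyncScopeExternalTemporary), while every other import permanently hands payload ownership to
// the external source (kSyncScopeExternalPermanent), after which queue-ordering for the semaphore
// is no longer modeled here.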
1908
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00001909void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
1910 VkResult result) {
1911 auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
1912 pSemaphore->payload = pSignalInfo->value;
1913}

void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range.offset = offset;
        mem_info->mapped_range.size = size;
        mem_info->p_driver_data = *ppData;
    }
}

void ValidationStateTracker::RetireFence(VkFence fence) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        if (pFence->signaler.first != VK_NULL_HANDLE) {
            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
        } else {
            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
            // the fence as retired.
            pFence->state = FENCE_RETIRED;
        }
    }
}

void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
    if (VK_SUCCESS != result) return;

    // When we know that all fences are complete we can clean/remove their CBs
    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
        for (uint32_t i = 0; i < fenceCount; i++) {
            RetireFence(pFences[i]);
        }
    }
    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. For the app
    // to guarantee which fences completed, it will have to call vkGetFenceStatus(), at which point we'll
    // clean/remove their CBs if complete.
}

void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore) {
        for (auto &pair : queueMap) {
            QUEUE_STATE &queueState = pair.second;
            uint64_t max_seq = 0;
            for (const auto &submission : queueState.submissions) {
                for (const auto &signalSemaphore : submission.signalSemaphores) {
                    if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
                        if (signalSemaphore.seq > max_seq) {
                            max_seq = signalSemaphore.seq;
                        }
                    }
                }
            }
            if (max_seq) {
                RetireWorkOnQueue(&queueState, max_seq);
            }
        }
    }
}
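
// Note on the scan above: a timeline payload is not owned by any single queue, so retiring up to
// 'until_payload' means walking every queue's pending submissions for the highest-seq signal at or
// below that payload and rolling each queue forward to that point. The cost is linear in the total
// number of pending signals, which is tolerable for layer-side bookkeeping.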

void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
                                                          VkResult result) {
    if (VK_SUCCESS != result) return;

    for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
        RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
    if (VK_SUCCESS != result) return;
    RetireFence(fence);
}

void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
    // Add queue to tracking set only if it is new
    auto queue_is_new = queues.emplace(queue);
    if (queue_is_new.second == true) {
        QUEUE_STATE *queue_state = &queueMap[queue];
        queue_state->queue = queue;
        queue_state->queueFamilyIndex = queue_family_index;
        queue_state->seq = 0;
    }
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}

void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}

void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (auto &queue : queueMap) {
        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
    }
}

void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    if (!fence) return;
    auto fence_state = GetFenceState(fence);
    fence_state->destroyed = true;
    fenceMap.erase(fence);
}

void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!semaphore) return;
    auto semaphore_state = GetSemaphoreState(semaphore);
    semaphore_state->destroyed = true;
    semaphoreMap.erase(semaphore);
}

void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    if (!event) return;
    EVENT_STATE *event_state = GetEventState(event);
    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
    eventMap.erase(event);
}

void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
                                                           const VkAllocationCallbacks *pAllocator) {
    if (!queryPool) return;
    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
    qp_state->destroyed = true;
    queryPoolMap.erase(queryPool);
}

// Object with given handle is being bound to memory w/ given mem_info struct at given memoryOffset.
// Track the handle in the memory object's bound-object set for its type.
void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
                                               VkDeviceSize memoryOffset) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
}

void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
}

void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}

// This function will remove the handle from the appropriate bound-object set in the memory object.
static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
    if (typed_handle.type == kVulkanObjectTypeImage) {
        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
        mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
        mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
    } else {
        // Unsupported object type
        assert(false);
    }
}

void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}

void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}

void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}

void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        // Track bound memory range information
        auto mem_info = GetDevMemState(mem);
        if (mem_info) {
            InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
        }
        // Track objects tied to memory
        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
    }
}
2124
2125void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
2126 VkDeviceSize memoryOffset, VkResult result) {
2127 if (VK_SUCCESS != result) return;
2128 UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
2129}
2130
2131void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
2132 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2133 for (uint32_t i = 0; i < bindInfoCount; i++) {
2134 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2135 }
2136}
2137
2138void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
2139 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2140 for (uint32_t i = 0; i < bindInfoCount; i++) {
2141 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2142 }
2143}
2144
2145void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
2146 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2147 if (buffer_state) {
2148 buffer_state->requirements = *pMemoryRequirements;
2149 buffer_state->memory_requirements_checked = true;
2150 }
2151}
2152
2153void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
2154 VkMemoryRequirements *pMemoryRequirements) {
2155 RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
2156}
2157
2158void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
2159 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2160 VkMemoryRequirements2KHR *pMemoryRequirements) {
2161 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
2162}
2163
2164void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
2165 const VkBufferMemoryRequirementsInfo2KHR *pInfo,
2166 VkMemoryRequirements2KHR *pMemoryRequirements) {
2167 RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
2168}
2169
void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                   VkMemoryRequirements *pMemoryRequirements) {
    const VkImagePlaneMemoryRequirementsInfo *plane_info =
        (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
    // TODO: Does the VkMemoryRequirements need to be saved here if PostCallRecordCreateImage tracks it regardless?
    IMAGE_STATE *image_state = GetImageState(image);
    if (image_state) {
        if (plane_info != nullptr) {
            // Multi-plane image
            image_state->memory_requirements_checked = false;  // Each image plane needs to be checked individually
            if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
                image_state->plane0_memory_requirements_checked = true;
                image_state->plane0_requirements = *pMemoryRequirements;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
                image_state->plane1_memory_requirements_checked = true;
                image_state->plane1_requirements = *pMemoryRequirements;
            } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
                image_state->plane2_memory_requirements_checked = true;
                image_state->plane2_requirements = *pMemoryRequirements;
            }
        } else {
            // Single-plane image
            image_state->requirements = *pMemoryRequirements;
            image_state->memory_requirements_checked = true;
        }
    }
}

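// Illustrative sketch (not part of the tracker): how an application queries per-plane requirements
// for a multi-planar image, which is the path that populates planeN_requirements above. Handle
// names are hypothetical:
//
//     VkImagePlaneMemoryRequirementsInfo plane_info = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
//     plane_info.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
//     VkImageMemoryRequirementsInfo2 info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, &plane_info};
//     info.image = image;
//     VkMemoryRequirements2 reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
//     vkGetImageMemoryRequirements2(device, &info, &reqs);  // lands in RecordGetImageMemoryRequirementsState
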
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr, pMemoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo, &pMemoryRequirements->memoryRequirements);
}

void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo, &pMemoryRequirements->memoryRequirements);
}

static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
        image_state->sparse_metadata_required = true;
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    if (!image_state) return;  // Guard against unknown image handles
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    if (!image_state) return;  // Guard against unknown image handles
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    if (!image_state) return;  // Guard against unknown image handles
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}

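// Illustrative sketch (not part of the tracker): the standard two-call idiom for the sparse
// queries recorded above, with hypothetical handle names:
//
//     uint32_t count = 0;
//     vkGetImageSparseMemoryRequirements(device, image, &count, nullptr);      // query count only
//     std::vector<VkSparseImageMemoryRequirements> reqs(count);
//     vkGetImageSparseMemoryRequirements(device, image, &count, reqs.data());  // fill data; each element
//                                                                              // is recorded per-image above
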
void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!shaderModule) return;
    auto shader_module_state = GetShaderModuleState(shaderModule);
    shader_module_state->destroyed = true;
    shaderModuleMap.erase(shaderModule);
}

void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
                                                          const VkAllocationCallbacks *pAllocator) {
    if (!pipeline) return;
    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
    pipeline_state->destroyed = true;
    pipelineMap.erase(pipeline);
}

void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!pipelineLayout) return;
    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
    pipeline_layout_state->destroyed = true;
    pipelineLayoutMap.erase(pipelineLayout);
}

void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
        sampler_state->destroyed = true;
    }
    samplerMap.erase(sampler);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                                     const VkAllocationCallbacks *pAllocator) {
    if (!descriptorSetLayout) return;
    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
    if (layout_it != descriptorSetLayoutMap.end()) {
        layout_it->second.get()->destroyed = true;
        descriptorSetLayoutMap.erase(layout_it);
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                                const VkAllocationCallbacks *pAllocator) {
    if (!descriptorPool) return;
    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
    if (desc_pool_state) {
        // Any bound cmd buffers are now invalid
        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
        // Free sets that were in this pool
        for (auto ds : desc_pool_state->sets) {
            FreeDescriptorSet(ds);
        }
        desc_pool_state->destroyed = true;
        descriptorPoolMap.erase(descriptorPool);
    }
}

// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // Reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}

void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    auto pPool = GetCommandPoolState(commandPool);
    FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
}

void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
    cmd_pool_state->createFlags = pCreateInfo->flags;
    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
    commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
}

void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
    query_pool_state->createInfo = *pCreateInfo;
    query_pool_state->pool = *pQueryPool;
    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];

        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
            const auto &counter = counters.counters[perf->pCounterIndices[i]];
            switch (counter.scope) {
                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
                    query_pool_state->has_perf_scope_command_buffer = true;
                    break;
                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
                    query_pool_state->has_perf_scope_render_pass = true;
                    break;
                default:
                    break;
            }
        }

        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
                                                                      &query_pool_state->n_performance_passes);
    }

    queryPoolMap[*pQueryPool] = std::move(query_pool_state);

    QueryObject query_obj{*pQueryPool, 0u};
    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
        query_obj.query = i;
        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
    }
}

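// Illustrative sketch (not part of the tracker): creating a VK_KHR_performance_query pool, the case
// handled specially above. The counter indices are hypothetical and would normally come from
// vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR:
//
//     uint32_t counter_indices[] = {0, 1};
//     VkQueryPoolPerformanceCreateInfoKHR perf = {VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR};
//     perf.queueFamilyIndex = 0;
//     perf.counterIndexCount = 2;
//     perf.pCounterIndices = counter_indices;
//     VkQueryPoolCreateInfo ci = {VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, &perf};
//     ci.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
//     ci.queryCount = 1;
//     vkCreateQueryPool(device, &ci, nullptr, &pool);  // 'pool' is a hypothetical VkQueryPool
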
void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!commandPool) return;
    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
    if (cp_state) {
        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
        cp_state->destroyed = true;
        commandPoolMap.erase(commandPool);
    }
}

void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
                                                            VkCommandPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    // Reset all of the CBs allocated from this pool
    auto command_pool_state = GetCommandPoolState(commandPool);
    for (auto cmdBuffer : command_pool_state->commandBuffers) {
        ResetCommandBufferState(cmdBuffer);
    }
}

void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
                                                       VkResult result) {
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto pFence = GetFenceState(pFences[i]);
        if (pFence) {
            if (pFence->scope == kSyncScopeInternal) {
                pFence->state = FENCE_UNSIGNALED;
            } else if (pFence->scope == kSyncScopeExternalTemporary) {
                pFence->scope = kSyncScopeInternal;
            }
        }
    }
}

// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    if (unlink) {
        cb_nodes.clear();
    }
}

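// Illustrative sketch (not part of the tracker): a sequence that exercises the invalidation above,
// with hypothetical handles. Destroying an object bound to a recorded command buffer moves that
// buffer to CB_INVALID_COMPLETE, so submitting it without re-recording would be flagged:
//
//     vkCmdBindPipeline(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
//     vkEndCommandBuffer(cb);                        // cb is now CB_RECORDED
//     vkDestroyPipeline(device, pipeline, nullptr);  // cb becomes CB_INVALID_COMPLETE via this path
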
void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
                                                            const VulkanTypedHandle &obj) {
    for (auto cb_node : cb_nodes) {
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        cb_node->broken_bindings.push_back(obj);

        // If secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
                                                             const VkAllocationCallbacks *pAllocator) {
    if (!framebuffer) return;
    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
    InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
    framebuffer_state->destroyed = true;
    frameBufferMap.erase(framebuffer);
}

void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!renderPass) return;
    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
    InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
    rp_state->destroyed = true;
    renderPassMap.erase(renderPass);
}

void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto fence_state = std::make_shared<FENCE_STATE>();
    fence_state->fence = *pFence;
    fence_state->createInfo = *pCreateInfo;
    fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
    fenceMap[*pFence] = std::move(fence_state);
}

bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                    void *cgpl_state_data) const {
    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    cgpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   VkResult result, void *cgpl_state_data) {
    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
        }
    }
    cgpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                   void *ccpl_state_data) const {
    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
    ccpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                  VkResult result, void *ccpl_state_data) {
    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);

    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
        }
    }
    ccpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
                                                                        uint32_t count,
                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
                                                                         uint32_t count,
                                                                         const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkPipeline *pPipelines, void *crtpl_state_data) const {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    crtpl_state->pipe_state.reserve(count);
    for (uint32_t i = 0; i < count; i++) {
        // Create and initialize internal tracking data structure
        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
        crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
    }
    return false;
}

void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    // This API may create pipelines regardless of the return value
    for (uint32_t i = 0; i < count; i++) {
        if (pPipelines[i] != VK_NULL_HANDLE) {
            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
        }
    }
    crtpl_state->pipe_state.clear();
}

void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;  // Match the other Create* hooks: don't track failed creations
    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
}

// For repeatable sorting, not very useful for "memory in range" search
struct PushConstantRangeCompare {
    bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
        if (lhs->offset == rhs->offset) {
            if (lhs->size == rhs->size) {
                // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
                return lhs->stageFlags < rhs->stageFlags;
            }
            // If the offsets are the same then sorting by the end of range is useful for validation
            return lhs->size < rhs->size;
        }
        return lhs->offset < rhs->offset;
    }
};

static PushConstantRangesDict push_constant_ranges_dict;

PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
    if (!info->pPushConstantRanges) {
        // Hand back the empty entry (creating as needed)...
        return push_constant_ranges_dict.look_up(PushConstantRanges());
    }

    // Sort the input ranges to ensure equivalent ranges map to the same id
    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
        sorted.insert(info->pPushConstantRanges + i);
    }

    PushConstantRanges ranges;
    ranges.reserve(sorted.size());
    for (const auto range : sorted) {
        ranges.emplace_back(*range);
    }
    return push_constant_ranges_dict.look_up(std::move(ranges));
}

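// Illustrative sketch (not part of the tracker): two layouts whose push-constant ranges differ only
// in declaration order map to the same PushConstantRangesId, because GetCanonicalId sorts the ranges
// before the dictionary lookup:
//
//     VkPushConstantRange a[] = {{VK_SHADER_STAGE_VERTEX_BIT, 0, 16}, {VK_SHADER_STAGE_FRAGMENT_BIT, 16, 16}};
//     VkPushConstantRange b[] = {{VK_SHADER_STAGE_FRAGMENT_BIT, 16, 16}, {VK_SHADER_STAGE_VERTEX_BIT, 0, 16}};
//     // VkPipelineLayoutCreateInfos using 'a' and 'b' yield the same canonical id.
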
// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;

static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
}

void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
    if (VK_SUCCESS != result) return;

    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
    pipeline_layout_state->layout = *pPipelineLayout;
    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
    }

    // Get canonical form IDs for the "compatible for set" contents
    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);

    // Create table of "compatible for set N" canonical forms for trivial accept validation
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
        pipeline_layout_state->compat_for_set.emplace_back(
            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
    }
    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
}

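// The compat_for_set ids built above let "compatible for set N" checks (e.g. whether descriptor sets
// bound with one layout remain valid under another) reduce to a simple id comparison rather than a
// deep structural compare: two layouts are compatible for set N when their canonical ids at index N match.
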
void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator,
                                                                VkDescriptorPool *pDescriptorPool, VkResult result) {
    if (VK_SUCCESS != result) return;
    descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
}

void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                               VkDescriptorPoolResetFlags flags, VkResult result) {
    if (VK_SUCCESS != result) return;
    DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
    // TODO: validate flags
    // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
    for (auto ds : pPool->sets) {
        FreeDescriptorSet(ds);
    }
    pPool->sets.clear();
    // Reset available count for each type and available sets for this pool
    for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
        pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
    }
    pPool->availableSets = pPool->maxSets;
}

bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
                                                                   const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                   VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
    // Always update common data
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);

    return false;
}

// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                                  VkDescriptorSet *pDescriptorSets, VkResult result,
                                                                  void *ads_state_data) {
    if (VK_SUCCESS != result) return;
    // All the updates are contained in a single cvdescriptorset function
    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
    PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
}

void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
                                                             const VkDescriptorSet *pDescriptorSets) {
    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
    // Update available descriptor sets in pool
    pool_state->availableSets += count;

    // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
    for (uint32_t i = 0; i < count; ++i) {
        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
            auto descriptor_set = setMap[pDescriptorSets[i]].get();
            uint32_t type_index = 0, descriptor_count = 0;
            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
            }
            FreeDescriptorSet(descriptor_set);
            pool_state->sets.erase(descriptor_set);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}

void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
                                                                  VkCommandBuffer *pCommandBuffer, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
    if (pPool) {
        for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
            // Add command buffer to its commandPool map
            pPool->commandBuffers.insert(pCommandBuffer[i]);
            auto pCB = std::make_shared<CMD_BUFFER_STATE>();
            pCB->createInfo = *pCreateInfo;
            pCB->device = device;
            pCB->command_pool = pPool;
            // Add command buffer to map
            commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
            ResetCommandBufferState(pCommandBuffer[i]);
        }
    }
}

// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
    AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
                            cb_state);
    // If imageless fb, skip fb binding
    if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
    const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
        auto view_state = GetAttachmentImageViewState(fb_state, attachment);
        if (view_state) {
            AddCommandBufferBindingImageView(cb_state, view_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary command buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command buffer and inheriting, update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
        }
    }

    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    cb_state->performance_lock_acquired = performance_lock_acquired;
}

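// Illustrative sketch (not part of the tracker): beginning a secondary command buffer that inherits
// a render pass, which is the branch above that connects the framebuffer to the command buffer.
// Handle names are hypothetical:
//
//     VkCommandBufferInheritanceInfo inherit = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO};
//     inherit.renderPass = render_pass;
//     inherit.subpass = 0;
//     inherit.framebuffer = framebuffer;  // optional, but enables the AddFramebufferBinding path
//     VkCommandBufferBeginInfo begin = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
//     begin.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
//     begin.pInheritanceInfo = &inherit;
//     vkBeginCommandBuffer(secondary_cb, &begin);
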
void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    // Cached validation is specific to a specific recording of a specific command buffer.
    for (auto descriptor_set : cb_state->validated_descriptor_sets) {
        descriptor_set->ClearCachedValidation(cb_state);
    }
    cb_state->validated_descriptor_sets.clear();
    if (VK_SUCCESS == result) {
        cb_state->state = CB_RECORDED;
    }
}

void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
                                                              VkResult result) {
    if (VK_SUCCESS == result) {
        ResetCommandBufferState(commandBuffer);
    }
}

CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
    // Initially assume everything is static state
    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;

    if (ds) {
        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
            switch (ds->pDynamicStates[i]) {
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
                    break;
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
                    break;
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
                    break;
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
                    break;
                case VK_DYNAMIC_STATE_SCISSOR:
                    flags &= ~CBSTATUS_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT:
                    flags &= ~CBSTATUS_VIEWPORT_SET;
                    break;
                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
                    break;
                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
                    break;
                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
                    break;
                default:
                    break;
            }
        }
    }

    return flags;
}

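// Illustrative sketch (not part of the tracker): a pipeline that declares viewport as dynamic state.
// MakeStaticStateMask clears CBSTATUS_VIEWPORT_SET from the static mask, so the status bit is only
// set once vkCmdSetViewport is recorded (see PreCallRecordCmdSetViewport below). Names are hypothetical:
//
//     VkDynamicState dyn[] = {VK_DYNAMIC_STATE_VIEWPORT};
//     VkPipelineDynamicStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO};
//     ds.dynamicStateCount = 1;
//     ds.pDynamicStates = dyn;
//     // ... assign &ds to VkGraphicsPipelineCreateInfo::pDynamicState ...
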
// Validation cache:
// CV is the bottommost implementor of this extension. Don't pass calls down.
// Utility function to set collective state for pipeline
void SetPipelineState(PIPELINE_STATE *pPipe) {
    // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
    if (pPipe->graphicsPipelineCI.pColorBlendState) {
        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
                    pPipe->blendConstantsEnabled = true;
                }
            }
        }
    }
}

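// Illustrative sketch (not part of the tracker): an attachment state that makes SetPipelineState
// flag blendConstantsEnabled, since a constant-color blend factor means the blend constants
// (static in the pipeline, or set via vkCmdSetBlendConstants) affect rendering:
//
//     VkPipelineColorBlendAttachmentState att{};
//     att.blendEnable = VK_TRUE;
//     att.srcColorBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
//     att.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR;
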
void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                          VkPipeline pipeline) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    assert(cb_state);

    auto pipe_state = GetPipelineState(pipeline);
    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
        cb_state->status &= ~cb_state->static_status;
        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
        cb_state->status |= cb_state->static_status;
    }
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
    SetPipelineState(pipe_state);
    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}

void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                         uint32_t viewportCount, const VkViewport *pViewports) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_VIEWPORT_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
                                                                   uint32_t exclusiveScissorCount,
                                                                   const VkRect2D *pExclusiveScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
                                                                    VkImageLayout imageLayout) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    if (imageView != VK_NULL_HANDLE) {
        auto view_state = GetImageViewState(imageView);
        AddCommandBufferBindingImageView(cb_state, view_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                             uint32_t viewportCount,
                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
}

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
                                                                         const VkAllocationCallbacks *pAllocator,
                                                                         VkAccelerationStructureNV *pAccelerationStructure,
                                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
                                                         &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
                                                         &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
                                                                          const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
                                                                          const VkAllocationCallbacks *pAllocator,
                                                                          VkAccelerationStructureKHR *pAccelerationStructure,
                                                                          VkResult result) {
    if (VK_SUCCESS != result) return;
    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);

    // Query the requirements in case the application doesn't (to avoid bind/validation time query)
    VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
    as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
    scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
                                                          &as_state->build_scratch_memory_requirements);

    VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
    update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
    DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
                                                          &as_state->update_scratch_memory_requirements);

    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
}

locke-lunargd556cc32019-09-17 01:21:23 -06003101void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3102 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3103 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3104 if (as_state != nullptr) {
3105 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3106 as_state->memory_requirements = *pMemoryRequirements;
3107 as_state->memory_requirements_checked = true;
3108 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3109 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3110 as_state->build_scratch_memory_requirements_checked = true;
3111 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3112 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3113 as_state->update_scratch_memory_requirements_checked = true;
3114 }
3115 }
3116}
3117
Jeff Bolz95176d02020-04-01 00:36:16 -05003118void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
3119 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
3120 bool isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003121 if (VK_SUCCESS != result) return;
3122 for (uint32_t i = 0; i < bindInfoCount; i++) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003123 const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003124
3125 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
3126 if (as_state) {
3127 // Track bound memory range information
3128 auto mem_info = GetDevMemState(info.memory);
3129 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07003130 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06003131 }
3132 // Track objects tied to memory
3133 SetMemBinding(info.memory, as_state, info.memoryOffset,
Jeff Bolz95176d02020-04-01 00:36:16 -05003134 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));
locke-lunargd556cc32019-09-17 01:21:23 -06003135
3136 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003137 // XXX TODO: Query device address for KHR extension
3138 if (enabled.gpu_validation && isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003139 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3140 }
3141 }
3142 }
3143}
3144
Jeff Bolz95176d02020-04-01 00:36:16 -05003145void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3146 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
3147 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
3148}
3149
3150void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
3151 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
3152 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
3153}
3154
void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state == nullptr) {
        return;
    }

    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
    if (dst_as_state != nullptr) {
        dst_as_state->built = true;
        dst_as_state->build_info.initialize(pInfo);
        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
    }
    if (src_as_state != nullptr) {
        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
    }
    cb_state->hasBuildAccelerationStructureCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
                                                                          VkAccelerationStructureNV dst,
                                                                          VkAccelerationStructureNV src,
                                                                          VkCopyAccelerationStructureModeNV mode) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state) {
        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
        if (dst_as_state != nullptr && src_as_state != nullptr) {
            dst_as_state->built = true;
            dst_as_state->build_info = src_as_state->build_info;
            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
        }
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
                                                                          VkAccelerationStructureKHR accelerationStructure,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!accelerationStructure) return;
    auto *as_state = GetAccelerationStructureState(accelerationStructure);
    if (as_state) {
        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
        for (auto mem_binding : as_state->GetBoundMemory()) {
            RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
        }
        ClearMemoryObjectBindings(obj_struct);
        as_state->destroyed = true;
        accelerationStructureMap.erase(accelerationStructure);
    }
}

void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
                                                                         VkAccelerationStructureNV accelerationStructure,
                                                                         const VkAllocationCallbacks *pAllocator) {
    PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
}

void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
                                                                   uint32_t viewportCount,
                                                                   const VkViewportWScalingNV *pViewportWScalings) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                                               uint16_t lineStipplePattern) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                                        const VkRect2D *pScissors) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
    cb_state->status |= CBSTATUS_SCISSOR_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                                                            float maxDepthBounds) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                   uint32_t compareMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t writeMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}

void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                                 uint32_t reference) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}

// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// Exactly one of pDescriptorSets and push_descriptor_set is non-null, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
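//
// Example of the compatibility rule this function implements (illustrative only; the handles
// below are hypothetical application objects, not part of the layer):
//
//   vkCmdBindDescriptorSets(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layoutABC, /*firstSet*/0, 3, setsABC, 0, nullptr);
//   vkCmdBindDescriptorSets(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layoutABD, /*firstSet*/2, 1, &setD, 0, nullptr);
//
// If layoutABC and layoutABD are compatible for set indices [0, 1], the second call leaves sets
// 0 and 1 bound and valid, rebinds index 2, and invalidates ("disturbs") any higher indices.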
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" sets before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last; we'll shrink below, but first need to check for and clean up the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only clean up the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
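// Illustrative note on dynamic offset consumption (hypothetical numbers, not from the layer):
// if set 0 has 2 dynamic descriptors and set 1 has 1, a bind with pDynamicOffsets = {64, 128, 256}
// assigns {64, 128} to set 0 and {256} to set 1; the "read" pointer above advances by each set's
// GetDynamicDescriptorCount() in binding order, mirroring how vkCmdBindDescriptorSets consumes them.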

// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                uint32_t firstSet, uint32_t setCount,
                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                                const uint32_t *pDynamicOffsets) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pipeline_layout = GetPipelineLayout(layout);

    // Resize binding arrays
    uint32_t last_set_index = firstSet + setCount - 1;
    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
                                  dynamicOffsetCount, pDynamicOffsets);
    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
}

void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set, clear it
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
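// Illustrative call flow (hypothetical application code, not part of the layer): a single
//   vkCmdPushDescriptorSetKHR(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layout, /*set*/0, 1, &write);
// lands here via PreCallRecordCmdPushDescriptorSetKHR below. Since there is no app-visible
// VkDescriptorSet for pushes, the layer synthesizes a transient DescriptorSet for that slot and
// replays the VkWriteDescriptorSet against it, so draw-time validation sees ordinary bound state.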

void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
                                                                  uint32_t set, uint32_t descriptorWriteCount,
                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
}

void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                                                            const void *pValues) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (cb_state != nullptr) {
        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);

        auto &push_constant_data = cb_state->push_constant_data;
        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
    }
}

void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                             VkIndexType indexType) {
    auto buffer_state = GetBufferState(buffer);
    auto cb_state = GetCBState(commandBuffer);

    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
    cb_state->index_buffer_binding.buffer = buffer;
    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
    cb_state->index_buffer_binding.offset = offset;
    cb_state->index_buffer_binding.index_type = indexType;
    // Add binding for this index buffer to this command buffer
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
                                                               const VkDeviceSize *pOffsets) {
    auto cb_state = GetCBState(commandBuffer);

    uint32_t end = firstBinding + bindingCount;
    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
    }

    for (uint32_t i = 0; i < bindingCount; ++i) {
        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
        vertex_buffer_binding.buffer = pBuffers[i];
        vertex_buffer_binding.offset = pOffsets[i];
        // Add binding for this vertex buffer to this command buffer
        AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
    }
}

void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buffer_state = GetBufferState(dstBuffer);

    // Update bindings between buffer and cmd buffer
    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
}

bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
                                               EventToStageMap *localEventToStageMap) {
    (*localEventToStageMap)[event] = stageMask;
    return false;
}

void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                      VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }
    cb_state->eventUpdates.emplace_back(
        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, stageMask, localEventToStageMap);
        });
}
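// A note on the eventUpdates lambdas (a sketch of the mechanism as used here): recording time
// cannot know final event state, since the same command buffer may be submitted many times. The
// lambdas above are therefore stored rather than run; at queue-submit time they are replayed
// against a per-submission localEventToStageMap, so each submission resolves its own view of
// the event stage masks.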

void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
                                                        VkPipelineStageFlags stageMask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto event_state = GetEventState(event);
    if (event_state) {
        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
    }
    cb_state->events.push_back(event);
    if (!cb_state->waitedEvents.count(event)) {
        cb_state->writeEventsBeforeWait.push_back(event);
    }

    cb_state->eventUpdates.emplace_back(
        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
        });
}

void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                                        uint32_t bufferMemoryBarrierCount,
                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                                        uint32_t imageMemoryBarrierCount,
                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    for (uint32_t i = 0; i < eventCount; ++i) {
        auto event_state = GetEventState(pEvents[i]);
        if (event_state) {
            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
                                    cb_state);
        }
        cb_state->waitedEvents.insert(pEvents[i]);
        cb_state->events.push_back(pEvents[i]);
    }
}

bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
    (*localQueryToStateMap)[object] = value;
    return false;
}

bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
                                                QueryMap *localQueryToStateMap) {
    for (uint32_t i = 0; i < queryCount; i++) {
        QueryObject object = {queryPool, firstQuery + i};
        (*localQueryToStateMap)[object] = value;
    }
    return false;
}

QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool,
                                                 uint32_t queryIndex) const {
    QueryObject query = {queryPool, queryIndex};

    const std::array<const decltype(queryToStateMap) *, 2> map_list = {{localQueryToStateMap, &queryToStateMap}};

    for (const auto map : map_list) {
        auto query_data = map->find(query);
        if (query_data != map->end()) {
            return query_data->second;
        }
    }
    return QUERYSTATE_UNKNOWN;
}

void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.insert(query_obj);
    cb_state->startedQueries.insert(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            SetQueryState(query_obj, QUERYSTATE_RUNNING, localQueryToStateMap);
            return false;
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                                         VkFlags flags) {
    if (disabled.query_validation) return;
    QueryObject query = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query);
}

void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
    if (disabled.query_validation) return;
    cb_state->activeQueries.erase(query_obj);
    cb_state->queryUpdates.emplace_back(
        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query_obj, QUERYSTATE_ENDED, localQueryToStateMap);
        });
    auto pool_state = GetQueryPoolState(query_obj.pool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    QueryObject query_obj = {queryPool, slot};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                             uint32_t firstQuery, uint32_t queryCount) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
        QueryObject query = {queryPool, slot};
        cb_state->resetQueries.insert(query);
    }

    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
                                                                            bool do_validate, QueryMap *localQueryToStateMap) {
        return SetQueryStateMulti(queryPool, firstQuery, queryCount, QUERYSTATE_RESET, localQueryToStateMap);
    });
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
                                                                   VkQueryResultFlags flags) {
    if (disabled.query_validation) return;
    auto cb_state = GetCBState(commandBuffer);
    auto dst_buff_state = GetBufferState(dstBuffer);
    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
}

void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
                                                             VkQueryPool queryPool, uint32_t slot) {
    if (disabled.query_validation) return;
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto pool_state = GetQueryPoolState(queryPool);
    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
                            cb_state);
    QueryObject query = {queryPool, slot};
    cb_state->queryUpdates.emplace_back(
        [query](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
            return SetQueryState(query, QUERYSTATE_ENDED, localQueryToStateMap);
        });
}

void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    // Shadow create info and store in map
    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));

    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
            VkImageView view = pCreateInfo->pAttachments[i];
            auto view_state = GetImageViewState(view);
            if (!view_state) {
                continue;
            }
        }
    }
    frameBufferMap[*pFramebuffer] = std::move(fb_state);
}

void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
    }
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
    }
}
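// Illustrative example of the DAG this builds (hypothetical render pass, not from the layer):
// subpasses {0, 1, 2} with dependencies 0->1, 1->2, and a self-dependency 1->1 yield
//   subpass_to_node[1].prev = {0}, subpass_to_node[1].next = {2},
//   self_dependencies[1] = {index of the 1->1 dependency};
// dependencies involving VK_SUBPASS_EXTERNAL are deliberately excluded from the graph.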

static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
    if (index == VK_ATTACHMENT_UNUSED) return;

    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
}

void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
                                                         VkRenderPass *pRenderPass) {
    render_pass->renderPass = *pRenderPass;
    auto create_info = render_pass->createInfo.ptr();

    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());

    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);

            // Resolve attachments are considered to be written
            if (subpass.pResolveAttachments) {
                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
            }
        }
        if (subpass.pDepthStencilAttachment) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
        }
        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
        }
    }

    // Even though render_pass is a reference parameter, we still must move so that move assignment is invoked.
    renderPassMap[*pRenderPass] = std::move(render_pass);
}

// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in google style guide, but intentionally forces caller to move
// or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
// construction or assignment.
void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
}

void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                     VkResult result) {
    if (VK_SUCCESS != result) return;
    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                                VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
                                                             VkResult result) {
    RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
}

void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;
        cb_state->activeSubpassContents = contents;
        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer);
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(render_pass_state->cb_bindings,
                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
                                cb_state);

        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
                                                             VkSubpassContents contents) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeSubpass++;
    cb_state->activeSubpassContents = contents;
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    RecordCmdNextSubpass(commandBuffer, contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
                                                           const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
}

void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->activeRenderPass = nullptr;
    cb_state->activeSubpass = 0;
    cb_state->activeFramebuffer = VK_NULL_HANDLE;
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
                                                             const VkSubpassEndInfoKHR *pSubpassEndInfo) {
    RecordCmdEndRenderPassState(commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be
                // moved from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
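// Sketch of the propagation above (illustrative; the layout value is an example only): if a
// secondary command buffer transitions an image to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
// vkCmdExecuteCommands merges that per-subresource state into the primary's layout map via
// UpdateFrom(), so validation of commands recorded after the ExecuteCommands call sees the
// layout the secondary left behind.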

void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
                                                     VkFlags flags, void **ppData, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordMappedMemory(mem, offset, size, ppData);
}

void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
    auto mem_info = GetDevMemState(mem);
    if (mem_info) {
        mem_info->mapped_range = MemRange();
        mem_info->p_driver_data = nullptr;
    }
}

void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
                                                           VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    VkBindImageMemoryInfo bindInfo = {};
    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindInfo.image = image;
    bindInfo.memory = mem;
    bindInfo.memoryOffset = memoryOffset;
    UpdateBindImageMemoryState(bindInfo);
}

void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
    if (VK_SUCCESS != result) return;
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindImageMemoryState(pBindInfos[i]);
    }
}

3968
3969void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
3970 auto event_state = GetEventState(event);
3971 if (event_state) {
3972 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
3973 }
locke-lunargd556cc32019-09-17 01:21:23 -06003974}
3975
void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
                               pImportSemaphoreFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
        semaphore_state->scope = kSyncScopeExternalPermanent;
    }
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
                               pImportSemaphoreWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                      HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
}

void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
                           pImportFenceWin32HandleInfo->flags);
}

void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
                                                                  HANDLE *pHandle, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
}
#endif

void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                             VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
}

void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
                                                    VkFenceImportFlagsKHR flags) {
    FENCE_STATE *fence_node = GetFenceState(fence);
    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
            fence_node->scope == kSyncScopeInternal) {
            fence_node->scope = kSyncScopeExternalTemporary;
        } else {
            fence_node->scope = kSyncScopeExternalPermanent;
        }
    }
}

void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
                                                            VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
}

void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
    FENCE_STATE *fence_state = GetFenceState(fence);
    if (fence_state) {
        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
            // Export with reference transference becomes external
            fence_state->scope = kSyncScopeExternalPermanent;
        } else if (fence_state->scope == kSyncScopeInternal) {
            // Export with copy transference has a side effect of resetting the fence
            fence_state->state = FENCE_UNSIGNALED;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
                                                         VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
}

void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
    if (VK_SUCCESS != result) return;
    eventMap[*pEvent].write_in_use = 0;
    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
}

void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
                                                        SWAPCHAIN_NODE *old_swapchain_state) {
    if (VK_SUCCESS == result) {
        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
            swapchain_state->shared_presentable = true;
        }
        surface_state->swapchain = swapchain_state.get();
        swapchainMap[*pSwapchain] = std::move(swapchain_state);
    } else {
        surface_state->swapchain = nullptr;
    }
    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
    if (old_swapchain_state) {
        old_swapchain_state->retired = true;
    }
}

void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
                                                              VkResult result) {
    auto surface_state = GetSurfaceState(pCreateInfo->surface);
    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
}

void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}

void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed, but if the app does that, it's
        // confused itself just as much.
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP
    // (and its semaphore waits) /never/ participate in any completion proof.
}

void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                                     const VkAllocationCallbacks *pAllocator,
                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
    if (pCreateInfos) {
        for (uint32_t i = 0; i < swapchainCount; i++) {
            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
        }
    }
}

void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
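// Illustrative acquire/present lifecycle tracked by this state (a sketch of a typical frame,
// not code from the layer): vkAcquireNextImageKHR marks images[i].acquired = true here, the app
// records and submits work against that image, and vkQueuePresentKHR flips acquired back to
// false in PostCallRecordQueuePresentKHR above; presenting an image that was never acquired is
// the kind of mismatch downstream validation can then detect from this flag.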

void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
                                                                uint32_t *pImageIndex, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
                                pAcquireInfo->fence, pImageIndex);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
    if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
            phys_device_state.phys_device = pPhysicalDevices[i];
            // Init actual features for each physical device
            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
        }
    }
}

4224// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
4225static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
4226 VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
4227 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
4228
4229 if (!pQueueFamilyProperties) {
4230 if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
4231 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
4232 } else { // Save queue family properties
4233 pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
4234
4235 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
4236 for (uint32_t i = 0; i < count; ++i) {
4237 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
4238 }
4239 }
4240}
4241
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    VkQueueFamilyProperties2KHR *pqfp = nullptr;
    std::vector<VkQueueFamilyProperties2KHR> qfp;
    qfp.resize(*pQueueFamilyPropertyCount);
    if (pQueueFamilyProperties) {
        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
            qfp[i].pNext = nullptr;
            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
        }
        pqfp = qfp.data();
    }
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);
    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
                                                            pQueueFamilyProperties);
}

void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
                                                            const VkAllocationCallbacks *pAllocator) {
    if (!surface) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->destroyed = true;
    surface_map.erase(surface);
}

void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
}

void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSurfaceKHR *pSurface, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}

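// The platform-specific surface creation entry points below differ only in their create-info type; they all funnel
// into RecordVulkanSurface on success.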
#ifdef VK_USE_PLATFORM_ANDROID_KHR
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordVulkanSurface(pSurface);
}
#endif  // VK_USE_PLATFORM_XLIB_KHR

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    physical_device_state->features2.pNext = nullptr;
    physical_device_state->features2.features = *pFeatures;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
    physical_device_state->features2.initialize(pFeatures);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
                                                                                   VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
}

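// VkSurfaceCapabilities2EXT is a distinct sType'd structure (it adds supportedSurfaceCounters), so the shared fields
// are copied individually rather than assigning a whole VkSurfaceCapabilitiesKHR.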
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
                                                                                    VkSurfaceKHR surface,
                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
                                                                                    VkResult result) {
    if (VK_SUCCESS != result) return;
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
                                                                              VkBool32 *pSupported, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto surface_state = GetSurfaceState(surface);
    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
}

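// The enumeration recorders below follow the usual two-phase query pattern: a count-only call advances the tracked
// state to QUERY_COUNT, and a call that returns data advances it to QUERY_DETAILS.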
void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
                                                                                   VkSurfaceKHR surface,
                                                                                   uint32_t *pPresentModeCount,
                                                                                   VkPresentModeKHR *pPresentModes,
                                                                                   VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;

    if (*pPresentModeCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pPresentModeCount > physical_device_state->present_modes.size())
            physical_device_state->present_modes.resize(*pPresentModeCount);
    }
    if (pPresentModes) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
            physical_device_state->present_modes[i] = pPresentModes[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                              uint32_t *pSurfaceFormatCount,
                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
                                                                              VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;

    if (*pSurfaceFormatCount) {
        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
                                                                               uint32_t *pSurfaceFormatCount,
                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
                                                                               VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;

    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
    if (*pSurfaceFormatCount) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
        }
        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
    }
    if (pSurfaceFormats) {
        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
        }
    }
}

void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}

void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}

void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);

    // Squirrel away an easily accessible copy.
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    cb_state->debug_label = LoggingLabel(pLabelInfo);
}

void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
    if (NULL != pPhysicalDeviceGroupProperties) {
        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
                auto &phys_device_state = physical_device_map[cur_phys_dev];
                phys_device_state.phys_device = cur_phys_dev;
                // Init actual features for each physical device
                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
    VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}

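// Cache the performance counters advertised for a queue family (VK_KHR_performance_query) so they can be consulted
// when validating performance queries later.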
void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
                                                                                              uint32_t queueFamilyIndex,
                                                                                              uint32_t *pCounterCount,
                                                                                              VkPerformanceCounterKHR *pCounters) {
    if (NULL == pCounters) return;

    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    assert(physical_device_state);

    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
    queueFamilyCounters->counters.resize(*pCounterCount);
    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];

    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
}

void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
}

void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) performance_lock_acquired = true;
}

void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
    performance_lock_acquired = false;
    for (auto &cmd_buffer : commandBufferMap) {
        cmd_buffer.second->performance_lock_released = true;
    }
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                          const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const VkAllocationCallbacks *pAllocator) {
    if (!descriptorUpdateTemplate) return;
    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    template_state->destroyed = true;
    desc_template_map.erase(descriptorUpdateTemplate);
}

void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
}

void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                        const void *pData) {
    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
        assert(0);
    } else {
        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
        // TODO: Record template push descriptor updates
        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
        }
    }
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}

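// Push descriptor updates applied through a template are decoded into equivalent vkUpdateDescriptorSets-style writes
// so the common push-descriptor recording path can be reused.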
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}

void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
                                                                                uint32_t *pPropertyCount, void *pProperties) {
    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
    if (*pPropertyCount) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
        physical_device_state->display_plane_property_count = *pPropertyCount;
    }
    if (pProperties) {
        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
        }
    }
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
                                                                                      uint32_t *pPropertyCount,
                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
                                                                                      VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
                                                                                       uint32_t *pPropertyCount,
                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
                                                                                       VkResult result) {
    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
}

void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdBeginQuery(cb_state, query_obj);
}

void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                                                                 uint32_t query, uint32_t index) {
    QueryObject query_obj = {queryPool, query, index};
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    RecordCmdEndQuery(cb_state, query_obj);
}

void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
    }
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                        const VkAllocationCallbacks *pAllocator,
                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                        VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
                                                                           const VkAllocationCallbacks *pAllocator,
                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
                                                                           VkResult result) {
    if (VK_SUCCESS != result) return;
    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
                                                                         const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
                                                                            VkSamplerYcbcrConversion ycbcrConversion,
                                                                            const VkAllocationCallbacks *pAllocator) {
    if (!ycbcrConversion) return;
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
    }
}

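// Host-side query pool reset (hostQueryReset feature): mark each affected query, and each per-pass slot for
// performance queries, as QUERYSTATE_RESET.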
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries.
    QueryObject query_obj{queryPool, 0};
    QueryObjectPass query_pass_obj{query_obj, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        auto query_it = queryToStateMap.find(query_obj);
        if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_pass_obj.perf_pass = passIndex;
                auto query_perf_it = queryPassToStateMap.find(query_pass_obj);
                if (query_perf_it != queryPassToStateMap.end()) query_perf_it->second = QUERYSTATE_RESET;
            }
        }
    }
}

void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}

void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
    // Translate the templated update into a normal update for validation...
    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
                                                 decoded_update.desc_writes.data(), 0, NULL);
}

// Update the common AllocateDescriptorSetsData
void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
        if (layout) {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
        // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
    }
}

// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}

// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateDrawState(cb_state, bind_point);
    cb_state->hasDispatchCmd = true;
}

// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
    cb_state->hasDrawCmd = true;
}

void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                                   uint32_t firstVertex, uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                                          uint32_t firstInstance) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                           uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
}

void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                               VkDeviceSize offset) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
}

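// Indirect count draws source two buffers (the indirect parameters and the draw count); bind both to the command
// buffer so their lifetimes are tracked.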
void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                        uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                  VkDeviceSize offset, VkBuffer countBuffer,
                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                  uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                               uint32_t maxDrawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    AddCommandBufferBindingBuffer(cb_state, buffer_state);
    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                         VkDeviceSize offset, VkBuffer countBuffer,
                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                         uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                      VkDeviceSize offset, VkBuffer countBuffer,
                                                                      VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                      uint32_t stride) {
    RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
                                                             uint32_t firstTask) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
}

void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                          VkDeviceSize offset, VkBuffer countBuffer,
                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                          uint32_t stride) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    BUFFER_STATE *buffer_state = GetBufferState(buffer);
    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
    if (buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, buffer_state);
    }
    if (count_buffer_state) {
        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
    }
}

void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkShaderModule *pShaderModule, VkResult result,
                                                              void *csm_state_data) {
    if (VK_SUCCESS != result) return;
    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);

    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
                                                                              csm_state->unique_shader_id)
                                      : std::make_shared<SHADER_MODULE_STATE>();
    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
}

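// Precompute per-stage information (accessible ids, execution modes, descriptor uses) from the shader module so
// later validation can consult it without re-walking the SPIR-V.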
void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    auto module = GetShaderModuleState(pStage->module);
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses =
        CollectInterfaceByDescriptorSlot(module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
    // Capture descriptor uses for the pipeline
    for (auto use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        const uint32_t slot = use.first.first;
        auto &reqs = pipeline->active_slots[slot][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
    }
}

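// Push constant contents are only kept when the new pipeline layout declares exactly the same push constant ranges;
// otherwise the cached data is cleared and zero-filled to the size the new ranges require.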
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
        }
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}

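// Swapchain images are not created through vkCreateImage, so an equivalent VkImageCreateInfo is synthesized from the
// swapchain create info to back each IMAGE_STATE.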
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    if (pSwapchainImages) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
        }
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
        }
    }

    if (*pSwapchainImageCount) {
        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
        }
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}