blob: 0473fd4ab663859269d7edd945ecf579179bbb00 [file] [log] [blame]
Tony-LunarG73719992020-01-15 10:20:28 -07001/* Copyright (c) 2015-2020 The Khronos Group Inc.
2 * Copyright (c) 2015-2020 Valve Corporation
3 * Copyright (c) 2015-2020 LunarG, Inc.
4 * Copyright (C) 2015-2020 Google Inc.
locke-lunargd556cc32019-09-17 01:21:23 -06005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Mark Lobodzinski <mark@lunarg.com>
19 * Author: Dave Houlton <daveh@lunarg.com>
20 * Shannon McPherson <shannon@lunarg.com>
21 */
22
locke-lunargd556cc32019-09-17 01:21:23 -060023#include <cmath>
24#include <set>
locke-lunargd556cc32019-09-17 01:21:23 -060025
26#include "vk_enum_string_helper.h"
27#include "vk_format_utils.h"
28#include "vk_layer_data.h"
29#include "vk_layer_utils.h"
30#include "vk_layer_logging.h"
31#include "vk_typemap_helper.h"
32
33#include "chassis.h"
34#include "state_tracker.h"
35#include "shader_validation.h"
36
John Zulauf890b50b2020-06-17 15:18:19 -060037const char *CommandTypeString(CMD_TYPE type) {
38 // Autogenerated as part of the vk_validation_error_message.h codegen
39 static const std::array<const char *, CMD_RANGE_SIZE> command_name_list = {{VUID_CMD_NAME_LIST}};
40 return command_name_list[type];
41}
42
locke-lunarg4189aa22020-10-21 00:23:48 -060043VkDynamicState ConvertToDynamicState(CBStatusFlagBits flag) {
44 switch (flag) {
45 case CBSTATUS_LINE_WIDTH_SET:
46 return VK_DYNAMIC_STATE_LINE_WIDTH;
47 case CBSTATUS_DEPTH_BIAS_SET:
48 return VK_DYNAMIC_STATE_DEPTH_BIAS;
49 case CBSTATUS_BLEND_CONSTANTS_SET:
50 return VK_DYNAMIC_STATE_BLEND_CONSTANTS;
51 case CBSTATUS_DEPTH_BOUNDS_SET:
52 return VK_DYNAMIC_STATE_DEPTH_BOUNDS;
53 case CBSTATUS_STENCIL_READ_MASK_SET:
54 return VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK;
55 case CBSTATUS_STENCIL_WRITE_MASK_SET:
56 return VK_DYNAMIC_STATE_STENCIL_WRITE_MASK;
57 case CBSTATUS_STENCIL_REFERENCE_SET:
58 return VK_DYNAMIC_STATE_STENCIL_REFERENCE;
59 case CBSTATUS_VIEWPORT_SET:
60 return VK_DYNAMIC_STATE_VIEWPORT;
61 case CBSTATUS_SCISSOR_SET:
62 return VK_DYNAMIC_STATE_SCISSOR;
63 case CBSTATUS_EXCLUSIVE_SCISSOR_SET:
64 return VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV;
65 case CBSTATUS_SHADING_RATE_PALETTE_SET:
66 return VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
67 case CBSTATUS_LINE_STIPPLE_SET:
68 return VK_DYNAMIC_STATE_LINE_STIPPLE_EXT;
69 case CBSTATUS_VIEWPORT_W_SCALING_SET:
70 return VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV;
71 case CBSTATUS_CULL_MODE_SET:
72 return VK_DYNAMIC_STATE_CULL_MODE_EXT;
73 case CBSTATUS_FRONT_FACE_SET:
74 return VK_DYNAMIC_STATE_FRONT_FACE_EXT;
75 case CBSTATUS_PRIMITIVE_TOPOLOGY_SET:
76 return VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT;
77 case CBSTATUS_VIEWPORT_WITH_COUNT_SET:
78 return VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT;
79 case CBSTATUS_SCISSOR_WITH_COUNT_SET:
80 return VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT;
81 case CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET:
82 return VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT;
83 case CBSTATUS_DEPTH_TEST_ENABLE_SET:
84 return VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT;
85 case CBSTATUS_DEPTH_WRITE_ENABLE_SET:
86 return VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT;
87 case CBSTATUS_DEPTH_COMPARE_OP_SET:
88 return VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT;
89 case CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET:
90 return VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT;
91 case CBSTATUS_STENCIL_TEST_ENABLE_SET:
92 return VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT;
93 case CBSTATUS_STENCIL_OP_SET:
94 return VK_DYNAMIC_STATE_STENCIL_OP_EXT;
95 case CBSTATUS_DISCARD_RECTANGLE_SET:
96 return VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT;
97 case CBSTATUS_SAMPLE_LOCATIONS_SET:
98 return VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT;
99 case CBSTATUS_COARSE_SAMPLE_ORDER_SET:
100 return VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV;
101 default:
102 // CBSTATUS_INDEX_BUFFER_BOUND is not in VkDynamicState
103 return VK_DYNAMIC_STATE_MAX_ENUM;
104 }
105 return VK_DYNAMIC_STATE_MAX_ENUM;
106}
107
108CBStatusFlagBits ConvertToCBStatusFlagBits(VkDynamicState state) {
109 switch (state) {
110 case VK_DYNAMIC_STATE_VIEWPORT:
111 return CBSTATUS_VIEWPORT_SET;
112 case VK_DYNAMIC_STATE_SCISSOR:
113 return CBSTATUS_SCISSOR_SET;
114 case VK_DYNAMIC_STATE_LINE_WIDTH:
115 return CBSTATUS_LINE_WIDTH_SET;
116 case VK_DYNAMIC_STATE_DEPTH_BIAS:
117 return CBSTATUS_DEPTH_BIAS_SET;
118 case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
119 return CBSTATUS_BLEND_CONSTANTS_SET;
120 case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
121 return CBSTATUS_DEPTH_BOUNDS_SET;
122 case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
123 return CBSTATUS_STENCIL_READ_MASK_SET;
124 case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
125 return CBSTATUS_STENCIL_WRITE_MASK_SET;
126 case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
127 return CBSTATUS_STENCIL_REFERENCE_SET;
128 case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
129 return CBSTATUS_VIEWPORT_W_SCALING_SET;
130 case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
131 return CBSTATUS_DISCARD_RECTANGLE_SET;
132 case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
133 return CBSTATUS_SAMPLE_LOCATIONS_SET;
134 case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
135 return CBSTATUS_SHADING_RATE_PALETTE_SET;
136 case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
137 return CBSTATUS_COARSE_SAMPLE_ORDER_SET;
138 case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
139 return CBSTATUS_EXCLUSIVE_SCISSOR_SET;
140 case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
141 return CBSTATUS_LINE_STIPPLE_SET;
142 case VK_DYNAMIC_STATE_CULL_MODE_EXT:
143 return CBSTATUS_CULL_MODE_SET;
144 case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
145 return CBSTATUS_FRONT_FACE_SET;
146 case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
147 return CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
148 case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
149 return CBSTATUS_VIEWPORT_WITH_COUNT_SET;
150 case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
151 return CBSTATUS_SCISSOR_WITH_COUNT_SET;
152 case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
153 return CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
154 case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
155 return CBSTATUS_DEPTH_TEST_ENABLE_SET;
156 case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
157 return CBSTATUS_DEPTH_WRITE_ENABLE_SET;
158 case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
159 return CBSTATUS_DEPTH_COMPARE_OP_SET;
160 case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
161 return CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
162 case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
163 return CBSTATUS_STENCIL_TEST_ENABLE_SET;
164 case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
165 return CBSTATUS_STENCIL_OP_SET;
166 default:
167 return CBSTATUS_NONE;
168 }
169 return CBSTATUS_NONE;
170}
171
Mark Lobodzinskib4ab6ac2020-04-02 13:12:06 -0600172void ValidationStateTracker::InitDeviceValidationObject(bool add_obj, ValidationObject *inst_obj, ValidationObject *dev_obj) {
173 if (add_obj) {
174 instance_state = reinterpret_cast<ValidationStateTracker *>(GetValidationObject(inst_obj->object_dispatch, container_type));
175 // Call base class
176 ValidationObject::InitDeviceValidationObject(add_obj, inst_obj, dev_obj);
177 }
178}
179
John Zulauf5c5e88d2019-12-26 11:22:02 -0700180uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
181 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
182 uint32_t mip_level_count = range->levelCount;
183 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
184 mip_level_count = mip_levels - range->baseMipLevel;
185 }
186 return mip_level_count;
187}
188
189uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
190 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
191 uint32_t array_layer_count = range->layerCount;
192 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
193 array_layer_count = layers - range->baseArrayLayer;
194 }
195 return array_layer_count;
196}
197
198VkImageSubresourceRange NormalizeSubresourceRange(const VkImageCreateInfo &image_create_info,
199 const VkImageSubresourceRange &range) {
200 VkImageSubresourceRange norm = range;
201 norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
202
203 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
204 // <arrayLayers> can potentially alias.
205 uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
206 ? image_create_info.extent.depth
207 : image_create_info.arrayLayers;
208 norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
209
210 // For multiplanar formats, IMAGE_ASPECT_COLOR is equivalent to adding the aspect of the individual planes
211 VkImageAspectFlags &aspect_mask = norm.aspectMask;
212 if (FormatIsMultiplane(image_create_info.format)) {
213 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
214 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
215 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
216 if (FormatPlaneCount(image_create_info.format) > 2) {
217 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
218 }
219 }
220 }
221 return norm;
222}
223
224VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
225 const VkImageCreateInfo &image_create_info = image_state.createInfo;
226 return NormalizeSubresourceRange(image_create_info, range);
227}
228
John Zulauf2bc1fde2020-04-24 15:09:51 -0600229// NOTE: Beware the lifespan of the rp_begin when holding the return. If the rp_begin isn't a "safe" copy, "IMAGELESS"
230// attachments won't persist past the API entry point exit.
231std::pair<uint32_t, const VkImageView *> GetFramebufferAttachments(const VkRenderPassBeginInfo &rp_begin,
232 const FRAMEBUFFER_STATE &fb_state) {
233 const VkImageView *attachments = fb_state.createInfo.pAttachments;
234 uint32_t count = fb_state.createInfo.attachmentCount;
235 if (fb_state.createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) {
236 const auto *framebuffer_attachments = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(rp_begin.pNext);
237 if (framebuffer_attachments) {
238 attachments = framebuffer_attachments->pAttachments;
239 count = framebuffer_attachments->attachmentCount;
240 }
241 }
242 return std::make_pair(count, attachments);
243}
244
245std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetAttachmentViews(const VkRenderPassBeginInfo &rp_begin,
246 const FRAMEBUFFER_STATE &fb_state) const {
247 std::vector<const IMAGE_VIEW_STATE *> views;
248
249 const auto count_attachment = GetFramebufferAttachments(rp_begin, fb_state);
250 const auto attachment_count = count_attachment.first;
251 const auto *attachments = count_attachment.second;
252 views.resize(attachment_count, nullptr);
253 for (uint32_t i = 0; i < attachment_count; i++) {
254 if (attachments[i] != VK_NULL_HANDLE) {
255 views[i] = Get<IMAGE_VIEW_STATE>(attachments[i]);
256 }
257 }
258 return views;
259}
260
261std::vector<const IMAGE_VIEW_STATE *> ValidationStateTracker::GetCurrentAttachmentViews(const CMD_BUFFER_STATE &cb_state) const {
262 // Only valid *after* RecordBeginRenderPass and *before* RecordEndRenderpass as it relies on cb_state for the renderpass info.
263 std::vector<const IMAGE_VIEW_STATE *> views;
264
locke-lunargaecf2152020-05-12 17:15:41 -0600265 const auto *rp_state = cb_state.activeRenderPass.get();
John Zulauf2bc1fde2020-04-24 15:09:51 -0600266 if (!rp_state) return views;
267 const auto &rp_begin = *cb_state.activeRenderPassBeginInfo.ptr();
268 const auto *fb_state = Get<FRAMEBUFFER_STATE>(rp_begin.framebuffer);
269 if (!fb_state) return views;
270
271 return GetAttachmentViews(rp_begin, *fb_state);
272}
273
locke-lunarg3e127c72020-06-09 17:45:28 -0600274PIPELINE_STATE *GetCurrentPipelineFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint) {
275 const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
276 if (last_bound_it == cmd.lastBound.cend()) {
277 return nullptr;
278 }
279 return last_bound_it->second.pipeline_state;
280}
281
// Fetch both the pipeline and the per-set descriptor bindings last bound at
// |pipelineBindPoint| on this command buffer.
// NOTE: when nothing was ever bound at that bind point, *rtn_pipe and *rtn_sets
// are left untouched — presumably callers pre-initialize them; verify at call sites.
void GetCurrentPipelineAndDesriptorSetsFromCommandBuffer(const CMD_BUFFER_STATE &cmd, VkPipelineBindPoint pipelineBindPoint,
                                                         const PIPELINE_STATE **rtn_pipe,
                                                         const std::vector<LAST_BOUND_STATE::PER_SET> **rtn_sets) {
    const auto last_bound_it = cmd.lastBound.find(pipelineBindPoint);
    if (last_bound_it == cmd.lastBound.cend()) {
        return;  // Bind point never used — outputs intentionally untouched.
    }
    *rtn_pipe = last_bound_it->second.pipeline_state;
    *rtn_sets = &(last_bound_it->second.per_set);
}
292
locke-lunargd556cc32019-09-17 01:21:23 -0600293#ifdef VK_USE_PLATFORM_ANDROID_KHR
294// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
// This could also move into a separate core_validation_android.cpp file... ?
296
// Record Android-specific image create state: whether the image is backed by an
// AHardwareBuffer, and any externally-defined (AHB) format plus its cached
// format features.
void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
    // AHB-backed external memory: mark so memory requirements are deferred until bind.
    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
        is_node->external_ahb = true;
    }
    // External (driver-defined) format: remember it and look up its features in the
    // cache built by PostCallRecordGetAndroidHardwareBufferPropertiesANDROID.
    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
        is_node->has_ahb_format = true;
        is_node->ahb_format = ext_fmt_android->externalFormat;
        // VUID 01894 will catch if not found in map
        auto it = ahb_ext_formats_map.find(ext_fmt_android->externalFormat);
        if (it != ahb_ext_formats_map.end()) {
            is_node->format_features = it->second;
        }
    }
}
313
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700314void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {
315 const VkExternalMemoryBufferCreateInfo *embci = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(create_info->pNext);
316 if (embci && (embci->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
317 bs_node->external_ahb = true;
318 }
319}
320
locke-lunargd556cc32019-09-17 01:21:23 -0600321void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700322 VkSamplerYcbcrConversion ycbcr_conversion,
323 SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600324 const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
325 if (ext_format_android && (0 != ext_format_android->externalFormat)) {
326 ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
sfricke-samsungbe3584f2020-04-22 14:58:06 -0700327 // VUID 01894 will catch if not found in map
328 auto it = ahb_ext_formats_map.find(ext_format_android->externalFormat);
329 if (it != ahb_ext_formats_map.end()) {
330 ycbcr_state->format_features = it->second;
331 }
locke-lunargd556cc32019-09-17 01:21:23 -0600332 }
333};
334
335void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
336 ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
337};
338
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700339void ValidationStateTracker::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
340 VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties, VkResult result) {
341 if (VK_SUCCESS != result) return;
342 auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
343 if (ahb_format_props) {
344 ahb_ext_formats_map.insert({ahb_format_props->externalFormat, ahb_format_props->formatFeatures});
345 }
346}
347
locke-lunargd556cc32019-09-17 01:21:23 -0600348#else
349
350void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
351
sfricke-samsung013f1ef2020-05-14 22:56:20 -0700352void ValidationStateTracker::RecordCreateBufferANDROID(const VkBufferCreateInfo *create_info, BUFFER_STATE *bs_node) {}
353
// Non-Android build: external-format conversion tracking is a no-op.
void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
                                                                       VkSamplerYcbcrConversion ycbcr_conversion,
                                                                       SAMPLER_YCBCR_CONVERSION_STATE *ycbcr_state){};
locke-lunargd556cc32019-09-17 01:21:23 -0600357
358void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
359
360#endif // VK_USE_PLATFORM_ANDROID_KHR
361
Mark Lobodzinskid3ec86f2020-03-18 11:23:04 -0600362std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(PIPELINE_LAYOUT_STATE const *layout_data,
363 uint32_t set) {
364 std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
365 if (layout_data && (set < layout_data->set_layouts.size())) {
366 dsl = layout_data->set_layouts[set];
367 }
368 return dsl;
369}
370
Petr Kraus44f1c482020-04-25 20:09:25 +0200371void AddImageStateProps(IMAGE_STATE &image_state, const VkDevice device, const VkPhysicalDevice physical_device) {
372 // Add feature support according to Image Format Features (vkspec.html#resources-image-format-features)
373 // if format is AHB external format then the features are already set
374 if (image_state.has_ahb_format == false) {
375 const VkImageTiling image_tiling = image_state.createInfo.tiling;
376 const VkFormat image_format = image_state.createInfo.format;
377 if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
378 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {
379 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, nullptr};
380 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state.image, &drm_format_properties);
381
382 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
383 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
384 nullptr};
385 format_properties_2.pNext = (void *)&drm_properties_list;
386 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Lionel Landwerlin09351a72020-06-22 18:15:59 +0300387 std::vector<VkDrmFormatModifierPropertiesEXT> drm_properties;
388 drm_properties.resize(drm_properties_list.drmFormatModifierCount);
389 drm_properties_list.pDrmFormatModifierProperties = &drm_properties[0];
390 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_format, &format_properties_2);
Petr Kraus44f1c482020-04-25 20:09:25 +0200391
392 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300393 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier ==
394 drm_format_properties.drmFormatModifier) {
395 image_state.format_features =
Petr Kraus44f1c482020-04-25 20:09:25 +0200396 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300397 break;
Petr Kraus44f1c482020-04-25 20:09:25 +0200398 }
399 }
400 } else {
401 VkFormatProperties format_properties;
402 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_format, &format_properties);
403 image_state.format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
404 : format_properties.optimalTilingFeatures;
405 }
406 }
407}
408
// Build and register the tracker state for a newly created image: record
// disjoint/AHB/swapchain/protected attributes, cache memory requirements
// (per plane for disjoint multi-planar images), and cache format features.
void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
    if (VK_SUCCESS != result) return;
    auto is_node = std::make_shared<IMAGE_STATE>(device, *pImage, pCreateInfo);
    is_node->disjoint = ((pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT) != 0);
    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateImageANDROID(pCreateInfo, is_node.get());
    }
    // Image may be an alias of a swapchain image rather than having its own memory.
    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
    if (swapchain_info) {
        is_node->create_from_swapchain = swapchain_info->swapchain;
    }

    // Record the memory requirements in case they won't be queried
    // External AHB memory can't be queried until after memory is bound
    if (is_node->external_ahb == false) {
        if (is_node->disjoint == false) {
            DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
        } else {
            // Disjoint multi-planar image: each plane has its own requirements.
            uint32_t plane_count = FormatPlaneCount(pCreateInfo->format);
            VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, nullptr};
            VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, nullptr};
            VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
            mem_req_info2.pNext = &image_plane_req;
            mem_req_info2.image = *pImage;

            assert(plane_count != 0);  // assumes each format has at least first plane
            image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
            DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
            is_node->plane0_requirements = mem_reqs2.memoryRequirements;

            if (plane_count >= 2) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane1_requirements = mem_reqs2.memoryRequirements;
            }
            if (plane_count >= 3) {
                image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
                DispatchGetImageMemoryRequirements2(device, &mem_req_info2, &mem_reqs2);
                is_node->plane2_requirements = mem_reqs2.memoryRequirements;
            }
        }
    }

    // Cache format features (no-op for AHB external formats, already set above).
    AddImageStateProps(*is_node, device, physical_device);

    is_node->unprotected = ((pCreateInfo->flags & VK_IMAGE_CREATE_PROTECTED_BIT) == 0);

    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
}
459
// Tear down tracker state for an image being destroyed: invalidate command
// buffers that reference it, unhook memory/swapchain/aliasing bindings, and
// remove it from the image map.
// NOTE(review): image_state is dereferenced without a null check — assumes the
// handle is valid (was created through this tracker); confirm callers guarantee it.
void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    if (!image) return;
    IMAGE_STATE *image_state = GetImageState(image);
    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
    // Any command buffer that recorded a use of this image is now invalid.
    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        RemoveImageMemoryRange(image, mem_binding);
    }
    // If bound to a swapchain image, drop this image from that entry's alias set.
    if (image_state->bind_swapchain) {
        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
        if (swapchain) {
            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
        }
    }
    RemoveAliasingImage(image_state);
    ClearMemoryObjectBindings(obj_struct);
    image_state->destroyed = true;
    // Remove image from imageMap
    imageMap.erase(image);
}
481
482void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
483 VkImageLayout imageLayout, const VkClearColorValue *pColor,
484 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
485 auto cb_node = GetCBState(commandBuffer);
486 auto image_state = GetImageState(image);
487 if (cb_node && image_state) {
488 AddCommandBufferBindingImage(cb_node, image_state);
489 }
490}
491
492void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
493 VkImageLayout imageLayout,
494 const VkClearDepthStencilValue *pDepthStencil,
495 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
496 auto cb_node = GetCBState(commandBuffer);
497 auto image_state = GetImageState(image);
498 if (cb_node && image_state) {
499 AddCommandBufferBindingImage(cb_node, image_state);
500 }
501}
502
503void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
504 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
505 uint32_t regionCount, const VkImageCopy *pRegions) {
506 auto cb_node = GetCBState(commandBuffer);
507 auto src_image_state = GetImageState(srcImage);
508 auto dst_image_state = GetImageState(dstImage);
509
510 // Update bindings between images and cmd buffer
511 AddCommandBufferBindingImage(cb_node, src_image_state);
512 AddCommandBufferBindingImage(cb_node, dst_image_state);
513}
514
Jeff Leger178b1e52020-10-05 12:22:23 -0400515void ValidationStateTracker::PreCallRecordCmdCopyImage2KHR(VkCommandBuffer commandBuffer,
516 const VkCopyImageInfo2KHR *pCopyImageInfo) {
517 auto cb_node = GetCBState(commandBuffer);
518 auto src_image_state = GetImageState(pCopyImageInfo->srcImage);
519 auto dst_image_state = GetImageState(pCopyImageInfo->dstImage);
520
521 // Update bindings between images and cmd buffer
522 AddCommandBufferBindingImage(cb_node, src_image_state);
523 AddCommandBufferBindingImage(cb_node, dst_image_state);
524}
525
locke-lunargd556cc32019-09-17 01:21:23 -0600526void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
527 VkImageLayout srcImageLayout, VkImage dstImage,
528 VkImageLayout dstImageLayout, uint32_t regionCount,
529 const VkImageResolve *pRegions) {
530 auto cb_node = GetCBState(commandBuffer);
531 auto src_image_state = GetImageState(srcImage);
532 auto dst_image_state = GetImageState(dstImage);
533
534 // Update bindings between images and cmd buffer
535 AddCommandBufferBindingImage(cb_node, src_image_state);
536 AddCommandBufferBindingImage(cb_node, dst_image_state);
537}
538
Jeff Leger178b1e52020-10-05 12:22:23 -0400539void ValidationStateTracker::PreCallRecordCmdResolveImage2KHR(VkCommandBuffer commandBuffer,
540 const VkResolveImageInfo2KHR *pResolveImageInfo) {
541 auto cb_node = GetCBState(commandBuffer);
542 auto src_image_state = GetImageState(pResolveImageInfo->srcImage);
543 auto dst_image_state = GetImageState(pResolveImageInfo->dstImage);
544
545 // Update bindings between images and cmd buffer
546 AddCommandBufferBindingImage(cb_node, src_image_state);
547 AddCommandBufferBindingImage(cb_node, dst_image_state);
548}
549
locke-lunargd556cc32019-09-17 01:21:23 -0600550void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
551 VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
552 uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
553 auto cb_node = GetCBState(commandBuffer);
554 auto src_image_state = GetImageState(srcImage);
555 auto dst_image_state = GetImageState(dstImage);
556
557 // Update bindings between images and cmd buffer
558 AddCommandBufferBindingImage(cb_node, src_image_state);
559 AddCommandBufferBindingImage(cb_node, dst_image_state);
560}
561
Jeff Leger178b1e52020-10-05 12:22:23 -0400562void ValidationStateTracker::PreCallRecordCmdBlitImage2KHR(VkCommandBuffer commandBuffer,
563 const VkBlitImageInfo2KHR *pBlitImageInfo) {
564 auto cb_node = GetCBState(commandBuffer);
565 auto src_image_state = GetImageState(pBlitImageInfo->srcImage);
566 auto dst_image_state = GetImageState(pBlitImageInfo->dstImage);
567
568 // Update bindings between images and cmd buffer
569 AddCommandBufferBindingImage(cb_node, src_image_state);
570 AddCommandBufferBindingImage(cb_node, dst_image_state);
571}
572
// Build and register the tracker state for a newly created buffer: record AHB
// backing (Android), cache memory requirements, and record the protected flag.
void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
                                                        VkResult result) {
    if (result != VK_SUCCESS) return;
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);

    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
        RecordCreateBufferANDROID(pCreateInfo, buffer_state.get());
    }
    // Get a set of requirements in the case the app does not
    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);

    buffer_state->unprotected = ((pCreateInfo->flags & VK_BUFFER_CREATE_PROTECTED_BIT) == 0);

    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
}
590
591void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
592 const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
593 VkResult result) {
594 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500595 auto buffer_state = GetBufferShared(pCreateInfo->buffer);
locke-lunarg25b6c352020-08-06 17:44:18 -0600596 auto buffer_view_state = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
597
598 VkFormatProperties format_properties;
599 DispatchGetPhysicalDeviceFormatProperties(physical_device, pCreateInfo->format, &format_properties);
600 buffer_view_state->format_features = format_properties.bufferFeatures;
601
602 bufferViewMap.insert(std::make_pair(*pView, std::move(buffer_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600603}
604
605void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
606 const VkAllocationCallbacks *pAllocator, VkImageView *pView,
607 VkResult result) {
608 if (result != VK_SUCCESS) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500609 auto image_state = GetImageShared(pCreateInfo->image);
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700610 auto image_view_state = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
611
612 // Add feature support according to Image View Format Features (vkspec.html#resources-image-view-format-features)
613 const VkImageTiling image_tiling = image_state->createInfo.tiling;
614 const VkFormat image_view_format = pCreateInfo->format;
615 if (image_state->has_ahb_format == true) {
616 // The ImageView uses same Image's format feature since they share same AHB
617 image_view_state->format_features = image_state->format_features;
618 } else if (image_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
619 // Parameter validation should catch if this is used without VK_EXT_image_drm_format_modifier
620 assert(device_extensions.vk_ext_image_drm_format_modifier);
621 VkImageDrmFormatModifierPropertiesEXT drm_format_properties = {VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
622 nullptr};
623 DispatchGetImageDrmFormatModifierPropertiesEXT(device, image_state->image, &drm_format_properties);
624
625 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, nullptr};
626 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
627 nullptr};
628 format_properties_2.pNext = (void *)&drm_properties_list;
629 DispatchGetPhysicalDeviceFormatProperties2(physical_device, image_view_format, &format_properties_2);
630
631 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300632 if (drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifier == drm_format_properties.drmFormatModifier) {
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700633 image_view_state->format_features |=
634 drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
Lionel Landwerlin94f1ce32020-07-02 17:39:31 +0300635 break;
Spencer Fricke6bba8c72020-04-06 07:41:21 -0700636 }
637 }
638 } else {
639 VkFormatProperties format_properties;
640 DispatchGetPhysicalDeviceFormatProperties(physical_device, image_view_format, &format_properties);
641 image_view_state->format_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
642 : format_properties.optimalTilingFeatures;
643 }
644
645 imageViewMap.insert(std::make_pair(*pView, std::move(image_view_state)));
locke-lunargd556cc32019-09-17 01:21:23 -0600646}
647
648void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
649 uint32_t regionCount, const VkBufferCopy *pRegions) {
650 auto cb_node = GetCBState(commandBuffer);
651 auto src_buffer_state = GetBufferState(srcBuffer);
652 auto dst_buffer_state = GetBufferState(dstBuffer);
653
654 // Update bindings between buffers and cmd buffer
655 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
656 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
657}
658
Jeff Leger178b1e52020-10-05 12:22:23 -0400659void ValidationStateTracker::PreCallRecordCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,
660 const VkCopyBufferInfo2KHR *pCopyBufferInfos) {
661 auto cb_node = GetCBState(commandBuffer);
662 auto src_buffer_state = GetBufferState(pCopyBufferInfos->srcBuffer);
663 auto dst_buffer_state = GetBufferState(pCopyBufferInfos->dstBuffer);
664
665 // Update bindings between buffers and cmd buffer
666 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
667 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
668}
669
locke-lunargd556cc32019-09-17 01:21:23 -0600670void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
671 const VkAllocationCallbacks *pAllocator) {
672 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
673 if (!image_view_state) return;
674 const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
675
676 // Any bound cmd buffers are now invalid
677 InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500678 image_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600679 imageViewMap.erase(imageView);
680}
681
682void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
683 if (!buffer) return;
684 auto buffer_state = GetBufferState(buffer);
685 const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
686
687 InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
688 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700689 RemoveBufferMemoryRange(buffer, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -0600690 }
691 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500692 buffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600693 bufferMap.erase(buffer_state->buffer);
694}
695
696void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
697 const VkAllocationCallbacks *pAllocator) {
698 if (!bufferView) return;
699 auto buffer_view_state = GetBufferViewState(bufferView);
700 const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
701
702 // Any bound cmd buffers are now invalid
703 InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500704 buffer_view_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -0600705 bufferViewMap.erase(bufferView);
706}
707
708void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
709 VkDeviceSize size, uint32_t data) {
710 auto cb_node = GetCBState(commandBuffer);
711 auto buffer_state = GetBufferState(dstBuffer);
712 // Update bindings between buffer and cmd buffer
713 AddCommandBufferBindingBuffer(cb_node, buffer_state);
714}
715
716void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
717 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
718 uint32_t regionCount, const VkBufferImageCopy *pRegions) {
719 auto cb_node = GetCBState(commandBuffer);
720 auto src_image_state = GetImageState(srcImage);
721 auto dst_buffer_state = GetBufferState(dstBuffer);
722
723 // Update bindings between buffer/image and cmd buffer
724 AddCommandBufferBindingImage(cb_node, src_image_state);
725 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
726}
727
Jeff Leger178b1e52020-10-05 12:22:23 -0400728void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,
729 const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo) {
730 auto cb_node = GetCBState(commandBuffer);
731 auto src_image_state = GetImageState(pCopyImageToBufferInfo->srcImage);
732 auto dst_buffer_state = GetBufferState(pCopyImageToBufferInfo->dstBuffer);
733
734 // Update bindings between buffer/image and cmd buffer
735 AddCommandBufferBindingImage(cb_node, src_image_state);
736 AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
737}
738
locke-lunargd556cc32019-09-17 01:21:23 -0600739void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
740 VkImageLayout dstImageLayout, uint32_t regionCount,
741 const VkBufferImageCopy *pRegions) {
742 auto cb_node = GetCBState(commandBuffer);
743 auto src_buffer_state = GetBufferState(srcBuffer);
744 auto dst_image_state = GetImageState(dstImage);
745
746 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
747 AddCommandBufferBindingImage(cb_node, dst_image_state);
748}
749
Jeff Leger178b1e52020-10-05 12:22:23 -0400750void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,
751 const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo) {
752 auto cb_node = GetCBState(commandBuffer);
753 auto src_buffer_state = GetBufferState(pCopyBufferToImageInfo->srcBuffer);
754 auto dst_image_state = GetImageState(pCopyBufferToImageInfo->dstImage);
755
756 AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
757 AddCommandBufferBindingImage(cb_node, dst_image_state);
758}
759
locke-lunargd556cc32019-09-17 01:21:23 -0600760// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300761IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(CMD_BUFFER_STATE *cb, FRAMEBUFFER_STATE *framebuffer,
762 uint32_t index) {
763 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
764 assert(index < cb->imagelessFramebufferAttachments.size());
765 return cb->imagelessFramebufferAttachments[index];
766 }
locke-lunargd556cc32019-09-17 01:21:23 -0600767 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
768 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
769 return GetImageViewState(image_view);
770}
771
772// Get the image viewstate for a given framebuffer attachment
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300773const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const CMD_BUFFER_STATE *cb,
774 const FRAMEBUFFER_STATE *framebuffer,
locke-lunargd556cc32019-09-17 01:21:23 -0600775 uint32_t index) const {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +0300776 if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) {
777 assert(index < cb->imagelessFramebufferAttachments.size());
778 return cb->imagelessFramebufferAttachments[index];
779 }
locke-lunargd556cc32019-09-17 01:21:23 -0600780 assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
781 const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
782 return GetImageViewState(image_view);
783}
784
785void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
locke-lunargd556cc32019-09-17 01:21:23 -0600786 std::unordered_set<VkImage> *bound_images = nullptr;
787
locke-lunargb3584732019-10-28 20:18:36 -0600788 if (image_state->bind_swapchain) {
789 auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
locke-lunargd556cc32019-09-17 01:21:23 -0600790 if (swapchain_state) {
locke-lunargb3584732019-10-28 20:18:36 -0600791 bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600792 }
793 } else {
locke-lunargcf04d582019-11-26 00:31:50 -0700794 if (image_state->binding.mem_state) {
795 bound_images = &image_state->binding.mem_state->bound_images;
locke-lunargd556cc32019-09-17 01:21:23 -0600796 }
797 }
798
799 if (bound_images) {
800 for (const auto &handle : *bound_images) {
801 if (handle != image_state->image) {
802 auto is = GetImageState(handle);
803 if (is && is->IsCompatibleAliasing(image_state)) {
804 auto inserted = is->aliasing_images.emplace(image_state->image);
805 if (inserted.second) {
806 image_state->aliasing_images.emplace(handle);
807 }
808 }
809 }
810 }
811 }
812}
813
814void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
815 for (const auto &image : image_state->aliasing_images) {
816 auto is = GetImageState(image);
817 if (is) {
818 is->aliasing_images.erase(image_state->image);
819 }
820 }
821 image_state->aliasing_images.clear();
822}
823
824void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
825 // This is one way clear. Because the bound_images include cross references, the one way clear loop could clear the whole
826 // reference. It doesn't need two ways clear.
827 for (const auto &handle : bound_images) {
828 auto is = GetImageState(handle);
829 if (is) {
830 is->aliasing_images.clear();
831 }
832 }
833}
834
Jeff Bolz310775c2019-10-09 00:46:33 -0500835const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
836 auto it = eventMap.find(event);
837 if (it == eventMap.end()) {
838 return nullptr;
839 }
840 return &it->second;
841}
842
locke-lunargd556cc32019-09-17 01:21:23 -0600843EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
844 auto it = eventMap.find(event);
845 if (it == eventMap.end()) {
846 return nullptr;
847 }
848 return &it->second;
849}
850
851const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
852 auto it = queueMap.find(queue);
853 if (it == queueMap.cend()) {
854 return nullptr;
855 }
856 return &it->second;
857}
858
859QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
860 auto it = queueMap.find(queue);
861 if (it == queueMap.end()) {
862 return nullptr;
863 }
864 return &it->second;
865}
866
867const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
868 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
869 auto it = phys_dev_map->find(phys);
870 if (it == phys_dev_map->end()) {
871 return nullptr;
872 }
873 return &it->second;
874}
875
876PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
877 auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
878 auto it = phys_dev_map->find(phys);
879 if (it == phys_dev_map->end()) {
880 return nullptr;
881 }
882 return &it->second;
883}
884
885PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
886const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
887
888// Return ptr to memory binding for given handle of specified type
889template <typename State, typename Result>
890static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
891 switch (typed_handle.type) {
892 case kVulkanObjectTypeImage:
893 return state->GetImageState(typed_handle.Cast<VkImage>());
894 case kVulkanObjectTypeBuffer:
895 return state->GetBufferState(typed_handle.Cast<VkBuffer>());
896 case kVulkanObjectTypeAccelerationStructureNV:
897 return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
898 default:
899 break;
900 }
901 return nullptr;
902}
903
904const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
905 return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
906}
907
908BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
909 return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
910}
911
// Create and register tracking state for a newly allocated VkDeviceMemory object.
// Walks pAllocateInfo's pNext chain to record dedicated-allocation, export, import,
// multi-instance, and protected-memory attributes on the new DEVICE_MEMORY_STATE.
void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
    assert(object != NULL);

    // Assign a fake address range so aliasing/overlap checks can work without real device addresses
    auto fake_address = fake_memory.Alloc(pAllocateInfo->allocationSize);
    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo, fake_address);
    auto mem_info = memObjMap[mem].get();

    // Dedicated allocation: remember which single buffer/image this memory is dedicated to
    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
    if (dedicated) {
        mem_info->is_dedicated = true;
        mem_info->dedicated_buffer = dedicated->buffer;
        mem_info->dedicated_image = dedicated->image;
    }
    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
    if (export_info) {
        mem_info->is_export = true;
        mem_info->export_handle_type_flags = export_info->handleTypes;
    }

    // Multi-instance if the allocation targets more than one device in the group
    // (deviceMask has more than one bit set) ...
    auto alloc_flags = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
    if (alloc_flags) {
        auto dev_mask = alloc_flags->deviceMask;
        if ((dev_mask != 0) && (dev_mask & (dev_mask - 1))) {
            mem_info->multi_instance = true;
        }
    }
    // ... or if the backing heap is a multi-instance heap on a multi-GPU setup
    auto heap_index = phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].heapIndex;
    mem_info->multi_instance |= (((phys_dev_mem_props.memoryHeaps[heap_index].flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0) &&
                                 physical_device_count > 1);

    // Assumes validation already for only a single import operation in the pNext
#ifdef VK_USE_PLATFORM_WIN32_KHR
    auto win32_import = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
    if (win32_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = win32_import->handleType;
    }
#endif
    auto fd_import = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
    if (fd_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = fd_import->handleType;
    }
    auto host_pointer_import = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
    if (host_pointer_import) {
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = host_pointer_import->handleType;
    }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // AHB Import doesn't have handle in the pNext struct
    // It should be assumed that all imported AHB can only have the same, single handleType
    auto ahb_import = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
    if ((ahb_import) && (ahb_import->buffer != nullptr)) {
        mem_info->is_import_ahb = true;
        mem_info->is_import = true;
        mem_info->import_handle_type_flags = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
    }
#endif  // VK_USE_PLATFORM_ANDROID_KHR

    // Memory is "unprotected" unless allocated from a protected memory type
    const VkMemoryType memory_type = phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex];
    mem_info->unprotected = ((memory_type.propertyFlags & VK_MEMORY_PROPERTY_PROTECTED_BIT) == 0);
}
974
975// Create binding link between given sampler and command buffer node
976void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600977 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600978 return;
979 }
Jeff Bolzadbfa852019-10-04 13:53:30 -0500980 AddCommandBufferBinding(sampler_state->cb_bindings,
981 VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600982}
983
984// Create binding link between given image node and command buffer node
985void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -0600986 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -0600987 return;
988 }
989 // Skip validation if this image was created through WSI
990 if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
991 // First update cb binding for image
Jeff Bolzadbfa852019-10-04 13:53:30 -0500992 if (AddCommandBufferBinding(image_state->cb_bindings,
993 VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -0600994 // Now update CB binding in MemObj mini CB list
995 for (auto mem_binding : image_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -0700996 // Now update CBInfo's Mem reference list
997 AddCommandBufferBinding(mem_binding->cb_bindings,
998 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -0600999 }
1000 }
1001 }
1002}
1003
1004// Create binding link between given image view node and its image with command buffer node
1005void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001006 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001007 return;
1008 }
1009 // First add bindings for imageView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001010 if (AddCommandBufferBinding(view_state->cb_bindings,
1011 VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001012 // Only need to continue if this is a new item
Jeff Bolzadbfa852019-10-04 13:53:30 -05001013 auto image_state = view_state->image_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001014 // Add bindings for image within imageView
1015 if (image_state) {
1016 AddCommandBufferBindingImage(cb_node, image_state);
1017 }
1018 }
1019}
1020
1021// Create binding link between given buffer node and command buffer node
1022void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001023 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001024 return;
1025 }
1026 // First update cb binding for buffer
Jeff Bolzadbfa852019-10-04 13:53:30 -05001027 if (AddCommandBufferBinding(buffer_state->cb_bindings,
1028 VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001029 // Now update CB binding in MemObj mini CB list
1030 for (auto mem_binding : buffer_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001031 // Now update CBInfo's Mem reference list
1032 AddCommandBufferBinding(mem_binding->cb_bindings,
1033 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001034 }
1035 }
1036}
1037
1038// Create binding link between given buffer view node and its buffer with command buffer node
1039void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001040 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001041 return;
1042 }
1043 // First add bindings for bufferView
Jeff Bolzadbfa852019-10-04 13:53:30 -05001044 if (AddCommandBufferBinding(view_state->cb_bindings,
1045 VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
1046 auto buffer_state = view_state->buffer_state.get();
locke-lunargd556cc32019-09-17 01:21:23 -06001047 // Add bindings for buffer within bufferView
1048 if (buffer_state) {
1049 AddCommandBufferBindingBuffer(cb_node, buffer_state);
1050 }
1051 }
1052}
1053
1054// Create binding link between given acceleration structure and command buffer node
1055void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
1056 ACCELERATION_STRUCTURE_STATE *as_state) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001057 if (disabled[command_buffer_state]) {
locke-lunargd556cc32019-09-17 01:21:23 -06001058 return;
1059 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001060 if (AddCommandBufferBinding(
1061 as_state->cb_bindings,
1062 VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
locke-lunargd556cc32019-09-17 01:21:23 -06001063 // Now update CB binding in MemObj mini CB list
1064 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07001065 // Now update CBInfo's Mem reference list
1066 AddCommandBufferBinding(mem_binding->cb_bindings,
1067 VulkanTypedHandle(mem_binding->mem, kVulkanObjectTypeDeviceMemory, mem_binding), cb_node);
locke-lunargd556cc32019-09-17 01:21:23 -06001068 }
1069 }
1070}
1071
locke-lunargd556cc32019-09-17 01:21:23 -06001072// Clear a single object binding from given memory object
locke-lunarg5f59e782019-12-19 10:32:23 -07001073void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
locke-lunargd556cc32019-09-17 01:21:23 -06001074 // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
1075 if (mem_info) {
1076 mem_info->obj_bindings.erase(typed_handle);
1077 }
1078}
1079
1080// ClearMemoryObjectBindings clears the binding of objects to memory
1081// For the given object it pulls the memory bindings and makes sure that the bindings
1082// no longer refer to the object being cleared. This occurs when objects are destroyed.
1083void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
1084 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1085 if (mem_binding) {
1086 if (!mem_binding->sparse) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001087 ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001088 } else { // Sparse, clear all bindings
1089 for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001090 ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06001091 }
1092 }
1093 }
1094}
1095
1096// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
1097// Corresponding valid usage checks are in ValidateSetMemBinding().
1098void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
1099 const VulkanTypedHandle &typed_handle) {
1100 assert(mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -06001101
1102 if (mem != VK_NULL_HANDLE) {
locke-lunargcf04d582019-11-26 00:31:50 -07001103 mem_binding->binding.mem_state = GetShared<DEVICE_MEMORY_STATE>(mem);
1104 if (mem_binding->binding.mem_state) {
locke-lunarg5f59e782019-12-19 10:32:23 -07001105 mem_binding->binding.offset = memory_offset;
1106 mem_binding->binding.size = mem_binding->requirements.size;
locke-lunargcf04d582019-11-26 00:31:50 -07001107 mem_binding->binding.mem_state->obj_bindings.insert(typed_handle);
locke-lunargd556cc32019-09-17 01:21:23 -06001108 // For image objects, make sure default memory state is correctly set
1109 // TODO : What's the best/correct way to handle this?
1110 if (kVulkanObjectTypeImage == typed_handle.type) {
1111 auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
1112 if (image_state) {
1113 VkImageCreateInfo ici = image_state->createInfo;
1114 if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
1115 // TODO:: More memory state transition stuff.
1116 }
1117 }
1118 }
locke-lunargcf04d582019-11-26 00:31:50 -07001119 mem_binding->UpdateBoundMemorySet(); // force recreation of cached set
locke-lunargd556cc32019-09-17 01:21:23 -06001120 }
1121 }
1122}
1123
1124// For NULL mem case, clear any previous binding Else...
1125// Make sure given object is in its object map
1126// IF a previous binding existed, update binding
1127// Add reference from objectInfo to memoryInfo
1128// Add reference off of object's binding info
1129// Return VK_TRUE if addition is successful, VK_FALSE otherwise
locke-lunargcf04d582019-11-26 00:31:50 -07001130bool ValidationStateTracker::SetSparseMemBinding(const VkDeviceMemory mem, const VkDeviceSize mem_offset,
1131 const VkDeviceSize mem_size, const VulkanTypedHandle &typed_handle) {
locke-lunargd556cc32019-09-17 01:21:23 -06001132 bool skip = VK_FALSE;
1133 // Handle NULL case separately, just clear previous binding & decrement reference
locke-lunargcf04d582019-11-26 00:31:50 -07001134 if (mem == VK_NULL_HANDLE) {
locke-lunargd556cc32019-09-17 01:21:23 -06001135 // TODO : This should cause the range of the resource to be unbound according to spec
1136 } else {
1137 BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
1138 assert(mem_binding);
1139 if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
1140 assert(mem_binding->sparse);
locke-lunargcf04d582019-11-26 00:31:50 -07001141 MEM_BINDING binding = {GetShared<DEVICE_MEMORY_STATE>(mem), mem_offset, mem_size};
1142 if (binding.mem_state) {
1143 binding.mem_state->obj_bindings.insert(typed_handle);
locke-lunargd556cc32019-09-17 01:21:23 -06001144 // Need to set mem binding for this object
1145 mem_binding->sparse_bindings.insert(binding);
1146 mem_binding->UpdateBoundMemorySet();
1147 }
1148 }
1149 }
1150 return skip;
1151}
1152
// Record-time state update for a draw/dispatch: for every descriptor set the bound
// pipeline uses, (re)bind the set's active descriptor resources to the command buffer,
// using per-set validation caches to skip work when nothing changed since last time.
// cmd_type/function identify the triggering command for downstream bookkeeping.
void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point,
                                             const char *function) {
    auto &state = cb_state->lastBound[bind_point];
    PIPELINE_STATE *pPipe = state.pipeline_state;
    if (VK_NULL_HANDLE != state.pipeline_layout) {
        for (const auto &set_binding_pair : pPipe->active_slots) {
            uint32_t setIndex = set_binding_pair.first;
            // Pull the set node
            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;

            // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding

            // TODO: If recreating the reduced_map here shows up in profilinging, need to find a way of sharing with the
            // Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
            cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
            const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);

            if (reduced_map.IsManyDescriptors()) {
                // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
                descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
            }

            // We can skip updating the state if "nothing" has changed since the last validation.
            // See CoreChecks::ValidateCmdBufDrawState for more details.
            bool descriptor_set_changed =
                !reduced_map.IsManyDescriptors() ||
                // Update if descriptor set (or contents) has changed
                state.per_set[setIndex].validated_set != descriptor_set ||
                state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
                (!disabled[image_layout_validation] &&
                 state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
            bool need_update = descriptor_set_changed ||
                               // Update if previous bindingReqMap doesn't include new bindingReqMap
                               !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                              state.per_set[setIndex].validated_set_binding_req_map.end(), binding_req_map.begin(),
                                              binding_req_map.end());

            if (need_update) {
                // Bind this set and its active descriptor resources to the command buffer
                if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
                    // Only record the bindings that haven't already been recorded
                    BindingReqMap delta_reqs;
                    std::set_difference(binding_req_map.begin(), binding_req_map.end(),
                                        state.per_set[setIndex].validated_set_binding_req_map.begin(),
                                        state.per_set[setIndex].validated_set_binding_req_map.end(),
                                        std::inserter(delta_reqs, delta_reqs.begin()));
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, delta_reqs, function);
                } else {
                    descriptor_set->UpdateDrawState(this, cb_state, cmd_type, pPipe, binding_req_map, function);
                }

                // Refresh the per-set validation cache to reflect what was just recorded
                state.per_set[setIndex].validated_set = descriptor_set;
                state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
                state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
                if (reduced_map.IsManyDescriptors()) {
                    // Check whether old == new before assigning, the equality check is much cheaper than
                    // freeing and reallocating the map.
                    if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
                        state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
                    }
                } else {
                    state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
                }
            }
        }
    }
    // Remember that a vertex buffer is in use if the pipeline consumes vertex input
    if (!pPipe->vertex_binding_descriptions_.empty()) {
        cb_state->vertex_buffer_used = true;
    }
}
1223
1224// Remove set from setMap and delete the set
1225void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001226 descriptor_set->destroyed = true;
Jeff Bolzadbfa852019-10-04 13:53:30 -05001227 const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001228 // Any bound cmd buffers are now invalid
Jeff Bolzadbfa852019-10-04 13:53:30 -05001229 InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
Jeff Bolz41a1ced2019-10-11 11:40:49 -05001230
locke-lunargd556cc32019-09-17 01:21:23 -06001231 setMap.erase(descriptor_set->GetSet());
1232}
1233
1234// Free all DS Pools including their Sets & related sub-structs
1235// NOTE : Calls to this function should be wrapped in mutex
1236void ValidationStateTracker::DeleteDescriptorSetPools() {
1237 for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
1238 // Remove this pools' sets from setMap and delete them
1239 for (auto ds : ii->second->sets) {
1240 FreeDescriptorSet(ds);
1241 }
1242 ii->second->sets.clear();
1243 ii = descriptorPoolMap.erase(ii);
1244 }
1245}
1246
1247// For given object struct return a ptr of BASE_NODE type for its wrapping struct
1248BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001249 if (object_struct.node) {
1250#ifdef _DEBUG
1251 // assert that lookup would find the same object
1252 VulkanTypedHandle other = object_struct;
1253 other.node = nullptr;
1254 assert(object_struct.node == GetStateStructPtrFromObject(other));
1255#endif
1256 return object_struct.node;
1257 }
locke-lunargd556cc32019-09-17 01:21:23 -06001258 BASE_NODE *base_ptr = nullptr;
1259 switch (object_struct.type) {
1260 case kVulkanObjectTypeDescriptorSet: {
1261 base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
1262 break;
1263 }
1264 case kVulkanObjectTypeSampler: {
1265 base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
1266 break;
1267 }
1268 case kVulkanObjectTypeQueryPool: {
1269 base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
1270 break;
1271 }
1272 case kVulkanObjectTypePipeline: {
1273 base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
1274 break;
1275 }
1276 case kVulkanObjectTypeBuffer: {
1277 base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
1278 break;
1279 }
1280 case kVulkanObjectTypeBufferView: {
1281 base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
1282 break;
1283 }
1284 case kVulkanObjectTypeImage: {
1285 base_ptr = GetImageState(object_struct.Cast<VkImage>());
1286 break;
1287 }
1288 case kVulkanObjectTypeImageView: {
1289 base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
1290 break;
1291 }
1292 case kVulkanObjectTypeEvent: {
1293 base_ptr = GetEventState(object_struct.Cast<VkEvent>());
1294 break;
1295 }
1296 case kVulkanObjectTypeDescriptorPool: {
1297 base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
1298 break;
1299 }
1300 case kVulkanObjectTypeCommandPool: {
1301 base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
1302 break;
1303 }
1304 case kVulkanObjectTypeFramebuffer: {
1305 base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
1306 break;
1307 }
1308 case kVulkanObjectTypeRenderPass: {
1309 base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
1310 break;
1311 }
1312 case kVulkanObjectTypeDeviceMemory: {
1313 base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
1314 break;
1315 }
1316 case kVulkanObjectTypeAccelerationStructureNV: {
1317 base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
1318 break;
1319 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001320 case kVulkanObjectTypeUnknown:
1321 // This can happen if an element of the object_bindings vector has been
1322 // zeroed out, after an object is destroyed.
1323 break;
locke-lunargd556cc32019-09-17 01:21:23 -06001324 default:
1325 // TODO : Any other objects to be handled here?
1326 assert(0);
1327 break;
1328 }
1329 return base_ptr;
1330}
1331
sfricke-samsungbf1a2ed2020-06-14 23:31:00 -07001332// Gets union of all features defined by Potential Format Features
1333// except, does not handle the external format case for AHB as that only can be used for sampled images
sfricke-samsungbe3584f2020-04-22 14:58:06 -07001334VkFormatFeatureFlags ValidationStateTracker::GetPotentialFormatFeatures(VkFormat format) const {
1335 VkFormatFeatureFlags format_features = 0;
1336
1337 if (format != VK_FORMAT_UNDEFINED) {
1338 VkFormatProperties format_properties;
1339 DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
1340 format_features |= format_properties.linearTilingFeatures;
1341 format_features |= format_properties.optimalTilingFeatures;
1342 if (device_extensions.vk_ext_image_drm_format_modifier) {
1343 // VK_KHR_get_physical_device_properties2 is required in this case
1344 VkFormatProperties2 format_properties_2 = {VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2};
1345 VkDrmFormatModifierPropertiesListEXT drm_properties_list = {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
1346 nullptr};
1347 format_properties_2.pNext = (void *)&drm_properties_list;
1348 DispatchGetPhysicalDeviceFormatProperties2(physical_device, format, &format_properties_2);
1349 for (uint32_t i = 0; i < drm_properties_list.drmFormatModifierCount; i++) {
1350 format_features |= drm_properties_list.pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
1351 }
1352 }
1353 }
1354
1355 return format_features;
1356}
1357
locke-lunargd556cc32019-09-17 01:21:23 -06001358// Tie the VulkanTypedHandle to the cmd buffer which includes:
1359// Add object_binding to cmd buffer
1360// Add cb_binding to object
Jeff Bolzadbfa852019-10-04 13:53:30 -05001361bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
locke-lunargd556cc32019-09-17 01:21:23 -06001362 const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001363 if (disabled[command_buffer_state]) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05001364 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001365 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05001366 // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
1367 // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
1368 auto inserted = cb_bindings.insert({cb_node, -1});
1369 if (inserted.second) {
1370 cb_node->object_bindings.push_back(obj);
1371 inserted.first->second = (int)cb_node->object_bindings.size() - 1;
1372 return true;
1373 }
1374 return false;
locke-lunargd556cc32019-09-17 01:21:23 -06001375}
1376
1377// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
1378void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
1379 BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
1380 if (base_obj) base_obj->cb_bindings.erase(cb_node);
1381}
1382
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
    CMD_BUFFER_STATE *pCB = GetCBState(cb);
    if (pCB) {
        pCB->in_use.store(0);
        // Reset CB state (note that createInfo is not cleared)
        pCB->commandBuffer = cb;
        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
        pCB->hasDrawCmd = false;
        pCB->hasTraceRaysCmd = false;
        pCB->hasBuildAccelerationStructureCmd = false;
        pCB->hasDispatchCmd = false;
        pCB->state = CB_NEW;
        pCB->commandCount = 0;
        pCB->submitCount = 0;
        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
        pCB->status = 0;
        pCB->static_status = 0;
        // Clear dynamic-state tracking masks (viewport/scissor, including the
        // EXT_extended_dynamic_state "with count" variants).
        pCB->viewportMask = 0;
        pCB->viewportWithCountMask = 0;
        pCB->scissorMask = 0;
        pCB->scissorWithCountMask = 0;
        pCB->primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;

        // Reset per-bind-point (graphics/compute/ray tracing) binding state.
        for (auto &item : pCB->lastBound) {
            item.second.reset();
        }

        pCB->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
        pCB->activeRenderPass = nullptr;
        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
        pCB->activeSubpass = 0;
        pCB->broken_bindings.clear();
        pCB->waitedEvents.clear();
        pCB->events.clear();
        pCB->writeEventsBeforeWait.clear();
        pCB->activeQueries.clear();
        pCB->startedQueries.clear();
        pCB->image_layout_map.clear();
        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
        pCB->vertex_buffer_used = false;
        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
        // If secondary, invalidate any primary command buffer that may call us.
        // NOTE: must happen before linkedCommandBuffers is cleared below, since the
        // invalidation walks that set.
        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
        }

        // Remove reverse command buffer links.
        for (auto pSubCB : pCB->linkedCommandBuffers) {
            pSubCB->linkedCommandBuffers.erase(pCB);
        }
        pCB->linkedCommandBuffers.clear();
        // Drop deferred-validation callbacks queued for submit/execute time.
        pCB->queue_submit_functions.clear();
        pCB->cmd_execute_commands_functions.clear();
        pCB->eventUpdates.clear();
        pCB->queryUpdates.clear();

        // Remove object bindings
        // NOTE: must happen before object_bindings.clear() below, since the removal
        // iterates that vector to unlink this CB from each bound object's state.
        for (const auto &obj : pCB->object_bindings) {
            RemoveCommandBufferBinding(obj, pCB);
        }
        pCB->object_bindings.clear();
        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
        for (auto framebuffer : pCB->framebuffers) {
            framebuffer->cb_bindings.erase(pCB);
        }
        pCB->framebuffers.clear();
        pCB->activeFramebuffer = VK_NULL_HANDLE;
        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));

        // Reset queue-family-ownership-transfer barrier tracking.
        pCB->qfo_transfer_image_barriers.Reset();
        pCB->qfo_transfer_buffer_barriers.Reset();

        // Clean up the label data
        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
        pCB->debug_label.Reset();
        pCB->validate_descriptorsets_in_queuesubmit.clear();

        // Best practices info
        pCB->small_indexed_draw_call_count = 0;

        pCB->transform_feedback_active = false;
    }
    // Invoked even when the CB state was not found, so layered observers (e.g. GPU-AV)
    // always see the reset.
    if (command_buffer_reset_callback) {
        (*command_buffer_reset_callback)(cb);
    }
}
1472
1473void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
1474 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
1475 VkResult result) {
1476 if (VK_SUCCESS != result) return;
1477
1478 const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
1479 if (nullptr == enabled_features_found) {
1480 const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
1481 if (features2) {
1482 enabled_features_found = &(features2->features);
1483 }
1484 }
1485
1486 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
1487 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
1488 ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
1489
1490 if (nullptr == enabled_features_found) {
1491 state_tracker->enabled_features.core = {};
1492 } else {
1493 state_tracker->enabled_features.core = *enabled_features_found;
1494 }
1495
1496 // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
1497 // previously set them through an explicit API call.
1498 uint32_t count;
1499 auto pd_state = GetPhysicalDeviceState(gpu);
1500 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
1501 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
1502 DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
1503 // Save local link to this device's physical device state
1504 state_tracker->physical_device_state = pd_state;
1505
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001506 const auto *vulkan_12_features = lvl_find_in_chain<VkPhysicalDeviceVulkan12Features>(pCreateInfo->pNext);
1507 if (vulkan_12_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001508 state_tracker->enabled_features.core12 = *vulkan_12_features;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001509 } else {
sfricke-samsung27c70722020-05-02 08:42:39 -07001510 // Set Extension Feature Aliases to false as there is no struct to check
1511 state_tracker->enabled_features.core12.drawIndirectCount = VK_FALSE;
1512 state_tracker->enabled_features.core12.samplerMirrorClampToEdge = VK_FALSE;
1513 state_tracker->enabled_features.core12.descriptorIndexing = VK_FALSE;
1514 state_tracker->enabled_features.core12.samplerFilterMinmax = VK_FALSE;
1515 state_tracker->enabled_features.core12.shaderOutputLayer = VK_FALSE;
1516 state_tracker->enabled_features.core12.shaderOutputViewportIndex = VK_FALSE;
1517
1518 // These structs are only allowed in pNext chain if there is no VkPhysicalDeviceVulkan12Features
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001519
1520 const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeatures>(pCreateInfo->pNext);
1521 if (eight_bit_storage_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001522 state_tracker->enabled_features.core12.storageBuffer8BitAccess = eight_bit_storage_features->storageBuffer8BitAccess;
1523 state_tracker->enabled_features.core12.uniformAndStorageBuffer8BitAccess =
1524 eight_bit_storage_features->uniformAndStorageBuffer8BitAccess;
1525 state_tracker->enabled_features.core12.storagePushConstant8 = eight_bit_storage_features->storagePushConstant8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001526 }
1527
1528 const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceShaderFloat16Int8Features>(pCreateInfo->pNext);
1529 if (float16_int8_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001530 state_tracker->enabled_features.core12.shaderFloat16 = float16_int8_features->shaderFloat16;
1531 state_tracker->enabled_features.core12.shaderInt8 = float16_int8_features->shaderInt8;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001532 }
1533
1534 const auto *descriptor_indexing_features =
1535 lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeatures>(pCreateInfo->pNext);
1536 if (descriptor_indexing_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001537 state_tracker->enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing =
1538 descriptor_indexing_features->shaderInputAttachmentArrayDynamicIndexing;
1539 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing =
1540 descriptor_indexing_features->shaderUniformTexelBufferArrayDynamicIndexing;
1541 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing =
1542 descriptor_indexing_features->shaderStorageTexelBufferArrayDynamicIndexing;
1543 state_tracker->enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing =
1544 descriptor_indexing_features->shaderUniformBufferArrayNonUniformIndexing;
1545 state_tracker->enabled_features.core12.shaderSampledImageArrayNonUniformIndexing =
1546 descriptor_indexing_features->shaderSampledImageArrayNonUniformIndexing;
1547 state_tracker->enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing =
1548 descriptor_indexing_features->shaderStorageBufferArrayNonUniformIndexing;
1549 state_tracker->enabled_features.core12.shaderStorageImageArrayNonUniformIndexing =
1550 descriptor_indexing_features->shaderStorageImageArrayNonUniformIndexing;
1551 state_tracker->enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing =
1552 descriptor_indexing_features->shaderInputAttachmentArrayNonUniformIndexing;
1553 state_tracker->enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing =
1554 descriptor_indexing_features->shaderUniformTexelBufferArrayNonUniformIndexing;
1555 state_tracker->enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing =
1556 descriptor_indexing_features->shaderStorageTexelBufferArrayNonUniformIndexing;
1557 state_tracker->enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind =
1558 descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind;
1559 state_tracker->enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind =
1560 descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind;
1561 state_tracker->enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind =
1562 descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind;
1563 state_tracker->enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind =
1564 descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind;
1565 state_tracker->enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind =
1566 descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind;
1567 state_tracker->enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind =
1568 descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind;
1569 state_tracker->enabled_features.core12.descriptorBindingUpdateUnusedWhilePending =
1570 descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending;
1571 state_tracker->enabled_features.core12.descriptorBindingPartiallyBound =
1572 descriptor_indexing_features->descriptorBindingPartiallyBound;
1573 state_tracker->enabled_features.core12.descriptorBindingVariableDescriptorCount =
1574 descriptor_indexing_features->descriptorBindingVariableDescriptorCount;
1575 state_tracker->enabled_features.core12.runtimeDescriptorArray = descriptor_indexing_features->runtimeDescriptorArray;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001576 }
1577
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001578 const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001579 if (scalar_block_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001580 state_tracker->enabled_features.core12.scalarBlockLayout = scalar_block_layout_features->scalarBlockLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001581 }
1582
1583 const auto *imageless_framebuffer_features =
1584 lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeatures>(pCreateInfo->pNext);
1585 if (imageless_framebuffer_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001586 state_tracker->enabled_features.core12.imagelessFramebuffer = imageless_framebuffer_features->imagelessFramebuffer;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001587 }
1588
1589 const auto *uniform_buffer_standard_layout_features =
1590 lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeatures>(pCreateInfo->pNext);
1591 if (uniform_buffer_standard_layout_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001592 state_tracker->enabled_features.core12.uniformBufferStandardLayout =
1593 uniform_buffer_standard_layout_features->uniformBufferStandardLayout;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001594 }
1595
1596 const auto *subgroup_extended_types_features =
1597 lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures>(pCreateInfo->pNext);
1598 if (subgroup_extended_types_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001599 state_tracker->enabled_features.core12.shaderSubgroupExtendedTypes =
1600 subgroup_extended_types_features->shaderSubgroupExtendedTypes;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001601 }
1602
1603 const auto *separate_depth_stencil_layouts_features =
1604 lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures>(pCreateInfo->pNext);
1605 if (separate_depth_stencil_layouts_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001606 state_tracker->enabled_features.core12.separateDepthStencilLayouts =
1607 separate_depth_stencil_layouts_features->separateDepthStencilLayouts;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001608 }
1609
1610 const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeatures>(pCreateInfo->pNext);
1611 if (host_query_reset_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001612 state_tracker->enabled_features.core12.hostQueryReset = host_query_reset_features->hostQueryReset;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001613 }
1614
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001615 const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeatures>(pCreateInfo->pNext);
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001616 if (timeline_semaphore_features) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001617 state_tracker->enabled_features.core12.timelineSemaphore = timeline_semaphore_features->timelineSemaphore;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001618 }
1619
1620 const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeatures>(pCreateInfo->pNext);
1621 if (buffer_device_address) {
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001622 state_tracker->enabled_features.core12.bufferDeviceAddress = buffer_device_address->bufferDeviceAddress;
1623 state_tracker->enabled_features.core12.bufferDeviceAddressCaptureReplay =
1624 buffer_device_address->bufferDeviceAddressCaptureReplay;
1625 state_tracker->enabled_features.core12.bufferDeviceAddressMultiDevice =
1626 buffer_device_address->bufferDeviceAddressMultiDevice;
1627 }
1628 }
1629
1630 const auto *vulkan_11_features = lvl_find_in_chain<VkPhysicalDeviceVulkan11Features>(pCreateInfo->pNext);
1631 if (vulkan_11_features) {
1632 state_tracker->enabled_features.core11 = *vulkan_11_features;
1633 } else {
1634 // These structs are only allowed in pNext chain if there is no kPhysicalDeviceVulkan11Features
1635
1636 const auto *sixteen_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice16BitStorageFeatures>(pCreateInfo->pNext);
1637 if (sixteen_bit_storage_features) {
1638 state_tracker->enabled_features.core11.storageBuffer16BitAccess =
1639 sixteen_bit_storage_features->storageBuffer16BitAccess;
1640 state_tracker->enabled_features.core11.uniformAndStorageBuffer16BitAccess =
1641 sixteen_bit_storage_features->uniformAndStorageBuffer16BitAccess;
1642 state_tracker->enabled_features.core11.storagePushConstant16 = sixteen_bit_storage_features->storagePushConstant16;
1643 state_tracker->enabled_features.core11.storageInputOutput16 = sixteen_bit_storage_features->storageInputOutput16;
1644 }
1645
1646 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1647 if (multiview_features) {
1648 state_tracker->enabled_features.core11.multiview = multiview_features->multiview;
1649 state_tracker->enabled_features.core11.multiviewGeometryShader = multiview_features->multiviewGeometryShader;
1650 state_tracker->enabled_features.core11.multiviewTessellationShader = multiview_features->multiviewTessellationShader;
1651 }
1652
1653 const auto *variable_pointers_features = lvl_find_in_chain<VkPhysicalDeviceVariablePointersFeatures>(pCreateInfo->pNext);
1654 if (variable_pointers_features) {
1655 state_tracker->enabled_features.core11.variablePointersStorageBuffer =
1656 variable_pointers_features->variablePointersStorageBuffer;
1657 state_tracker->enabled_features.core11.variablePointers = variable_pointers_features->variablePointers;
1658 }
1659
1660 const auto *protected_memory_features = lvl_find_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pCreateInfo->pNext);
1661 if (protected_memory_features) {
1662 state_tracker->enabled_features.core11.protectedMemory = protected_memory_features->protectedMemory;
1663 }
1664
1665 const auto *ycbcr_conversion_features =
1666 lvl_find_in_chain<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pCreateInfo->pNext);
1667 if (ycbcr_conversion_features) {
1668 state_tracker->enabled_features.core11.samplerYcbcrConversion = ycbcr_conversion_features->samplerYcbcrConversion;
1669 }
1670
1671 const auto *shader_draw_parameters_features =
1672 lvl_find_in_chain<VkPhysicalDeviceShaderDrawParametersFeatures>(pCreateInfo->pNext);
1673 if (shader_draw_parameters_features) {
1674 state_tracker->enabled_features.core11.shaderDrawParameters = shader_draw_parameters_features->shaderDrawParameters;
Tony-LunarGb036c2f2019-12-05 14:38:25 -07001675 }
1676 }
1677
locke-lunargd556cc32019-09-17 01:21:23 -06001678 const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
Tony-LunarGca4891a2020-08-10 15:46:46 -06001679 if (device_group_ci) {
1680 state_tracker->physical_device_count = device_group_ci->physicalDeviceCount;
1681 state_tracker->device_group_create_info = *device_group_ci;
1682 } else {
1683 state_tracker->physical_device_count = 1;
1684 }
locke-lunargd556cc32019-09-17 01:21:23 -06001685
locke-lunargd556cc32019-09-17 01:21:23 -06001686 const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
1687 if (exclusive_scissor_features) {
1688 state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
1689 }
1690
1691 const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
1692 if (shading_rate_image_features) {
1693 state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
1694 }
1695
1696 const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
1697 if (mesh_shader_features) {
1698 state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
1699 }
1700
1701 const auto *inline_uniform_block_features =
1702 lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
1703 if (inline_uniform_block_features) {
1704 state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
1705 }
1706
1707 const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
1708 if (transform_feedback_features) {
1709 state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
1710 }
1711
locke-lunargd556cc32019-09-17 01:21:23 -06001712 const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
1713 if (vtx_attrib_div_features) {
1714 state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
1715 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001716
Jeff Bolz4563f2a2019-12-10 13:30:30 -06001717 const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
1718 if (buffer_device_address_ext) {
Jeff Bolz33fc6722020-03-31 12:58:16 -05001719 state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
locke-lunargd556cc32019-09-17 01:21:23 -06001720 }
1721
1722 const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
1723 if (cooperative_matrix_features) {
1724 state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
1725 }
1726
locke-lunargd556cc32019-09-17 01:21:23 -06001727 const auto *compute_shader_derivatives_features =
1728 lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
1729 if (compute_shader_derivatives_features) {
1730 state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
1731 }
1732
1733 const auto *fragment_shader_barycentric_features =
1734 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
1735 if (fragment_shader_barycentric_features) {
1736 state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
1737 }
1738
1739 const auto *shader_image_footprint_features =
1740 lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
1741 if (shader_image_footprint_features) {
1742 state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
1743 }
1744
1745 const auto *fragment_shader_interlock_features =
1746 lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
1747 if (fragment_shader_interlock_features) {
1748 state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
1749 }
1750
1751 const auto *demote_to_helper_invocation_features =
1752 lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
1753 if (demote_to_helper_invocation_features) {
1754 state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
1755 }
1756
1757 const auto *texel_buffer_alignment_features =
1758 lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
1759 if (texel_buffer_alignment_features) {
1760 state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
1761 }
1762
locke-lunargd556cc32019-09-17 01:21:23 -06001763 const auto *pipeline_exe_props_features =
1764 lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
1765 if (pipeline_exe_props_features) {
1766 state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
1767 }
1768
Jeff Bolz82f854d2019-09-17 14:56:47 -05001769 const auto *dedicated_allocation_image_aliasing_features =
1770 lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
1771 if (dedicated_allocation_image_aliasing_features) {
1772 state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
1773 *dedicated_allocation_image_aliasing_features;
1774 }
1775
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001776 const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
1777 if (performance_query_features) {
1778 state_tracker->enabled_features.performance_query_features = *performance_query_features;
1779 }
1780
Tobias Hector782bcde2019-11-28 16:19:42 +00001781 const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
1782 if (device_coherent_memory_features) {
1783 state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
1784 }
1785
sfricke-samsungcead0802020-01-30 22:20:10 -08001786 const auto *ycbcr_image_array_features = lvl_find_in_chain<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT>(pCreateInfo->pNext);
1787 if (ycbcr_image_array_features) {
1788 state_tracker->enabled_features.ycbcr_image_array_features = *ycbcr_image_array_features;
1789 }
1790
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001791 const auto *ray_tracing_features = lvl_find_in_chain<VkPhysicalDeviceRayTracingFeaturesKHR>(pCreateInfo->pNext);
1792 if (ray_tracing_features) {
1793 state_tracker->enabled_features.ray_tracing_features = *ray_tracing_features;
1794 }
1795
Jeff Bolz165818a2020-05-08 11:19:03 -05001796 const auto *robustness2_features = lvl_find_in_chain<VkPhysicalDeviceRobustness2FeaturesEXT>(pCreateInfo->pNext);
1797 if (robustness2_features) {
1798 state_tracker->enabled_features.robustness2_features = *robustness2_features;
1799 }
1800
janharaldfredriksen-arm3b793772020-05-12 18:55:53 +02001801 const auto *fragment_density_map_features =
1802 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMapFeaturesEXT>(pCreateInfo->pNext);
1803 if (fragment_density_map_features) {
1804 state_tracker->enabled_features.fragment_density_map_features = *fragment_density_map_features;
1805 }
1806
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001807 const auto *fragment_density_map_features2 =
1808 lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT>(pCreateInfo->pNext);
1809 if (fragment_density_map_features2) {
1810 state_tracker->enabled_features.fragment_density_map2_features = *fragment_density_map_features2;
1811 }
1812
sfricke-samsung0c4a06f2020-06-27 01:24:32 -07001813 const auto *astc_decode_features = lvl_find_in_chain<VkPhysicalDeviceASTCDecodeFeaturesEXT>(pCreateInfo->pNext);
1814 if (astc_decode_features) {
1815 state_tracker->enabled_features.astc_decode_features = *astc_decode_features;
1816 }
1817
Tony-LunarG7337b312020-04-15 16:40:25 -06001818 const auto *custom_border_color_features = lvl_find_in_chain<VkPhysicalDeviceCustomBorderColorFeaturesEXT>(pCreateInfo->pNext);
1819 if (custom_border_color_features) {
1820 state_tracker->enabled_features.custom_border_color_features = *custom_border_color_features;
1821 }
1822
sfricke-samsungfd661d62020-05-16 00:57:27 -07001823 const auto *pipeline_creation_cache_control_features =
1824 lvl_find_in_chain<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT>(pCreateInfo->pNext);
1825 if (pipeline_creation_cache_control_features) {
1826 state_tracker->enabled_features.pipeline_creation_cache_control_features = *pipeline_creation_cache_control_features;
1827 }
1828
Piers Daniell39842ee2020-07-10 16:42:33 -06001829 const auto *extended_dynamic_state_features =
1830 lvl_find_in_chain<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT>(pCreateInfo->pNext);
1831 if (extended_dynamic_state_features) {
1832 state_tracker->enabled_features.extended_dynamic_state_features = *extended_dynamic_state_features;
1833 }
1834
locke-lunarg3fa463a2020-10-23 16:39:04 -06001835 const auto *multiview_features = lvl_find_in_chain<VkPhysicalDeviceMultiviewFeatures>(pCreateInfo->pNext);
1836 if (multiview_features) {
1837 state_tracker->enabled_features.multiview_features = *multiview_features;
1838 }
1839
locke-lunargd556cc32019-09-17 01:21:23 -06001840 // Store physical device properties and physical device mem limits into CoreChecks structs
1841 DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
1842 DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001843 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1844 &state_tracker->phys_dev_props_core11);
1845 GetPhysicalDeviceExtProperties(gpu, state_tracker->device_extensions.vk_feature_version_1_2,
1846 &state_tracker->phys_dev_props_core12);
locke-lunargd556cc32019-09-17 01:21:23 -06001847
1848 const auto &dev_ext = state_tracker->device_extensions;
1849 auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
1850
1851 if (dev_ext.vk_khr_push_descriptor) {
1852 // Get the needed push_descriptor limits
1853 VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
1854 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
1855 phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
1856 }
1857
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001858 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_ext_descriptor_indexing) {
1859 VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_prop;
1860 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &descriptor_indexing_prop);
1861 state_tracker->phys_dev_props_core12.maxUpdateAfterBindDescriptorsInAllPools =
1862 descriptor_indexing_prop.maxUpdateAfterBindDescriptorsInAllPools;
1863 state_tracker->phys_dev_props_core12.shaderUniformBufferArrayNonUniformIndexingNative =
1864 descriptor_indexing_prop.shaderUniformBufferArrayNonUniformIndexingNative;
1865 state_tracker->phys_dev_props_core12.shaderSampledImageArrayNonUniformIndexingNative =
1866 descriptor_indexing_prop.shaderSampledImageArrayNonUniformIndexingNative;
1867 state_tracker->phys_dev_props_core12.shaderStorageBufferArrayNonUniformIndexingNative =
1868 descriptor_indexing_prop.shaderStorageBufferArrayNonUniformIndexingNative;
1869 state_tracker->phys_dev_props_core12.shaderStorageImageArrayNonUniformIndexingNative =
1870 descriptor_indexing_prop.shaderStorageImageArrayNonUniformIndexingNative;
1871 state_tracker->phys_dev_props_core12.shaderInputAttachmentArrayNonUniformIndexingNative =
1872 descriptor_indexing_prop.shaderInputAttachmentArrayNonUniformIndexingNative;
1873 state_tracker->phys_dev_props_core12.robustBufferAccessUpdateAfterBind =
1874 descriptor_indexing_prop.robustBufferAccessUpdateAfterBind;
1875 state_tracker->phys_dev_props_core12.quadDivergentImplicitLod = descriptor_indexing_prop.quadDivergentImplicitLod;
1876 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSamplers =
1877 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSamplers;
1878 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindUniformBuffers =
1879 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
1880 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageBuffers =
1881 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
1882 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindSampledImages =
1883 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindSampledImages;
1884 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindStorageImages =
1885 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindStorageImages;
1886 state_tracker->phys_dev_props_core12.maxPerStageDescriptorUpdateAfterBindInputAttachments =
1887 descriptor_indexing_prop.maxPerStageDescriptorUpdateAfterBindInputAttachments;
1888 state_tracker->phys_dev_props_core12.maxPerStageUpdateAfterBindResources =
1889 descriptor_indexing_prop.maxPerStageUpdateAfterBindResources;
1890 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSamplers =
1891 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSamplers;
1892 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffers =
1893 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffers;
1894 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
1895 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
1896 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffers =
1897 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffers;
1898 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
1899 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
1900 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindSampledImages =
1901 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindSampledImages;
1902 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindStorageImages =
1903 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindStorageImages;
1904 state_tracker->phys_dev_props_core12.maxDescriptorSetUpdateAfterBindInputAttachments =
1905 descriptor_indexing_prop.maxDescriptorSetUpdateAfterBindInputAttachments;
1906 }
1907
locke-lunargd556cc32019-09-17 01:21:23 -06001908 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
1909 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
1910 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
1911 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001912
1913 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_depth_stencil_resolve) {
1914 VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
1915 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &depth_stencil_resolve_props);
1916 state_tracker->phys_dev_props_core12.supportedDepthResolveModes = depth_stencil_resolve_props.supportedDepthResolveModes;
1917 state_tracker->phys_dev_props_core12.supportedStencilResolveModes =
1918 depth_stencil_resolve_props.supportedStencilResolveModes;
1919 state_tracker->phys_dev_props_core12.independentResolveNone = depth_stencil_resolve_props.independentResolveNone;
1920 state_tracker->phys_dev_props_core12.independentResolve = depth_stencil_resolve_props.independentResolve;
1921 }
1922
locke-lunargd556cc32019-09-17 01:21:23 -06001923 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001924 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_propsNV);
1925 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_ray_tracing, &phys_dev_props->ray_tracing_propsKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06001926 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
1927 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
janharaldfredriksen-arm36e17572020-07-07 13:59:28 +02001928 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map_2, &phys_dev_props->fragment_density_map2_props);
Lionel Landwerlinc7420912019-05-23 00:33:42 +01001929 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
sfricke-samsung8f658d42020-05-03 20:12:24 -07001930 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_sample_locations, &phys_dev_props->sample_locations_props);
Tony-LunarG7337b312020-04-15 16:40:25 -06001931 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_custom_border_color, &phys_dev_props->custom_border_color_props);
locke-lunarg3fa463a2020-10-23 16:39:04 -06001932 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_multiview, &phys_dev_props->multiview_props);
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001933
1934 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_timeline_semaphore) {
1935 VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
1936 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &timeline_semaphore_props);
1937 state_tracker->phys_dev_props_core12.maxTimelineSemaphoreValueDifference =
1938 timeline_semaphore_props.maxTimelineSemaphoreValueDifference;
1939 }
1940
1941 if (!state_tracker->device_extensions.vk_feature_version_1_2 && dev_ext.vk_khr_shader_float_controls) {
1942 VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
1943 GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_shader_float_controls, &float_controls_props);
1944 state_tracker->phys_dev_props_core12.denormBehaviorIndependence = float_controls_props.denormBehaviorIndependence;
1945 state_tracker->phys_dev_props_core12.roundingModeIndependence = float_controls_props.roundingModeIndependence;
1946 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat16 =
1947 float_controls_props.shaderSignedZeroInfNanPreserveFloat16;
1948 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat32 =
1949 float_controls_props.shaderSignedZeroInfNanPreserveFloat32;
1950 state_tracker->phys_dev_props_core12.shaderSignedZeroInfNanPreserveFloat64 =
1951 float_controls_props.shaderSignedZeroInfNanPreserveFloat64;
1952 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat16 = float_controls_props.shaderDenormPreserveFloat16;
1953 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat32 = float_controls_props.shaderDenormPreserveFloat32;
1954 state_tracker->phys_dev_props_core12.shaderDenormPreserveFloat64 = float_controls_props.shaderDenormPreserveFloat64;
1955 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat16 = float_controls_props.shaderDenormFlushToZeroFloat16;
1956 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat32 = float_controls_props.shaderDenormFlushToZeroFloat32;
1957 state_tracker->phys_dev_props_core12.shaderDenormFlushToZeroFloat64 = float_controls_props.shaderDenormFlushToZeroFloat64;
1958 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat16 = float_controls_props.shaderRoundingModeRTEFloat16;
1959 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat32 = float_controls_props.shaderRoundingModeRTEFloat32;
1960 state_tracker->phys_dev_props_core12.shaderRoundingModeRTEFloat64 = float_controls_props.shaderRoundingModeRTEFloat64;
1961 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat16 = float_controls_props.shaderRoundingModeRTZFloat16;
1962 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat32 = float_controls_props.shaderRoundingModeRTZFloat32;
1963 state_tracker->phys_dev_props_core12.shaderRoundingModeRTZFloat64 = float_controls_props.shaderRoundingModeRTZFloat64;
1964 }
Mark Lobodzinskie4a2b7f2019-12-20 12:51:30 -07001965
locke-lunargd556cc32019-09-17 01:21:23 -06001966 if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
1967 // Get the needed cooperative_matrix properties
1968 auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
1969 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
1970 instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
1971 state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
1972
1973 uint32_t numCooperativeMatrixProperties = 0;
1974 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
1975 state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
1976 lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
1977
1978 instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
1979 state_tracker->cooperative_matrix_properties.data());
1980 }
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001981 if (!state_tracker->device_extensions.vk_feature_version_1_2 && state_tracker->api_version >= VK_API_VERSION_1_1) {
locke-lunargd556cc32019-09-17 01:21:23 -06001982 // Get the needed subgroup limits
1983 auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
1984 auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
1985 instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
1986
Piers Daniell41b8c5d2020-01-10 15:42:00 -07001987 state_tracker->phys_dev_props_core11.subgroupSize = subgroup_prop.subgroupSize;
1988 state_tracker->phys_dev_props_core11.subgroupSupportedStages = subgroup_prop.supportedStages;
1989 state_tracker->phys_dev_props_core11.subgroupSupportedOperations = subgroup_prop.supportedOperations;
1990 state_tracker->phys_dev_props_core11.subgroupQuadOperationsInAllStages = subgroup_prop.quadOperationsInAllStages;
locke-lunargd556cc32019-09-17 01:21:23 -06001991 }
1992
1993 // Store queue family data
1994 if (pCreateInfo->pQueueCreateInfos != nullptr) {
1995 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001996 const VkDeviceQueueCreateInfo &queue_create_info = pCreateInfo->pQueueCreateInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06001997 state_tracker->queue_family_index_map.insert(
sfricke-samsung590ae1e2020-04-25 01:18:05 -07001998 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.queueCount));
1999 state_tracker->queue_family_create_flags_map.insert(
2000 std::make_pair(queue_create_info.queueFamilyIndex, queue_create_info.flags));
locke-lunargd556cc32019-09-17 01:21:23 -06002001 }
2002 }
2003}
2004
2005void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
2006 if (!device) return;
2007
locke-lunargd556cc32019-09-17 01:21:23 -06002008 // Reset all command buffers before destroying them, to unlink object_bindings.
2009 for (auto &commandBuffer : commandBufferMap) {
2010 ResetCommandBufferState(commandBuffer.first);
2011 }
Jeff Bolzadbfa852019-10-04 13:53:30 -05002012 pipelineMap.clear();
2013 renderPassMap.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06002014 commandBufferMap.clear();
2015
2016 // This will also delete all sets in the pool & remove them from setMap
2017 DeleteDescriptorSetPools();
2018 // All sets should be removed
2019 assert(setMap.empty());
2020 descriptorSetLayoutMap.clear();
2021 imageViewMap.clear();
2022 imageMap.clear();
2023 bufferViewMap.clear();
2024 bufferMap.clear();
2025 // Queues persist until device is destroyed
2026 queueMap.clear();
2027}
2028
2029// Loop through bound objects and increment their in_use counts.
2030void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
2031 for (auto obj : cb_node->object_bindings) {
2032 auto base_obj = GetStateStructPtrFromObject(obj);
2033 if (base_obj) {
2034 base_obj->in_use.fetch_add(1);
2035 }
2036 }
2037}
2038
2039// Track which resources are in-flight by atomically incrementing their "in_use" count
2040void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
2041 cb_node->submitCount++;
2042 cb_node->in_use.fetch_add(1);
2043
2044 // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
2045 IncrementBoundObjects(cb_node);
2046 // TODO : We should be able to remove the NULL look-up checks from the code below as long as
2047 // all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
2048 // should then be flagged prior to calling this function
2049 for (auto event : cb_node->writeEventsBeforeWait) {
2050 auto event_state = GetEventState(event);
2051 if (event_state) event_state->write_in_use++;
2052 }
2053}
2054
2055// Decrement in-use count for objects bound to command buffer
2056void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
2057 BASE_NODE *base_obj = nullptr;
2058 for (auto obj : cb_node->object_bindings) {
2059 base_obj = GetStateStructPtrFromObject(obj);
2060 if (base_obj) {
2061 base_obj->in_use.fetch_sub(1);
2062 }
2063 }
2064}
2065
// Roll the given queue's tracked state forward until its sequence number
// reaches `seq`, retiring each completed submission in order: release
// semaphore/command-buffer/bound-object in_use counts, promote ended queries
// to available, and mark internally-scoped fences retired. Cross-queue binary
// waits and timeline-semaphore payloads observed along the way are collected
// and retired recursively at the end.
void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
    // Highest submission seq this queue waited on, per other queue (binary path).
    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
    // Highest payload this queue waited on, per timeline semaphore.
    std::unordered_map<VkSemaphore, uint64_t> timelineSemaphoreCounters;

    // Roll this queue forward, one submission at a time.
    while (pQueue->seq < seq) {
        auto &submission = pQueue->submissions.front();

        for (auto &wait : submission.waitSemaphores) {
            auto pSemaphore = GetSemaphoreState(wait.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
            if (wait.type == VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
                // Remember the largest payload waited on for this timeline semaphore.
                auto &lastCounter = timelineSemaphoreCounters[wait.semaphore];
                lastCounter = std::max(lastCounter, wait.payload);
            } else {
                // Binary semaphore: remember how far the signaling queue must advance.
                auto &lastSeq = otherQueueSeqs[wait.queue];
                lastSeq = std::max(lastSeq, wait.seq);
            }
        }

        for (auto &signal : submission.signalSemaphores) {
            auto pSemaphore = GetSemaphoreState(signal.semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
                // Timeline payloads are monotonic: only ever move forward.
                if (pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && pSemaphore->payload < signal.payload) {
                    pSemaphore->payload = signal.payload;
                }
            }
        }

        for (auto &semaphore : submission.externalSemaphores) {
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                pSemaphore->in_use.fetch_sub(1);
            }
        }

        for (auto cb : submission.cbs) {
            auto cb_node = GetCBState(cb);
            if (!cb_node) {
                continue;
            }
            // First perform decrement on general case bound objects
            DecrementBoundResources(cb_node);
            for (auto event : cb_node->writeEventsBeforeWait) {
                auto eventNode = eventMap.find(event);
                if (eventNode != eventMap.end()) {
                    eventNode->second.write_in_use--;
                }
            }
            // Replay the recorded query-state lambdas (without validation) to
            // learn this command buffer's final per-query states.
            QueryMap localQueryToStateMap;
            VkQueryPool first_pool = VK_NULL_HANDLE;
            for (auto &function : cb_node->queryUpdates) {
                function(nullptr, /*do_validate*/ false, first_pool, submission.perf_submit_pass, &localQueryToStateMap);
            }

            // A query whose scope ended in this retired submission is now available.
            for (auto queryStatePair : localQueryToStateMap) {
                if (queryStatePair.second == QUERYSTATE_ENDED) {
                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
                }
            }
            cb_node->in_use.fetch_sub(1);
        }

        auto pFence = GetFenceState(submission.fence);
        if (pFence && pFence->scope == kSyncScopeInternal) {
            pFence->state = FENCE_RETIRED;
        }

        pQueue->submissions.pop_front();
        pQueue->seq++;
    }

    // Roll other queues forward to the highest seq we saw a wait for
    for (auto qs : otherQueueSeqs) {
        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
    }
    for (auto sc : timelineSemaphoreCounters) {
        RetireTimelineSemaphore(sc.first, sc.second);
    }
}
2149
2150// Submit a fence to a queue, delimiting previous fences and previous untracked
2151// work by it.
2152static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
2153 pFence->state = FENCE_INFLIGHT;
2154 pFence->signaler.first = pQueue->queue;
2155 pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
2156}
2157
// Record the state effects of a successful vkQueueSubmit: register each batch
// on the queue's submission list, take in_use references on semaphores and
// command buffers, update binary-semaphore signaler tracking and timeline
// payload expectations, and replay recorded query/event lambdas into the
// device-level maps. External-scope fences/semaphores cause early retirement
// because their corresponding waits will never be observed here.
void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
                                                       VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pQueue = GetQueueState(queue);
    auto pFence = GetFenceState(fence);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Mark fence in use
            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
            if (!submitCount) {
                // If no submissions, but just dropping a fence on the end of the queue,
                // record an empty submission with just the fence, so we can determine
                // its completion.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    // Now process each individual submit
    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
        std::vector<VkCommandBuffer> cbs;
        const VkSubmitInfo *submit = &pSubmits[submit_idx];
        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        // Sequence number this batch will occupy once pushed onto the queue.
        const uint64_t next_seq = pQueue->seq + pQueue->submissions.size() + 1;
        // NOTE(review): dereferenced below without a null check for timeline
        // semaphores — valid usage requires this struct in the pNext chain when
        // a timeline semaphore is waited/signaled; presumably enforced by the
        // validation pass before this record step. TODO confirm.
        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_WAIT wait;
                    wait.semaphore = semaphore;
                    wait.type = pSemaphore->type;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        // Binary wait consumes the pending signal: record a wait on the
                        // signaling queue/seq (if any), then clear signaled state.
                        if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                            wait.queue = pSemaphore->signaler.first;
                            wait.seq = pSemaphore->signaler.second;
                            semaphore_waits.push_back(wait);
                            pSemaphore->in_use.fetch_add(1);
                        }
                        pSemaphore->signaler.first = VK_NULL_HANDLE;
                        pSemaphore->signaled = false;
                    } else if (pSemaphore->payload < timeline_semaphore_submit->pWaitSemaphoreValues[i]) {
                        // Timeline wait on a value not yet reached: track it against
                        // this queue/batch so retirement can resolve it later.
                        wait.queue = queue;
                        wait.seq = next_seq;
                        wait.payload = timeline_semaphore_submit->pWaitSemaphoreValues[i];
                        semaphore_waits.push_back(wait);
                        pSemaphore->in_use.fetch_add(1);
                    }
                } else {
                    // External semaphore: importing a temporary one reverts it to
                    // internal scope after this wait.
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = submit->pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = next_seq;
                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
                        // Binary signal: this queue/batch becomes the signaler.
                        pSemaphore->signaler.first = queue;
                        pSemaphore->signaler.second = next_seq;
                        pSemaphore->signaled = true;
                    } else {
                        signal.payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
                    }
                    pSemaphore->in_use.fetch_add(1);
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, next_seq);
                }
            }
        }
        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
        uint32_t perf_pass = perf_submit ? perf_submit->counterPassIndex : 0;

        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
            if (cb_node) {
                // Track the primary and all linked secondary command buffers as
                // in-flight for this submission.
                cbs.push_back(submit->pCommandBuffers[i]);
                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
                    IncrementResources(secondaryCmdBuffer);
                }
                IncrementResources(cb_node);

                // Replay recorded query/event lambdas (without validation) and
                // fold the results into the device-level maps.
                VkQueryPool first_pool = VK_NULL_HANDLE;
                EventToStageMap localEventToStageMap;
                QueryMap localQueryToStateMap;
                for (auto &function : cb_node->queryUpdates) {
                    function(nullptr, /*do_validate*/ false, first_pool, perf_pass, &localQueryToStateMap);
                }

                for (auto queryStatePair : localQueryToStateMap) {
                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
                }

                for (auto &function : cb_node->eventUpdates) {
                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
                }

                for (auto eventStagePair : localEventToStageMap) {
                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
                }
            }
        }

        // Only the last batch carries the fence, matching vkQueueSubmit semantics.
        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, perf_pass);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2289
2290void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
2291 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
2292 VkResult result) {
2293 if (VK_SUCCESS == result) {
2294 AddMemObjInfo(device, *pMemory, pAllocateInfo);
2295 }
2296 return;
2297}
2298
// Stop tracking a device memory allocation that is being freed: detach every
// object still bound to it, invalidate command buffers that referenced it,
// and drop the allocation's state entry.
void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
    if (!mem) return;
    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);

    // Clear mem binding for any bound objects
    for (const auto &obj : mem_info->obj_bindings) {
        BINDABLE *bindable_state = nullptr;
        switch (obj.type) {
            case kVulkanObjectTypeImage:
                bindable_state = GetImageState(obj.Cast<VkImage>());
                break;
            case kVulkanObjectTypeBuffer:
                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
                break;
            case kVulkanObjectTypeAccelerationStructureNV:
                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
                break;

            default:
                // Should only have acceleration structure, buffer, or image objects bound to memory
                assert(0);
        }

        if (bindable_state) {
            // Remove any sparse bindings bound to the resource that use this memory.
            for (auto it = bindable_state->sparse_bindings.begin(); it != bindable_state->sparse_bindings.end();) {
                // Save the successor before a possible erase(it) invalidates it.
                auto nextit = it;
                nextit++;

                auto &sparse_mem_binding = *it;
                if (sparse_mem_binding.mem_state.get() == mem_info) {
                    bindable_state->sparse_bindings.erase(it);
                }

                it = nextit;
            }
            // Recompute the resource's cached set of bound memory objects.
            bindable_state->UpdateBoundMemorySet();
        }
    }
    // Any bound cmd buffers are now invalid
    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
    RemoveAliasingImages(mem_info->bound_images);
    // Mark destroyed rather than deleting outright; shared_ptr references
    // (e.g. sparse bindings' mem_state) may still be outstanding.
    mem_info->destroyed = true;
    // Return this allocation's reserved fake-address range to the allocator.
    fake_memory.Free(mem_info->fake_base_address);
    memObjMap.erase(mem);
}
2346
// Records state for a successful vkQueueBindSparse: fence bookkeeping, sparse
// memory bindings for buffers/images, per-bind semaphore waits/signals, and
// one queue submission entry per VkBindSparseInfo.
void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
                                                           VkFence fence, VkResult result) {
    if (result != VK_SUCCESS) return;
    uint64_t early_retire_seq = 0;
    auto pFence = GetFenceState(fence);
    auto pQueue = GetQueueState(queue);

    if (pFence) {
        if (pFence->scope == kSyncScopeInternal) {
            // Attach the fence to the queue; at least one payload even when bindInfoCount == 0.
            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
            if (!bindInfoCount) {
                // No work to do, just dropping a fence in the queue by itself.
                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
                                                 std::vector<SEMAPHORE_SIGNAL>(), std::vector<VkSemaphore>(), fence, 0);
            }
        } else {
            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
            early_retire_seq = pQueue->seq + pQueue->submissions.size();
        }
    }

    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
        // Track objects tied to memory
        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size,
                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
            }
        }
        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
                SetSparseMemBinding(sparse_binding.memory, sparse_binding.memoryOffset, size,
                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
            }
        }

        std::vector<SEMAPHORE_WAIT> semaphore_waits;
        std::vector<SEMAPHORE_SIGNAL> semaphore_signals;
        std::vector<VkSemaphore> semaphore_externals;
        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
                        // There is a pending internal signal: record the wait and keep the semaphore in use.
                        semaphore_waits.push_back(
                            {semaphore, pSemaphore->type, pSemaphore->signaler.first, pSemaphore->signaler.second});
                        pSemaphore->in_use.fetch_add(1);
                    }
                    // The wait consumes the pending signal state.
                    pSemaphore->signaler.first = VK_NULL_HANDLE;
                    pSemaphore->signaled = false;
                } else {
                    // External semaphore: just hold a reference; a temporary
                    // import reverts to internal scope after this wait.
                    semaphore_externals.push_back(semaphore);
                    pSemaphore->in_use.fetch_add(1);
                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
                        pSemaphore->scope = kSyncScopeInternal;
                    }
                }
            }
        }
        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
            auto pSemaphore = GetSemaphoreState(semaphore);
            if (pSemaphore) {
                if (pSemaphore->scope == kSyncScopeInternal) {
                    // Record this queue and its next sequence number as the pending signaler.
                    pSemaphore->signaler.first = queue;
                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
                    pSemaphore->signaled = true;
                    pSemaphore->in_use.fetch_add(1);

                    SEMAPHORE_SIGNAL signal;
                    signal.semaphore = semaphore;
                    signal.seq = pSemaphore->signaler.second;
                    semaphore_signals.push_back(signal);
                } else {
                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
                }
            }
        }

        // Sparse binds carry no command buffers; the fence is attached only to the last bind info.
        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
    }

    if (early_retire_seq) {
        RetireWorkOnQueue(pQueue, early_retire_seq);
    }
}
2448
2449void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
2450 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
2451 VkResult result) {
2452 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002453 auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002454 semaphore_state->signaler.first = VK_NULL_HANDLE;
2455 semaphore_state->signaler.second = 0;
2456 semaphore_state->signaled = false;
2457 semaphore_state->scope = kSyncScopeInternal;
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002458 semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
2459 semaphore_state->payload = 0;
2460 auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
2461 if (semaphore_type_create_info) {
2462 semaphore_state->type = semaphore_type_create_info->semaphoreType;
2463 semaphore_state->payload = semaphore_type_create_info->initialValue;
2464 }
locke-lunargd556cc32019-09-17 01:21:23 -06002465 semaphoreMap[*pSemaphore] = std::move(semaphore_state);
2466}
2467
2468void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
2469 VkSemaphoreImportFlagsKHR flags) {
2470 SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
2471 if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
2472 if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
2473 sema_node->scope == kSyncScopeInternal) {
2474 sema_node->scope = kSyncScopeExternalTemporary;
2475 } else {
2476 sema_node->scope = kSyncScopeExternalPermanent;
2477 }
2478 }
2479}
2480
Juan A. Suarez Romerof3024162019-10-31 17:57:50 +00002481void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
2482 VkResult result) {
2483 auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
2484 pSemaphore->payload = pSignalInfo->value;
2485}
2486
locke-lunargd556cc32019-09-17 01:21:23 -06002487void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
2488 auto mem_info = GetDevMemState(mem);
2489 if (mem_info) {
2490 mem_info->mapped_range.offset = offset;
2491 mem_info->mapped_range.size = size;
2492 mem_info->p_driver_data = *ppData;
2493 }
2494}
2495
2496void ValidationStateTracker::RetireFence(VkFence fence) {
2497 auto pFence = GetFenceState(fence);
2498 if (pFence && pFence->scope == kSyncScopeInternal) {
2499 if (pFence->signaler.first != VK_NULL_HANDLE) {
2500 // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002501 RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
locke-lunargd556cc32019-09-17 01:21:23 -06002502 } else {
2503 // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
2504 // the fence as retired.
2505 pFence->state = FENCE_RETIRED;
2506 }
2507 }
2508}
2509
2510void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
2511 VkBool32 waitAll, uint64_t timeout, VkResult result) {
2512 if (VK_SUCCESS != result) return;
2513
2514 // When we know that all fences are complete we can clean/remove their CBs
2515 if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
2516 for (uint32_t i = 0; i < fenceCount; i++) {
2517 RetireFence(pFences[i]);
2518 }
2519 }
2520 // NOTE : Alternate case not handled here is when some fences have completed. In
2521 // this case for app to guarantee which fences completed it will have to call
2522 // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
2523}
2524
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002525void ValidationStateTracker::RetireTimelineSemaphore(VkSemaphore semaphore, uint64_t until_payload) {
2526 auto pSemaphore = GetSemaphoreState(semaphore);
2527 if (pSemaphore) {
2528 for (auto &pair : queueMap) {
2529 QUEUE_STATE &queueState = pair.second;
Tony-LunarG47d5e272020-04-07 15:35:55 -06002530 uint64_t max_seq = 0;
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002531 for (const auto &submission : queueState.submissions) {
2532 for (const auto &signalSemaphore : submission.signalSemaphores) {
2533 if (signalSemaphore.semaphore == semaphore && signalSemaphore.payload <= until_payload) {
Tony-LunarG47d5e272020-04-07 15:35:55 -06002534 if (signalSemaphore.seq > max_seq) {
2535 max_seq = signalSemaphore.seq;
2536 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002537 }
2538 }
2539 }
Tony-LunarG47d5e272020-04-07 15:35:55 -06002540 if (max_seq) {
2541 RetireWorkOnQueue(&queueState, max_seq);
2542 }
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002543 }
2544 }
2545}
2546
John Zulauff89de662020-04-13 18:57:34 -06002547void ValidationStateTracker::RecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2548 VkResult result) {
Jakub Okoński04feb3b2020-02-01 18:31:01 +01002549 if (VK_SUCCESS != result) return;
2550
2551 for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
2552 RetireTimelineSemaphore(pWaitInfo->pSemaphores[i], pWaitInfo->pValues[i]);
2553 }
2554}
2555
John Zulauff89de662020-04-13 18:57:34 -06002556void ValidationStateTracker::PostCallRecordWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout,
2557 VkResult result) {
2558 RecordWaitSemaphores(device, pWaitInfo, timeout, result);
2559}
2560
// vkWaitSemaphoresKHR alias; shared logic lives in RecordWaitSemaphores.
void ValidationStateTracker::PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo,
                                                             uint64_t timeout, VkResult result) {
    RecordWaitSemaphores(device, pWaitInfo, timeout, result);
}
2565
Adrian Coca Lorentec7d76102020-09-28 13:58:16 +02002566void ValidationStateTracker::RecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
2567 VkResult result) {
2568 if (VK_SUCCESS != result) return;
2569
2570 RetireTimelineSemaphore(semaphore, *pValue);
2571}
2572
// vkGetSemaphoreCounterValue entry point; shared logic lives in RecordGetSemaphoreCounterValue.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                    VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
// vkGetSemaphoreCounterValueKHR alias; shared logic lives in RecordGetSemaphoreCounterValue.
void ValidationStateTracker::PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue,
                                                                       VkResult result) {
    RecordGetSemaphoreCounterValue(device, semaphore, pValue, result);
}
2581
locke-lunargd556cc32019-09-17 01:21:23 -06002582void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
2583 if (VK_SUCCESS != result) return;
2584 RetireFence(fence);
2585}
2586
2587void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
2588 // Add queue to tracking set only if it is new
2589 auto queue_is_new = queues.emplace(queue);
2590 if (queue_is_new.second == true) {
2591 QUEUE_STATE *queue_state = &queueMap[queue];
2592 queue_state->queue = queue;
2593 queue_state->queueFamilyIndex = queue_family_index;
2594 queue_state->seq = 0;
2595 }
2596}
2597
// vkGetDeviceQueue entry point; shared logic lives in RecordGetDeviceQueueState.
void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
                                                          VkQueue *pQueue) {
    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
}
2602
// vkGetDeviceQueue2 entry point; shared logic lives in RecordGetDeviceQueueState.
void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
}
2606
// The queue is now idle, so every submission it has received can be retired.
void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
    if (VK_SUCCESS != result) return;
    QUEUE_STATE *queue_state = GetQueueState(queue);
    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
}
2612
2613void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
2614 if (VK_SUCCESS != result) return;
2615 for (auto &queue : queueMap) {
Jeff Bolz8041c5b2019-10-20 22:14:20 -05002616 RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
locke-lunargd556cc32019-09-17 01:21:23 -06002617 }
2618}
2619
2620void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
2621 if (!fence) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002622 auto fence_state = GetFenceState(fence);
2623 fence_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002624 fenceMap.erase(fence);
2625}
2626
2627void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
2628 const VkAllocationCallbacks *pAllocator) {
2629 if (!semaphore) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002630 auto semaphore_state = GetSemaphoreState(semaphore);
2631 semaphore_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002632 semaphoreMap.erase(semaphore);
2633}
2634
2635void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
2636 if (!event) return;
2637 EVENT_STATE *event_state = GetEventState(event);
2638 const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
2639 InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
2640 eventMap.erase(event);
2641}
2642
2643void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
2644 const VkAllocationCallbacks *pAllocator) {
2645 if (!queryPool) return;
2646 QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
2647 const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
2648 InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002649 qp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002650 queryPoolMap.erase(queryPool);
2651}
2652
// Object with given handle is being bound to memory w/ given mem_info struct.
// Records the handle in the memory object's per-type bound-object set so the
// allocation and the resource can be cross-referenced (e.g. on free).
locke-lunargd556cc32019-09-17 01:21:23 -06002657void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002658 VkDeviceSize memoryOffset) {
locke-lunargd556cc32019-09-17 01:21:23 -06002659 if (typed_handle.type == kVulkanObjectTypeImage) {
2660 mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
2661 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002662 mem_info->bound_buffers.insert(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002663 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002664 mem_info->bound_acceleration_structures.insert(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002665 } else {
2666 // Unsupported object type
2667 assert(false);
2668 }
2669}
2670
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002671void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2672 InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002673}
2674
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002675void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset) {
2676 InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset);
locke-lunargd556cc32019-09-17 01:21:23 -06002677}
2678
// Typed convenience wrapper over InsertMemoryRange for NV acceleration structures.
void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
                                                                    VkDeviceSize mem_offset) {
    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset);
}
2683
2684// This function will remove the handle-to-index mapping from the appropriate map.
2685static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
2686 if (typed_handle.type == kVulkanObjectTypeImage) {
2687 mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
2688 } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002689 mem_info->bound_buffers.erase(typed_handle.Cast<VkBuffer>());
locke-lunargd556cc32019-09-17 01:21:23 -06002690 } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
locke-lunarg37c410a2020-02-17 17:34:13 -07002691 mem_info->bound_acceleration_structures.erase(typed_handle.Cast<VkAccelerationStructureNV>());
locke-lunargd556cc32019-09-17 01:21:23 -06002692 } else {
2693 // Unsupported object type
2694 assert(false);
2695 }
2696}
2697
// Typed convenience wrapper over RemoveMemoryRange for buffers.
void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
}
2701
// Typed convenience wrapper over RemoveMemoryRange for images.
void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
}
2705
// Typed convenience wrapper over RemoveMemoryRange for NV acceleration structures.
void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
}
2709
2710void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
2711 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2712 if (buffer_state) {
2713 // Track bound memory range information
2714 auto mem_info = GetDevMemState(mem);
2715 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002716 InsertBufferMemoryRange(buffer, mem_info, memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06002717 }
2718 // Track objects tied to memory
2719 SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
2720 }
2721}
2722
// vkBindBufferMemory entry point; shared logic lives in UpdateBindBufferMemoryState.
void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
                                                            VkDeviceSize memoryOffset, VkResult result) {
    if (VK_SUCCESS != result) return;
    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
}
2728
2729void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
2730 const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
2731 for (uint32_t i = 0; i < bindInfoCount; i++) {
2732 UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
2733 }
2734}
2735
// vkBindBufferMemory2KHR alias; records each binding individually.
void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
    for (uint32_t i = 0; i < bindInfoCount; i++) {
        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
    }
}
2742
Spencer Fricke6c127102020-04-16 06:25:20 -07002743void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer) {
locke-lunargd556cc32019-09-17 01:21:23 -06002744 BUFFER_STATE *buffer_state = GetBufferState(buffer);
2745 if (buffer_state) {
locke-lunargd556cc32019-09-17 01:21:23 -06002746 buffer_state->memory_requirements_checked = true;
2747 }
2748}
2749
// vkGetBufferMemoryRequirements entry point; shared logic lives in RecordGetBufferMemoryRequirementsState.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                       VkMemoryRequirements *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(buffer);
}
2754
// vkGetBufferMemoryRequirements2 entry point; shared logic lives in RecordGetBufferMemoryRequirementsState.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2760
// vkGetBufferMemoryRequirements2KHR alias; shared logic lives in RecordGetBufferMemoryRequirementsState.
void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
    RecordGetBufferMemoryRequirementsState(pInfo->buffer);
}
2766
Spencer Fricke6c127102020-04-16 06:25:20 -07002767void ValidationStateTracker::RecordGetImageMemoryRequirementsState(VkImage image, const VkImageMemoryRequirementsInfo2 *pInfo) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002768 const VkImagePlaneMemoryRequirementsInfo *plane_info =
2769 (pInfo == nullptr) ? nullptr : lvl_find_in_chain<VkImagePlaneMemoryRequirementsInfo>(pInfo->pNext);
locke-lunargd556cc32019-09-17 01:21:23 -06002770 IMAGE_STATE *image_state = GetImageState(image);
2771 if (image_state) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002772 if (plane_info != nullptr) {
2773 // Multi-plane image
2774 image_state->memory_requirements_checked = false; // Each image plane needs to be checked itself
2775 if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_0_BIT) {
2776 image_state->plane0_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002777 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_1_BIT) {
2778 image_state->plane1_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002779 } else if (plane_info->planeAspect == VK_IMAGE_ASPECT_PLANE_2_BIT) {
2780 image_state->plane2_memory_requirements_checked = true;
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002781 }
2782 } else {
2783 // Single Plane image
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07002784 image_state->memory_requirements_checked = true;
2785 }
locke-lunargd556cc32019-09-17 01:21:23 -06002786 }
2787}
2788
// vkGetImageMemoryRequirements entry point; no pNext info, so no plane data.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                      VkMemoryRequirements *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(image, nullptr);
}
2793
// vkGetImageMemoryRequirements2 entry point; shared logic lives in RecordGetImageMemoryRequirementsState.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2798
// vkGetImageMemoryRequirements2KHR alias; shared logic lives in RecordGetImageMemoryRequirementsState.
void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
    RecordGetImageMemoryRequirementsState(pInfo->image, pInfo);
}
2804
2805static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
2806 VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
2807 image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
2808 if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2809 image_state->sparse_metadata_required = true;
2810 }
2811}
2812
// vkGetImageSparseMemoryRequirements entry point; records each reported
// requirement (second call of the two-call idiom; first call passes null).
void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    auto image_state = GetImageState(image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
    }
}
2823
// vkGetImageSparseMemoryRequirements2 entry point; unwraps each "2" struct
// and records the inner requirements.
void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}
2835
// vkGetImageSparseMemoryRequirements2KHR alias; unwraps each "2" struct and
// records the inner requirements.
void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
    auto image_state = GetImageState(pInfo->image);
    image_state->get_sparse_reqs_called = true;
    if (!pSparseMemoryRequirements) return;
    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
    }
}
2847
2848void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
2849 const VkAllocationCallbacks *pAllocator) {
2850 if (!shaderModule) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002851 auto shader_module_state = GetShaderModuleState(shaderModule);
2852 shader_module_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002853 shaderModuleMap.erase(shaderModule);
2854}
2855
2856void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
2857 const VkAllocationCallbacks *pAllocator) {
2858 if (!pipeline) return;
2859 PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
2860 const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
2861 // Any bound cmd buffers are now invalid
2862 InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002863 pipeline_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002864 pipelineMap.erase(pipeline);
2865}
2866
2867void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
2868 const VkAllocationCallbacks *pAllocator) {
2869 if (!pipelineLayout) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002870 auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
2871 pipeline_layout_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002872 pipelineLayoutMap.erase(pipelineLayout);
2873}
2874
// Drop tracking for a sampler being destroyed; command buffers that bound it
// become invalid.
void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
                                                         const VkAllocationCallbacks *pAllocator) {
    if (!sampler) return;
    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
    // Any bound cmd buffers are now invalid
    if (sampler_state) {
        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);

        // Keep the device-wide custom-border-color sampler count in sync.
        if (sampler_state->createInfo.borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT ||
            sampler_state->createInfo.borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT) {
            custom_border_color_sampler_count--;
        }

        sampler_state->destroyed = true;
    }
    samplerMap.erase(sampler);
}
2893
2894void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
2895 const VkAllocationCallbacks *pAllocator) {
2896 if (!descriptorSetLayout) return;
2897 auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
2898 if (layout_it != descriptorSetLayoutMap.end()) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05002899 layout_it->second.get()->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002900 descriptorSetLayoutMap.erase(layout_it);
2901 }
2902}
2903
2904void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
2905 const VkAllocationCallbacks *pAllocator) {
2906 if (!descriptorPool) return;
2907 DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
2908 const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
2909 if (desc_pool_state) {
2910 // Any bound cmd buffers are now invalid
2911 InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
2912 // Free sets that were in this pool
2913 for (auto ds : desc_pool_state->sets) {
2914 FreeDescriptorSet(ds);
2915 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002916 desc_pool_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06002917 descriptorPoolMap.erase(descriptorPool);
2918 }
2919}
2920
// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
// pool_state must be the pool the buffers were allocated from; its commandBuffers set is pruned here,
// which is why callers iterating that set must snapshot it first (see PreCallRecordDestroyCommandPool).
void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
                                                     const VkCommandBuffer *command_buffers) {
    for (uint32_t i = 0; i < command_buffer_count; i++) {
        // Allow any derived class to clean up command buffer state
        if (command_buffer_free_callback) {
            (*command_buffer_free_callback)(command_buffers[i]);
        }

        auto cb_state = GetCBState(command_buffers[i]);
        // Remove references to command buffer's state and delete
        if (cb_state) {
            // reset prior to delete, removing various references to it.
            // TODO: fix this, it's insane.
            ResetCommandBufferState(cb_state->commandBuffer);
            // Remove the cb_state's references from COMMAND_POOL_STATEs
            pool_state->commandBuffers.erase(command_buffers[i]);
            // Remove the cb debug labels
            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
            // Remove CBState from CB map; flag destroyed first so anyone still
            // holding the shared state can see the handle is gone.
            cb_state->destroyed = true;
            commandBufferMap.erase(cb_state->commandBuffer);
        }
    }
}
2946
2947void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
2948 uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
2949 auto pPool = GetCommandPoolState(commandPool);
2950 FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
2951}
2952
2953void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
2954 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
2955 VkResult result) {
2956 if (VK_SUCCESS != result) return;
sfricke-samsungc1543372020-08-18 22:37:27 -07002957 VkCommandPool command_pool = *pCommandPool;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002958 auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
sfricke-samsungc1543372020-08-18 22:37:27 -07002959 cmd_pool_state->commandPool = command_pool;
locke-lunargd556cc32019-09-17 01:21:23 -06002960 cmd_pool_state->createFlags = pCreateInfo->flags;
2961 cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07002962 cmd_pool_state->unprotected = ((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0);
sfricke-samsungc1543372020-08-18 22:37:27 -07002963 commandPoolMap[command_pool] = std::move(cmd_pool_state);
locke-lunargd556cc32019-09-17 01:21:23 -06002964}
2965
2966void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
2967 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
2968 VkResult result) {
2969 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05002970 auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06002971 query_pool_state->createInfo = *pCreateInfo;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002972 query_pool_state->pool = *pQueryPool;
2973 if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
2974 const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002975 query_pool_state->perf_counter_index_count = perf->counterIndexCount;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002976
Mark Lobodzinski7e948e42020-09-09 10:23:36 -06002977 const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
Lionel Landwerlinc7420912019-05-23 00:33:42 +01002978 for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
2979 const auto &counter = counters.counters[perf->pCounterIndices[i]];
2980 switch (counter.scope) {
2981 case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
2982 query_pool_state->has_perf_scope_command_buffer = true;
2983 break;
2984 case VK_QUERY_SCOPE_RENDER_PASS_KHR:
2985 query_pool_state->has_perf_scope_render_pass = true;
2986 break;
2987 default:
2988 break;
2989 }
2990 }
2991
2992 DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
2993 &query_pool_state->n_performance_passes);
2994 }
2995
locke-lunargd556cc32019-09-17 01:21:23 -06002996 queryPoolMap[*pQueryPool] = std::move(query_pool_state);
2997
2998 QueryObject query_obj{*pQueryPool, 0u};
2999 for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
3000 query_obj.query = i;
3001 queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
3002 }
3003}
3004
3005void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
3006 const VkAllocationCallbacks *pAllocator) {
3007 if (!commandPool) return;
3008 COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
3009 // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
3010 // "When a pool is destroyed, all command buffers allocated from the pool are freed."
3011 if (cp_state) {
3012 // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
3013 std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
3014 FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003015 cp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003016 commandPoolMap.erase(commandPool);
3017 }
3018}
3019
3020void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
3021 VkCommandPoolResetFlags flags, VkResult result) {
3022 if (VK_SUCCESS != result) return;
3023 // Reset all of the CBs allocated from this pool
3024 auto command_pool_state = GetCommandPoolState(commandPool);
3025 for (auto cmdBuffer : command_pool_state->commandBuffers) {
3026 ResetCommandBufferState(cmdBuffer);
3027 }
3028}
3029
3030void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
3031 VkResult result) {
3032 for (uint32_t i = 0; i < fenceCount; ++i) {
3033 auto pFence = GetFenceState(pFences[i]);
3034 if (pFence) {
3035 if (pFence->scope == kSyncScopeInternal) {
3036 pFence->state = FENCE_UNSIGNALED;
3037 } else if (pFence->scope == kSyncScopeExternalTemporary) {
3038 pFence->scope = kSyncScopeInternal;
3039 }
3040 }
3041 }
3042}
3043
// For given cb_nodes, invalidate them and track object causing invalidation.
// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
// can also unlink objects from command buffers.
// The map value is the index of `obj` within each command buffer's object_bindings,
// which is what makes the unlink step possible.
void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
                                                      const VulkanTypedHandle &obj, bool unlink) {
    for (const auto &cb_node_pair : cb_nodes) {
        auto &cb_node = cb_node_pair.first;
        // Move the buffer to the invalid state matching its current recording state.
        if (cb_node->state == CB_RECORDING) {
            cb_node->state = CB_INVALID_INCOMPLETE;
        } else if (cb_node->state == CB_RECORDED) {
            cb_node->state = CB_INVALID_COMPLETE;
        }
        // Remember which object broke the binding for later error reporting.
        cb_node->broken_bindings.push_back(obj);

        // if secondary, then propagate the invalidation to the primaries that will call us.
        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
        }
        if (unlink) {
            // Null out the reverse link from the command buffer back to obj.
            int index = cb_node_pair.second;
            assert(cb_node->object_bindings[index] == obj);
            cb_node->object_bindings[index] = VulkanTypedHandle();
        }
    }
    // Deferred until after the loop: clearing while iterating would invalidate it.
    if (unlink) {
        cb_nodes.clear();
    }
}
3073
3074void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
3075 const VulkanTypedHandle &obj) {
locke-lunargd556cc32019-09-17 01:21:23 -06003076 for (auto cb_node : cb_nodes) {
3077 if (cb_node->state == CB_RECORDING) {
3078 cb_node->state = CB_INVALID_INCOMPLETE;
3079 } else if (cb_node->state == CB_RECORDED) {
3080 cb_node->state = CB_INVALID_COMPLETE;
3081 }
3082 cb_node->broken_bindings.push_back(obj);
3083
3084 // if secondary, then propagate the invalidation to the primaries that will call us.
3085 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003086 InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
locke-lunargd556cc32019-09-17 01:21:23 -06003087 }
3088 }
3089}
3090
3091void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
3092 const VkAllocationCallbacks *pAllocator) {
3093 if (!framebuffer) return;
3094 FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
3095 const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
3096 InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003097 framebuffer_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003098 frameBufferMap.erase(framebuffer);
3099}
3100
3101void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
3102 const VkAllocationCallbacks *pAllocator) {
3103 if (!renderPass) return;
3104 RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
3105 const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
3106 InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003107 rp_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003108 renderPassMap.erase(renderPass);
3109}
3110
3111void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
3112 const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
3113 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003114 auto fence_state = std::make_shared<FENCE_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003115 fence_state->fence = *pFence;
3116 fence_state->createInfo = *pCreateInfo;
3117 fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
3118 fenceMap[*pFence] = std::move(fence_state);
3119}
3120
3121bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3122 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3123 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003124 void *cgpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003125 // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
3126 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3127 cgpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3128 cgpl_state->pipe_state.reserve(count);
3129 for (uint32_t i = 0; i < count; i++) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003130 cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz6ae39612019-10-11 20:57:36 -05003131 (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003132 (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003133 }
3134 return false;
3135}
3136
3137void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3138 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3139 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3140 VkResult result, void *cgpl_state_data) {
3141 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
3142 // This API may create pipelines regardless of the return value
3143 for (uint32_t i = 0; i < count; i++) {
3144 if (pPipelines[i] != VK_NULL_HANDLE) {
3145 (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3146 pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
3147 }
3148 }
3149 cgpl_state->pipe_state.clear();
3150}
3151
3152bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3153 const VkComputePipelineCreateInfo *pCreateInfos,
3154 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003155 void *ccpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003156 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3157 ccpl_state->pCreateInfos = pCreateInfos; // GPU validation can alter this, so we have to set a default value for the Chassis
3158 ccpl_state->pipe_state.reserve(count);
3159 for (uint32_t i = 0; i < count; i++) {
3160 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003161 ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
locke-lunargd556cc32019-09-17 01:21:23 -06003162 ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003163 ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003164 }
3165 return false;
3166}
3167
3168void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
3169 const VkComputePipelineCreateInfo *pCreateInfos,
3170 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
3171 VkResult result, void *ccpl_state_data) {
3172 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
3173
3174 // This API may create pipelines regardless of the return value
3175 for (uint32_t i = 0; i < count; i++) {
3176 if (pPipelines[i] != VK_NULL_HANDLE) {
3177 (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3178 pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
3179 }
3180 }
3181 ccpl_state->pipe_state.clear();
3182}
3183
3184bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
3185 uint32_t count,
3186 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3187 const VkAllocationCallbacks *pAllocator,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003188 VkPipeline *pPipelines, void *crtpl_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003189 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3190 crtpl_state->pipe_state.reserve(count);
3191 for (uint32_t i = 0; i < count; i++) {
3192 // Create and initialize internal tracking data structure
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003193 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003194 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003195 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003196 }
3197 return false;
3198}
3199
3200void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
3201 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
3202 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3203 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
3204 // This API may create pipelines regardless of the return value
3205 for (uint32_t i = 0; i < count; i++) {
3206 if (pPipelines[i] != VK_NULL_HANDLE) {
3207 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3208 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3209 }
3210 }
3211 crtpl_state->pipe_state.clear();
3212}
3213
Jeff Bolz443c2ca2020-03-19 12:11:51 -05003214bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesKHR(VkDevice device, VkPipelineCache pipelineCache,
3215 uint32_t count,
3216 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3217 const VkAllocationCallbacks *pAllocator,
3218 VkPipeline *pPipelines, void *crtpl_state_data) const {
3219 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3220 crtpl_state->pipe_state.reserve(count);
3221 for (uint32_t i = 0; i < count; i++) {
3222 // Create and initialize internal tracking data structure
3223 crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
3224 crtpl_state->pipe_state.back()->initRayTracingPipeline(this, &pCreateInfos[i]);
3225 crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
3226 }
3227 return false;
3228}
3229
3230void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
3231 VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
3232 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
3233 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
3234 // This API may create pipelines regardless of the return value
3235 for (uint32_t i = 0; i < count; i++) {
3236 if (pPipelines[i] != VK_NULL_HANDLE) {
3237 (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
3238 pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
3239 }
3240 }
3241 crtpl_state->pipe_state.clear();
3242}
3243
locke-lunargd556cc32019-09-17 01:21:23 -06003244void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
3245 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
3246 VkResult result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003247 samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
Tony-LunarG7337b312020-04-15 16:40:25 -06003248 if (pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT || pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT)
3249 custom_border_color_sampler_count++;
locke-lunargd556cc32019-09-17 01:21:23 -06003250}
3251
3252void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
3253 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
3254 const VkAllocationCallbacks *pAllocator,
3255 VkDescriptorSetLayout *pSetLayout, VkResult result) {
3256 if (VK_SUCCESS != result) return;
3257 descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
3258}
3259
3260// For repeatable sorting, not very useful for "memory in range" search
3261struct PushConstantRangeCompare {
3262 bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
3263 if (lhs->offset == rhs->offset) {
3264 if (lhs->size == rhs->size) {
3265 // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
3266 return lhs->stageFlags < rhs->stageFlags;
3267 }
3268 // If the offsets are the same then sorting by the end of range is useful for validation
3269 return lhs->size < rhs->size;
3270 }
3271 return lhs->offset < rhs->offset;
3272 }
3273};
3274
// File-local dictionary used to canonicalize push constant range sets to ids.
static PushConstantRangesDict push_constant_ranges_dict;
3276
3277PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
3278 if (!info->pPushConstantRanges) {
3279 // Hand back the empty entry (creating as needed)...
3280 return push_constant_ranges_dict.look_up(PushConstantRanges());
3281 }
3282
3283 // Sort the input ranges to ensure equivalent ranges map to the same id
3284 std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
3285 for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
3286 sorted.insert(info->pPushConstantRanges + i);
3287 }
3288
Jeremy Hayesf34b9fb2019-12-06 09:37:03 -07003289 PushConstantRanges ranges;
3290 ranges.reserve(sorted.size());
locke-lunargd556cc32019-09-17 01:21:23 -06003291 for (const auto range : sorted) {
3292 ranges.emplace_back(*range);
3293 }
3294 return push_constant_ranges_dict.look_up(std::move(ranges));
3295}
3296
// Dictionary of canonical form of the pipeline set layout of descriptor set layouts
static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;

// Dictionary of canonical form of the "compatible for set" records
static PipelineLayoutCompatDict pipeline_layout_compat_dict;
3302
3303static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
3304 const PipelineLayoutSetLayoutsId set_layouts_id) {
3305 return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
3306}
3307
3308void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
3309 const VkAllocationCallbacks *pAllocator,
3310 VkPipelineLayout *pPipelineLayout, VkResult result) {
3311 if (VK_SUCCESS != result) return;
3312
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003313 auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003314 pipeline_layout_state->layout = *pPipelineLayout;
3315 pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
3316 PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
3317 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05003318 pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003319 set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
3320 }
3321
3322 // Get canonical form IDs for the "compatible for set" contents
3323 pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
3324 auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
3325 pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
3326
3327 // Create table of "compatible for set N" cannonical forms for trivial accept validation
3328 for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
3329 pipeline_layout_state->compat_for_set.emplace_back(
3330 GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
3331 }
3332 pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
3333}
3334
3335void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
3336 const VkAllocationCallbacks *pAllocator,
3337 VkDescriptorPool *pDescriptorPool, VkResult result) {
3338 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003339 descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003340}
3341
3342void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3343 VkDescriptorPoolResetFlags flags, VkResult result) {
3344 if (VK_SUCCESS != result) return;
3345 DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
3346 // TODO: validate flags
3347 // For every set off of this pool, clear it, remove from setMap, and free cvdescriptorset::DescriptorSet
3348 for (auto ds : pPool->sets) {
3349 FreeDescriptorSet(ds);
3350 }
3351 pPool->sets.clear();
3352 // Reset available count for each type and available sets for this pool
3353 for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
3354 pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
3355 }
3356 pPool->availableSets = pPool->maxSets;
3357}
3358
3359bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
3360 const VkDescriptorSetAllocateInfo *pAllocateInfo,
Jeff Bolz5c801d12019-10-09 10:38:45 -05003361 VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06003362 // Always update common data
3363 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3364 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3365 UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
3366
3367 return false;
3368}
3369
3370// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
3371void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3372 VkDescriptorSet *pDescriptorSets, VkResult result,
3373 void *ads_state_data) {
3374 if (VK_SUCCESS != result) return;
3375 // All the updates are contained in a single cvdescriptorset function
3376 cvdescriptorset::AllocateDescriptorSetsData *ads_state =
3377 reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
3378 PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
3379}
3380
3381void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
3382 const VkDescriptorSet *pDescriptorSets) {
3383 DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
3384 // Update available descriptor sets in pool
3385 pool_state->availableSets += count;
3386
3387 // For each freed descriptor add its resources back into the pool as available and remove from pool and setMap
3388 for (uint32_t i = 0; i < count; ++i) {
3389 if (pDescriptorSets[i] != VK_NULL_HANDLE) {
3390 auto descriptor_set = setMap[pDescriptorSets[i]].get();
3391 uint32_t type_index = 0, descriptor_count = 0;
3392 for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
3393 type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
3394 descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
3395 pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
3396 }
3397 FreeDescriptorSet(descriptor_set);
3398 pool_state->sets.erase(descriptor_set);
3399 }
3400 }
3401}
3402
// Thin wrapper: all write/copy bookkeeping is implemented in the cvdescriptorset
// module; just forward the parameters along with this state tracker.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                               const VkWriteDescriptorSet *pDescriptorWrites,
                                                               uint32_t descriptorCopyCount,
                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
                                                 pDescriptorCopies);
}
3410
3411void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
3412 VkCommandBuffer *pCommandBuffer, VkResult result) {
3413 if (VK_SUCCESS != result) return;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003414 auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
locke-lunargd556cc32019-09-17 01:21:23 -06003415 if (pPool) {
3416 for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
3417 // Add command buffer to its commandPool map
3418 pPool->commandBuffers.insert(pCommandBuffer[i]);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003419 auto pCB = std::make_shared<CMD_BUFFER_STATE>();
locke-lunargd556cc32019-09-17 01:21:23 -06003420 pCB->createInfo = *pCreateInfo;
Jeff Bolz6835fda2019-10-06 00:15:34 -05003421 pCB->command_pool = pPool;
sfricke-samsung0c45edc2020-07-01 22:19:53 -07003422 pCB->unprotected = pPool->unprotected;
locke-lunargd556cc32019-09-17 01:21:23 -06003423 // Add command buffer to map
3424 commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
3425 ResetCommandBufferState(pCommandBuffer[i]);
3426 }
3427 }
3428}
3429
3430// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
3431void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05003432 AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
locke-lunargd556cc32019-09-17 01:21:23 -06003433 cb_state);
Mark Lobodzinski544b3dd2019-12-03 14:44:54 -07003434 // If imageless fb, skip fb binding
3435 if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
locke-lunargd556cc32019-09-17 01:21:23 -06003436 const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
3437 for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03003438 auto view_state = GetAttachmentImageViewState(cb_state, fb_state, attachment);
locke-lunargd556cc32019-09-17 01:21:23 -06003439 if (view_state) {
3440 AddCommandBufferBindingImageView(cb_state, view_state);
3441 }
3442 }
3443}
3444
// Record state for vkBeginCommandBuffer: performs an implicit reset when re-beginning a
// previously recorded command buffer, captures the begin/inheritance info, and for secondary
// command buffers that continue a render pass, inherits the render pass / framebuffer state.
void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    if (!cb_state) return;
    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
        // Secondary Command Buffer
        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
        if (pInfo) {
            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
                assert(pInfo->renderPass);
                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
                if (framebuffer) {
                    // Connect this framebuffer and its children to this cmdBuffer
                    AddFramebufferBinding(cb_state, framebuffer);
                }
            }
        }
    }
    // Re-beginning a finished (or invalidated-after-finish) command buffer implicitly resets it
    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
        ResetCommandBufferState(commandBuffer);
    }
    // Set updated state here in case implicit reset occurs above
    cb_state->state = CB_RECORDING;
    cb_state->beginInfo = *pBeginInfo;
    if (cb_state->beginInfo.pInheritanceInfo) {
        // Deep-copy the inheritance info so the stored beginInfo doesn't point at caller memory
        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
        // If we are a secondary command-buffer and inheriting. Update the items we should inherit.
        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
            cb_state->activeRenderPass = GetShared<RENDER_PASS_STATE>(cb_state->beginInfo.pInheritanceInfo->renderPass);
            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
            // Framebuffer may legally be VK_NULL_HANDLE in the inheritance info
            if (cb_state->beginInfo.pInheritanceInfo->framebuffer) {
                cb_state->activeFramebuffer = GetShared<FRAMEBUFFER_STATE>(cb_state->beginInfo.pInheritanceInfo->framebuffer);
                if (cb_state->activeFramebuffer) cb_state->framebuffers.insert(cb_state->activeFramebuffer);
            }
        }
    }

    // Device-group begin info (if chained) restricts which physical devices execute this CB;
    // otherwise all devices in the group participate.
    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
    if (chained_device_group_struct) {
        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
    } else {
        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
    }

    // Snapshot whether a performance-query lock was held when recording began
    cb_state->performance_lock_acquired = performance_lock_acquired;
}
3493
3494void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
3495 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3496 if (!cb_state) return;
3497 // Cached validation is specific to a specific recording of a specific command buffer.
3498 for (auto descriptor_set : cb_state->validated_descriptor_sets) {
3499 descriptor_set->ClearCachedValidation(cb_state);
3500 }
3501 cb_state->validated_descriptor_sets.clear();
3502 if (VK_SUCCESS == result) {
3503 cb_state->state = CB_RECORDED;
3504 }
3505}
3506
3507void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
3508 VkResult result) {
3509 if (VK_SUCCESS == result) {
3510 ResetCommandBufferState(commandBuffer);
3511 }
3512}
3513
3514CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
3515 // initially assume everything is static state
3516 CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
3517
3518 if (ds) {
3519 for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
locke-lunarg4189aa22020-10-21 00:23:48 -06003520 flags &= ~ConvertToCBStatusFlagBits(ds->pDynamicStates[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06003521 }
3522 }
locke-lunargd556cc32019-09-17 01:21:23 -06003523 return flags;
3524}
3525
3526// Validation cache:
3527// CV is the bottommost implementor of this extension. Don't pass calls down.
3528// utility function to set collective state for pipeline
3529void SetPipelineState(PIPELINE_STATE *pPipe) {
3530 // If any attachment used by this pipeline has blendEnable, set top-level blendEnable
3531 if (pPipe->graphicsPipelineCI.pColorBlendState) {
3532 for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
3533 if (VK_TRUE == pPipe->attachments[i].blendEnable) {
3534 if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3535 (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3536 ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3537 (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3538 ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3539 (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
3540 ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
3541 (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
3542 pPipe->blendConstantsEnabled = true;
3543 }
3544 }
3545 }
3546 }
sfricke-samsung8f658d42020-05-03 20:12:24 -07003547 // Check if sample location is enabled
3548 if (pPipe->graphicsPipelineCI.pMultisampleState) {
3549 const VkPipelineSampleLocationsStateCreateInfoEXT *sample_location_state =
3550 lvl_find_in_chain<VkPipelineSampleLocationsStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pMultisampleState->pNext);
3551 if (sample_location_state != nullptr) {
3552 pPipe->sample_location_enabled = sample_location_state->sampleLocationsEnable;
3553 }
3554 }
locke-lunargd556cc32019-09-17 01:21:23 -06003555}
3556
3557void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
3558 VkPipeline pipeline) {
3559 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3560 assert(cb_state);
3561
3562 auto pipe_state = GetPipelineState(pipeline);
3563 if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
3564 cb_state->status &= ~cb_state->static_status;
3565 cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
3566 cb_state->status |= cb_state->static_status;
locke-lunarg4189aa22020-10-21 00:23:48 -06003567 cb_state->dynamic_status = CBSTATUS_ALL_STATE_SET & (~cb_state->static_status);
locke-lunargd556cc32019-09-17 01:21:23 -06003568 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003569 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
locke-lunargd556cc32019-09-17 01:21:23 -06003570 cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
3571 SetPipelineState(pipe_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05003572 AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06003573}
3574
3575void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3576 uint32_t viewportCount, const VkViewport *pViewports) {
3577 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3578 cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
3579 cb_state->status |= CBSTATUS_VIEWPORT_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003580 cb_state->static_status &= ~CBSTATUS_VIEWPORT_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003581}
3582
3583void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
3584 uint32_t exclusiveScissorCount,
3585 const VkRect2D *pExclusiveScissors) {
3586 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3587 // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
3588 // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
3589 cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003590 cb_state->static_status &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003591}
3592
3593void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
3594 VkImageLayout imageLayout) {
3595 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3596
3597 if (imageView != VK_NULL_HANDLE) {
3598 auto view_state = GetImageViewState(imageView);
3599 AddCommandBufferBindingImageView(cb_state, view_state);
3600 }
3601}
3602
3603void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3604 uint32_t viewportCount,
3605 const VkShadingRatePaletteNV *pShadingRatePalettes) {
3606 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3607 // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
3608 // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
3609 cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003610 cb_state->static_status &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003611}
3612
3613void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
3614 const VkAccelerationStructureCreateInfoNV *pCreateInfo,
3615 const VkAllocationCallbacks *pAllocator,
3616 VkAccelerationStructureNV *pAccelerationStructure,
3617 VkResult result) {
3618 if (VK_SUCCESS != result) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003619 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
locke-lunargd556cc32019-09-17 01:21:23 -06003620
3621 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3622 VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
3623 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3624 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
3625 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3626 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
3627
3628 VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
3629 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3630 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
3631 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3632 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
3633 &as_state->build_scratch_memory_requirements);
3634
3635 VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
3636 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
3637 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
3638 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3639 DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
3640 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003641 as_state->allocator = pAllocator;
locke-lunargd556cc32019-09-17 01:21:23 -06003642 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3643}
3644
Jeff Bolz95176d02020-04-01 00:36:16 -05003645void ValidationStateTracker::PostCallRecordCreateAccelerationStructureKHR(VkDevice device,
3646 const VkAccelerationStructureCreateInfoKHR *pCreateInfo,
3647 const VkAllocationCallbacks *pAllocator,
3648 VkAccelerationStructureKHR *pAccelerationStructure,
3649 VkResult result) {
3650 if (VK_SUCCESS != result) return;
3651 auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
3652
3653 // Query the requirements in case the application doesn't (to avoid bind/validation time query)
3654 VkAccelerationStructureMemoryRequirementsInfoKHR as_memory_requirements_info = {};
3655 as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3656 as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_KHR;
3657 as_memory_requirements_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3658 as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
3659 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &as_memory_requirements_info, &as_state->memory_requirements);
3660
3661 VkAccelerationStructureMemoryRequirementsInfoKHR scratch_memory_req_info = {};
3662 scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3663 scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_KHR;
3664 scratch_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3665 scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3666 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &scratch_memory_req_info,
3667 &as_state->build_scratch_memory_requirements);
3668
3669 VkAccelerationStructureMemoryRequirementsInfoKHR update_memory_req_info = {};
3670 update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_KHR;
3671 update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_KHR;
3672 update_memory_req_info.buildType = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR;
3673 update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
3674 DispatchGetAccelerationStructureMemoryRequirementsKHR(device, &update_memory_req_info,
3675 &as_state->update_scratch_memory_requirements);
Mark Lobodzinski17dc4602020-05-29 07:48:40 -06003676 as_state->allocator = pAllocator;
Jeff Bolz95176d02020-04-01 00:36:16 -05003677 accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
3678}
3679
locke-lunargd556cc32019-09-17 01:21:23 -06003680void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
3681 VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
3682 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
3683 if (as_state != nullptr) {
3684 if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
3685 as_state->memory_requirements = *pMemoryRequirements;
3686 as_state->memory_requirements_checked = true;
3687 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
3688 as_state->build_scratch_memory_requirements = *pMemoryRequirements;
3689 as_state->build_scratch_memory_requirements_checked = true;
3690 } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
3691 as_state->update_scratch_memory_requirements = *pMemoryRequirements;
3692 as_state->update_scratch_memory_requirements_checked = true;
3693 }
3694 }
3695}
3696
Jeff Bolz95176d02020-04-01 00:36:16 -05003697void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryCommon(
3698 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result,
3699 bool isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003700 if (VK_SUCCESS != result) return;
3701 for (uint32_t i = 0; i < bindInfoCount; i++) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003702 const VkBindAccelerationStructureMemoryInfoKHR &info = pBindInfos[i];
locke-lunargd556cc32019-09-17 01:21:23 -06003703
3704 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
3705 if (as_state) {
3706 // Track bound memory range information
3707 auto mem_info = GetDevMemState(info.memory);
3708 if (mem_info) {
sfricke-samsungd7ea5de2020-04-08 09:19:18 -07003709 InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset);
locke-lunargd556cc32019-09-17 01:21:23 -06003710 }
3711 // Track objects tied to memory
3712 SetMemBinding(info.memory, as_state, info.memoryOffset,
Jeff Bolz95176d02020-04-01 00:36:16 -05003713 VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR));
locke-lunargd556cc32019-09-17 01:21:23 -06003714
3715 // GPU validation of top level acceleration structure building needs acceleration structure handles.
Jeff Bolz95176d02020-04-01 00:36:16 -05003716 // XXX TODO: Query device address for KHR extension
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06003717 if (enabled[gpu_validation] && isNV) {
locke-lunargd556cc32019-09-17 01:21:23 -06003718 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
3719 }
3720 }
3721 }
3722}
3723
Jeff Bolz95176d02020-04-01 00:36:16 -05003724void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
3725 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
3726 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, true);
3727}
3728
3729void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryKHR(
3730 VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoKHR *pBindInfos, VkResult result) {
3731 PostCallRecordBindAccelerationStructureMemoryCommon(device, bindInfoCount, pBindInfos, result, false);
3732}
3733
locke-lunargd556cc32019-09-17 01:21:23 -06003734void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
3735 VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
3736 VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
3737 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3738 if (cb_state == nullptr) {
3739 return;
3740 }
3741
3742 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3743 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3744 if (dst_as_state != nullptr) {
3745 dst_as_state->built = true;
3746 dst_as_state->build_info.initialize(pInfo);
3747 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3748 }
3749 if (src_as_state != nullptr) {
3750 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3751 }
3752 cb_state->hasBuildAccelerationStructureCmd = true;
3753}
3754
3755void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
3756 VkAccelerationStructureNV dst,
3757 VkAccelerationStructureNV src,
3758 VkCopyAccelerationStructureModeNV mode) {
3759 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3760 if (cb_state) {
3761 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
3762 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
3763 if (dst_as_state != nullptr && src_as_state != nullptr) {
3764 dst_as_state->built = true;
3765 dst_as_state->build_info = src_as_state->build_info;
3766 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
3767 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
3768 }
3769 }
3770}
3771
Jeff Bolz95176d02020-04-01 00:36:16 -05003772void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureKHR(VkDevice device,
3773 VkAccelerationStructureKHR accelerationStructure,
3774 const VkAllocationCallbacks *pAllocator) {
locke-lunargd556cc32019-09-17 01:21:23 -06003775 if (!accelerationStructure) return;
3776 auto *as_state = GetAccelerationStructureState(accelerationStructure);
3777 if (as_state) {
Jeff Bolz95176d02020-04-01 00:36:16 -05003778 const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureKHR);
locke-lunargd556cc32019-09-17 01:21:23 -06003779 InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
3780 for (auto mem_binding : as_state->GetBoundMemory()) {
locke-lunargcf04d582019-11-26 00:31:50 -07003781 RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_binding);
locke-lunargd556cc32019-09-17 01:21:23 -06003782 }
3783 ClearMemoryObjectBindings(obj_struct);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05003784 as_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06003785 accelerationStructureMap.erase(accelerationStructure);
3786 }
3787}
3788
Jeff Bolz95176d02020-04-01 00:36:16 -05003789void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
3790 VkAccelerationStructureNV accelerationStructure,
3791 const VkAllocationCallbacks *pAllocator) {
3792 PreCallRecordDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator);
3793}
3794
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003795void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
3796 uint32_t viewportCount,
3797 const VkViewportWScalingNV *pViewportWScalings) {
3798 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3799 cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003800 cb_state->static_status &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
Chris Mayer9ded5eb2019-09-19 16:33:26 +02003801}
3802
locke-lunargd556cc32019-09-17 01:21:23 -06003803void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
3804 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3805 cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003806 cb_state->static_status &= ~CBSTATUS_LINE_WIDTH_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003807}
3808
3809void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
3810 uint16_t lineStipplePattern) {
3811 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3812 cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003813 cb_state->static_status &= ~CBSTATUS_LINE_STIPPLE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003814}
3815
3816void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
3817 float depthBiasClamp, float depthBiasSlopeFactor) {
3818 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3819 cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003820 cb_state->static_status &= ~CBSTATUS_DEPTH_BIAS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003821}
3822
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003823void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
3824 const VkRect2D *pScissors) {
3825 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3826 cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
3827 cb_state->status |= CBSTATUS_SCISSOR_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003828 cb_state->static_status &= ~CBSTATUS_SCISSOR_SET;
Lionel Landwerlinc7420912019-05-23 00:33:42 +01003829}
3830
locke-lunargd556cc32019-09-17 01:21:23 -06003831void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
3832 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3833 cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003834 cb_state->static_status &= ~CBSTATUS_BLEND_CONSTANTS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003835}
3836
3837void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
3838 float maxDepthBounds) {
3839 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3840 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003841 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003842}
3843
3844void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3845 uint32_t compareMask) {
3846 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3847 cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003848 cb_state->static_status &= ~CBSTATUS_STENCIL_READ_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003849}
3850
3851void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3852 uint32_t writeMask) {
3853 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3854 cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003855 cb_state->static_status &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003856}
3857
3858void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
3859 uint32_t reference) {
3860 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3861 cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
Piers Daniell39842ee2020-07-10 16:42:33 -06003862 cb_state->static_status &= ~CBSTATUS_STENCIL_REFERENCE_SET;
locke-lunargd556cc32019-09-17 01:21:23 -06003863}
3864
// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
// Sets outside [first_set, first_set + set_count) that are layout-incompatible with the new
// layout are invalidated (cleared), per the Vulkan spec's descriptor set disturb rules.
void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
    // Defensive
    assert(pipeline_layout);
    if (!pipeline_layout) return;

    // required_size is one past the last set index this update touches
    uint32_t required_size = first_set + set_count;
    const uint32_t last_binding_index = required_size - 1;
    assert(last_binding_index < pipeline_layout->compat_for_set.size());

    // Some useful shorthand
    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());

    // We need this three times in this function, but nowhere else.
    // Returns true (and releases the CB's push descriptor set) if ds is a push descriptor set.
    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
        if (ds && ds->IsPushDescriptor()) {
            assert(ds == last_bound.push_descriptor_set.get());
            last_bound.push_descriptor_set = nullptr;
            return true;
        }
        return false;
    };

    // Clean up the "disturbed" before and after the range to be set
    if (required_size < current_size) {
        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
            // We're disturbing those after last, we'll shrink below, but first need to check for and cleanup the push_descriptor
            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
            }
        } else {
            // We're not disturbing past last, so leave the upper binding data alone.
            required_size = current_size;
        }
    }

    // We resize if we need more set entries or if those past "last" are disturbed
    if (required_size != current_size) {
        last_bound.per_set.resize(required_size);
    }

    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
            last_bound.per_set[set_idx].dynamicOffsets.clear();
            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
        }
    }

    // Now update the bound sets with the input sets
    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
        cvdescriptorset::DescriptorSet *descriptor_set =
            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);

        // Record binding (or push)
        if (descriptor_set != last_bound.push_descriptor_set.get()) {
            // Only cleanup the push descriptors if they aren't the currently used set.
            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
        }
        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index

        if (descriptor_set) {
            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
                // Consume this set's slice of the caller-supplied dynamic offsets array
                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
                input_dynamic_offsets = end_offset;
                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
            } else {
                last_bound.per_set[set_idx].dynamicOffsets.clear();
            }
            if (!descriptor_set->IsPushDescriptor()) {
                // Can't cache validation of push_descriptors
                cb_state->validated_descriptor_sets.insert(descriptor_set);
            }
        }
    }
}
3958
3959// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
3960void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
3961 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
3962 uint32_t firstSet, uint32_t setCount,
3963 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
3964 const uint32_t *pDynamicOffsets) {
3965 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
3966 auto pipeline_layout = GetPipelineLayout(layout);
3967
3968 // Resize binding arrays
3969 uint32_t last_set_index = firstSet + setCount - 1;
3970 if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
3971 cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
3972 }
3973
3974 UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
3975 dynamicOffsetCount, pDynamicOffsets);
3976 cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
3977 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
3978}
3979
// Shared record logic for vkCmdPushDescriptorSetKHR (and the template variant): (re)create the
// command buffer's internal push descriptor set if needed, rebind it at `set`, and apply the
// descriptor writes to it.
void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
    const auto &pipeline_layout = GetPipelineLayout(layout);
    // Short circuit invalid updates: unknown layout, out-of-range set index, or a set layout
    // that was not created with the push-descriptor flag.
    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
        return;

    // We need a descriptor set to update the bindings with, compatible with the passed layout
    const auto dsl = pipeline_layout->set_layouts[set];
    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
    auto &push_descriptor_set = last_bound.push_descriptor_set;
    // If we are disturbing the current push_descriptor_set clear it.
    // Ownership of the freshly allocated DescriptorSet transfers to last_bound (it replaces
    // push_descriptor_set, which the reference above keeps tracking).
    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this));
    }

    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
                                  nullptr);
    last_bound.pipeline_layout = layout;

    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
}
4005
4006void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
4007 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
4008 uint32_t set, uint32_t descriptorWriteCount,
4009 const VkWriteDescriptorSet *pDescriptorWrites) {
4010 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4011 RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
4012}
4013
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004014void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
4015 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
4016 const void *pValues) {
4017 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4018 if (cb_state != nullptr) {
4019 ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
4020
4021 auto &push_constant_data = cb_state->push_constant_data;
4022 assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
4023 std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
locke-lunargde3f0fa2020-09-10 11:55:31 -06004024 cb_state->push_constant_pipeline_layout_set = layout;
4025
4026 auto flags = stageFlags;
4027 uint32_t bit_shift = 0;
4028 while (flags) {
4029 if (flags & 1) {
4030 VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
4031 const auto it = cb_state->push_constant_data_update.find(flag);
4032
4033 if (it != cb_state->push_constant_data_update.end()) {
4034 std::memset(it->second.data() + offset, 1, static_cast<std::size_t>(size));
4035 }
4036 }
4037 flags = flags >> 1;
4038 ++bit_shift;
4039 }
Tony-LunarG6bb1d0c2019-09-23 10:39:25 -06004040 }
4041}
4042
locke-lunargd556cc32019-09-17 01:21:23 -06004043void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
4044 VkIndexType indexType) {
locke-lunargd556cc32019-09-17 01:21:23 -06004045 auto cb_state = GetCBState(commandBuffer);
4046
4047 cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
Piers Daniell39842ee2020-07-10 16:42:33 -06004048 cb_state->static_status &= ~CBSTATUS_INDEX_BUFFER_BOUND;
locke-lunarg1ae57d62020-11-18 10:49:19 -07004049 cb_state->index_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(buffer);
4050 cb_state->index_buffer_binding.size = cb_state->index_buffer_binding.buffer_state->createInfo.size;
locke-lunargd556cc32019-09-17 01:21:23 -06004051 cb_state->index_buffer_binding.offset = offset;
4052 cb_state->index_buffer_binding.index_type = indexType;
4053 // Add binding for this index buffer to this commandbuffer
locke-lunarg1ae57d62020-11-18 10:49:19 -07004054 AddCommandBufferBindingBuffer(cb_state, cb_state->index_buffer_binding.buffer_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06004055}
4056
4057void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
4058 uint32_t bindingCount, const VkBuffer *pBuffers,
4059 const VkDeviceSize *pOffsets) {
4060 auto cb_state = GetCBState(commandBuffer);
4061
4062 uint32_t end = firstBinding + bindingCount;
4063 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
4064 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
4065 }
4066
4067 for (uint32_t i = 0; i < bindingCount; ++i) {
4068 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07004069 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004070 vertex_buffer_binding.offset = pOffsets[i];
Piers Daniell39842ee2020-07-10 16:42:33 -06004071 vertex_buffer_binding.size = VK_WHOLE_SIZE;
4072 vertex_buffer_binding.stride = 0;
locke-lunargd556cc32019-09-17 01:21:23 -06004073 // Add binding for this vertex buffer to this commandbuffer
Jeff Bolz165818a2020-05-08 11:19:03 -05004074 if (pBuffers[i]) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004075 AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
Jeff Bolz165818a2020-05-08 11:19:03 -05004076 }
locke-lunargd556cc32019-09-17 01:21:23 -06004077 }
4078}
4079
4080void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
4081 VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
4082 auto cb_state = GetCBState(commandBuffer);
4083 auto dst_buffer_state = GetBufferState(dstBuffer);
4084
4085 // Update bindings between buffer and cmd buffer
4086 AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
4087}
4088
Jeff Bolz310775c2019-10-09 00:46:33 -05004089bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
4090 EventToStageMap *localEventToStageMap) {
4091 (*localEventToStageMap)[event] = stageMask;
locke-lunargd556cc32019-09-17 01:21:23 -06004092 return false;
4093}
4094
4095void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
4096 VkPipelineStageFlags stageMask) {
4097 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4098 auto event_state = GetEventState(event);
4099 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004100 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004101 }
4102 cb_state->events.push_back(event);
4103 if (!cb_state->waitedEvents.count(event)) {
4104 cb_state->writeEventsBeforeWait.push_back(event);
4105 }
Jeff Bolz310775c2019-10-09 00:46:33 -05004106 cb_state->eventUpdates.emplace_back(
4107 [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
4108 return SetEventStageMask(event, stageMask, localEventToStageMap);
4109 });
locke-lunargd556cc32019-09-17 01:21:23 -06004110}
4111
4112void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
4113 VkPipelineStageFlags stageMask) {
4114 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4115 auto event_state = GetEventState(event);
4116 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004117 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004118 }
4119 cb_state->events.push_back(event);
4120 if (!cb_state->waitedEvents.count(event)) {
4121 cb_state->writeEventsBeforeWait.push_back(event);
4122 }
4123
4124 cb_state->eventUpdates.emplace_back(
Jeff Bolz310775c2019-10-09 00:46:33 -05004125 [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
4126 return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
4127 });
locke-lunargd556cc32019-09-17 01:21:23 -06004128}
4129
4130void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
4131 VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
4132 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
4133 uint32_t bufferMemoryBarrierCount,
4134 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
4135 uint32_t imageMemoryBarrierCount,
4136 const VkImageMemoryBarrier *pImageMemoryBarriers) {
4137 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4138 for (uint32_t i = 0; i < eventCount; ++i) {
4139 auto event_state = GetEventState(pEvents[i]);
4140 if (event_state) {
Jeff Bolzadbfa852019-10-04 13:53:30 -05004141 AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
4142 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004143 }
4144 cb_state->waitedEvents.insert(pEvents[i]);
4145 cb_state->events.push_back(pEvents[i]);
4146 }
4147}
4148
Jeff Bolz310775c2019-10-09 00:46:33 -05004149bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
4150 (*localQueryToStateMap)[object] = value;
4151 return false;
4152}
4153
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004154bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass,
4155 QueryState value, QueryMap *localQueryToStateMap) {
Jeff Bolz310775c2019-10-09 00:46:33 -05004156 for (uint32_t i = 0; i < queryCount; i++) {
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004157 QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
Jeff Bolz310775c2019-10-09 00:46:33 -05004158 (*localQueryToStateMap)[object] = value;
locke-lunargd556cc32019-09-17 01:21:23 -06004159 }
4160 return false;
4161}
4162
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004163QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex,
4164 uint32_t perfPass) const {
4165 QueryObject query = QueryObject(QueryObject(queryPool, queryIndex), perfPass);
locke-lunargd556cc32019-09-17 01:21:23 -06004166
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004167 auto iter = localQueryToStateMap->find(query);
4168 if (iter != localQueryToStateMap->end()) return iter->second;
Jeff Bolz310775c2019-10-09 00:46:33 -05004169
Jeff Bolz310775c2019-10-09 00:46:33 -05004170 return QUERYSTATE_UNKNOWN;
locke-lunargd556cc32019-09-17 01:21:23 -06004171}
4172
4173void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004174 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004175 cb_state->activeQueries.insert(query_obj);
4176 cb_state->startedQueries.insert(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004177 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4178 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4179 QueryMap *localQueryToStateMap) {
4180 SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
4181 return false;
4182 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004183 auto pool_state = GetQueryPoolState(query_obj.pool);
4184 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4185 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004186}
4187
4188void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
4189 VkFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004190 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004191 QueryObject query = {queryPool, slot};
4192 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4193 RecordCmdBeginQuery(cb_state, query);
4194}
4195
4196void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004197 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004198 cb_state->activeQueries.erase(query_obj);
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004199 cb_state->queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
4200 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4201 QueryMap *localQueryToStateMap) {
4202 return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4203 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004204 auto pool_state = GetQueryPoolState(query_obj.pool);
4205 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
4206 cb_state);
locke-lunargd556cc32019-09-17 01:21:23 -06004207}
4208
4209void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004210 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004211 QueryObject query_obj = {queryPool, slot};
4212 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4213 RecordCmdEndQuery(cb_state, query_obj);
4214}
4215
4216void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4217 uint32_t firstQuery, uint32_t queryCount) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004218 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004219 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4220
Lionel Landwerlinb1e5a422020-02-18 16:49:09 +02004221 for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
4222 QueryObject query = {queryPool, slot};
4223 cb_state->resetQueries.insert(query);
4224 }
4225
Jeff Bolz310775c2019-10-09 00:46:33 -05004226 cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004227 bool do_validate, VkQueryPool &firstPerfQueryPool,
4228 uint32_t perfQueryPass,
4229 QueryMap *localQueryToStateMap) {
4230 return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
locke-lunargd556cc32019-09-17 01:21:23 -06004231 });
Jeff Bolzadbfa852019-10-04 13:53:30 -05004232 auto pool_state = GetQueryPoolState(queryPool);
4233 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004234 cb_state);
4235}
4236
4237void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
4238 uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
4239 VkDeviceSize dstOffset, VkDeviceSize stride,
4240 VkQueryResultFlags flags) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004241 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004242 auto cb_state = GetCBState(commandBuffer);
4243 auto dst_buff_state = GetBufferState(dstBuffer);
4244 AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004245 auto pool_state = GetQueryPoolState(queryPool);
4246 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004247 cb_state);
4248}
4249
4250void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
4251 VkQueryPool queryPool, uint32_t slot) {
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06004252 if (disabled[query_validation]) return;
locke-lunargd556cc32019-09-17 01:21:23 -06004253 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
Jeff Bolzadbfa852019-10-04 13:53:30 -05004254 auto pool_state = GetQueryPoolState(queryPool);
4255 AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
locke-lunargd556cc32019-09-17 01:21:23 -06004256 cb_state);
4257 QueryObject query = {queryPool, slot};
Lionel Landwerlin7dc796b2020-02-18 18:17:10 +02004258 cb_state->queryUpdates.emplace_back([query](const ValidationStateTracker *device_data, bool do_validate,
4259 VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
4260 QueryMap *localQueryToStateMap) {
4261 return SetQueryState(QueryObject(query, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
4262 });
locke-lunargd556cc32019-09-17 01:21:23 -06004263}
4264
4265void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
4266 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
4267 VkResult result) {
4268 if (VK_SUCCESS != result) return;
4269 // Shadow create info and store in map
Jeff Bolz6ae39612019-10-11 20:57:36 -05004270 auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
locke-lunargd556cc32019-09-17 01:21:23 -06004271
4272 if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004273 fb_state->attachments_view_state.resize(pCreateInfo->attachmentCount);
4274
locke-lunargd556cc32019-09-17 01:21:23 -06004275 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07004276 fb_state->attachments_view_state[i] = GetShared<IMAGE_VIEW_STATE>(pCreateInfo->pAttachments[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06004277 }
4278 }
4279 frameBufferMap[*pFramebuffer] = std::move(fb_state);
4280}
4281
// Build the subpass dependency graph for a render pass:
//  - subpassToNode: per-subpass prev/next adjacency from the explicit dependencies
//  - self_dependencies: indices of dependencies where src == dst subpass
//  - subpass_dependencies: per-subpass barrier lists (to/from external, prev/next maps)
//    plus the list of "async" subpasses that have no dependency path to this one.
// Note: rp_version is currently unused by this function.
void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
                                                 RENDER_PASS_STATE *render_pass) {
    auto &subpass_to_node = render_pass->subpassToNode;
    subpass_to_node.resize(pCreateInfo->subpassCount);
    auto &self_dependencies = render_pass->self_dependencies;
    self_dependencies.resize(pCreateInfo->subpassCount);
    auto &subpass_dependencies = render_pass->subpass_dependencies;
    subpass_dependencies.resize(pCreateInfo->subpassCount);

    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
        subpass_to_node[i].pass = i;
        self_dependencies[i].clear();
        subpass_dependencies[i].pass = i;
    }
    // Classify each explicit dependency: self, internal edge, or to/from VK_SUBPASS_EXTERNAL.
    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
        const auto &dependency = pCreateInfo->pDependencies[i];
        const auto srcSubpass = dependency.srcSubpass;
        const auto dstSubpass = dependency.dstSubpass;
        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
            if (dependency.srcSubpass == dependency.dstSubpass) {
                self_dependencies[dependency.srcSubpass].push_back(i);
            } else {
                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
            }
        }
        if (srcSubpass == VK_SUBPASS_EXTERNAL) {
            assert(dstSubpass != VK_SUBPASS_EXTERNAL);  // this is invalid per VUID-VkSubpassDependency-srcSubpass-00865
            subpass_dependencies[dstSubpass].barrier_from_external.emplace_back(&dependency);
        } else if (dstSubpass == VK_SUBPASS_EXTERNAL) {
            subpass_dependencies[srcSubpass].barrier_to_external.emplace_back(&dependency);
        } else if (dependency.srcSubpass != dependency.dstSubpass) {
            // ignore self dependencies in prev and next
            subpass_dependencies[srcSubpass].next[&subpass_dependencies[dstSubpass]].emplace_back(&dependency);
            subpass_dependencies[dstSubpass].prev[&subpass_dependencies[srcSubpass]].emplace_back(&dependency);
        }
    }

    //
    // Determine "asynchronous" subpasses.
    // Synchronization is only interested in asynchronous stages *earlier* than the current one, so we only look towards those.
    // NOTE: This is O(N^3), which we could shrink to O(N^2logN) using sets instead of arrays, but given that N is likely to be
    // small and the K for |= from the prev is much less than for set, we'll accept the brute force.
    // pass_depends[i][j] == true iff subpass i transitively depends on subpass j (j < i).
    std::vector<std::vector<bool>> pass_depends(pCreateInfo->subpassCount);
    for (uint32_t i = 1; i < pCreateInfo->subpassCount; ++i) {
        auto &depends = pass_depends[i];
        depends.resize(i);
        auto &subpass_dep = subpass_dependencies[i];
        // Union each direct predecessor's dependency set into ours, then add the predecessor itself.
        for (const auto &prev : subpass_dep.prev) {
            const auto prev_pass = prev.first->pass;
            const auto &prev_depends = pass_depends[prev_pass];
            for (uint32_t j = 0; j < prev_pass; j++) {
                depends[j] = depends[j] | prev_depends[j];
            }
            depends[prev_pass] = true;
        }
        // Any earlier subpass we do NOT depend on may execute concurrently ("async").
        for (uint32_t pass = 0; pass < subpass_dep.pass; pass++) {
            if (!depends[pass]) {
                subpass_dep.async.push_back(pass);
            }
        }
    }
}
4345
John Zulauf4aff5d92020-02-21 08:29:35 -07004346static VkSubpassDependency2 ImplicitDependencyFromExternal(uint32_t subpass) {
4347 VkSubpassDependency2 from_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4348 nullptr,
4349 VK_SUBPASS_EXTERNAL,
4350 subpass,
4351 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
4352 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4353 0,
4354 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4355 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4356 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4357 0,
4358 0};
4359 return from_external;
4360}
4361
4362static VkSubpassDependency2 ImplicitDependencyToExternal(uint32_t subpass) {
4363 VkSubpassDependency2 to_external = {VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
4364 nullptr,
4365 subpass,
4366 VK_SUBPASS_EXTERNAL,
4367 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
4368 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
4369 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
4370 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
4371 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
4372 0,
4373 0,
4374 0};
4375 return to_external;
4376}
4377
locke-lunargd556cc32019-09-17 01:21:23 -06004378void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
4379 std::shared_ptr<RENDER_PASS_STATE> &render_pass,
4380 VkRenderPass *pRenderPass) {
4381 render_pass->renderPass = *pRenderPass;
4382 auto create_info = render_pass->createInfo.ptr();
4383
4384 RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
4385
John Zulauf8863c332020-03-20 10:34:33 -06004386 struct AttachmentTracker { // This is really only of local interest, but a bit big for a lambda
4387 RENDER_PASS_STATE *const rp;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004388 std::vector<uint32_t> &first;
John Zulauf1507ee42020-05-18 11:33:09 -06004389 std::vector<bool> &first_is_transition;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004390 std::vector<uint32_t> &last;
John Zulauf8863c332020-03-20 10:34:33 -06004391 std::vector<std::vector<RENDER_PASS_STATE::AttachmentTransition>> &subpass_transitions;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004392 std::unordered_map<uint32_t, bool> &first_read;
4393 const uint32_t attachment_count;
John Zulauf8863c332020-03-20 10:34:33 -06004394 std::vector<VkImageLayout> attachment_layout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004395 std::vector<std::vector<VkImageLayout>> subpass_attachment_layout;
John Zulauf8863c332020-03-20 10:34:33 -06004396 AttachmentTracker(std::shared_ptr<RENDER_PASS_STATE> &render_pass)
4397 : rp(render_pass.get()),
4398 first(rp->attachment_first_subpass),
John Zulauf1507ee42020-05-18 11:33:09 -06004399 first_is_transition(rp->attachment_first_is_transition),
John Zulauf8863c332020-03-20 10:34:33 -06004400 last(rp->attachment_last_subpass),
4401 subpass_transitions(rp->subpass_transitions),
4402 first_read(rp->attachment_first_read),
4403 attachment_count(rp->createInfo.attachmentCount),
John Zulauf2bc1fde2020-04-24 15:09:51 -06004404 attachment_layout(),
4405 subpass_attachment_layout() {
John Zulaufbb9f07f2020-03-19 16:53:06 -06004406 first.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf1507ee42020-05-18 11:33:09 -06004407 first_is_transition.resize(attachment_count, false);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004408 last.resize(attachment_count, VK_SUBPASS_EXTERNAL);
John Zulauf8863c332020-03-20 10:34:33 -06004409 subpass_transitions.resize(rp->createInfo.subpassCount + 1); // Add an extra for EndRenderPass
4410 attachment_layout.reserve(attachment_count);
John Zulauf2bc1fde2020-04-24 15:09:51 -06004411 subpass_attachment_layout.resize(rp->createInfo.subpassCount);
4412 for (auto &subpass_layouts : subpass_attachment_layout) {
4413 subpass_layouts.resize(attachment_count, kInvalidLayout);
4414 }
4415
John Zulauf8863c332020-03-20 10:34:33 -06004416 for (uint32_t j = 0; j < attachment_count; j++) {
4417 attachment_layout.push_back(rp->createInfo.pAttachments[j].initialLayout);
4418 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004419 }
John Zulauf4aff5d92020-02-21 08:29:35 -07004420
John Zulaufbb9f07f2020-03-19 16:53:06 -06004421 void Update(uint32_t subpass, const VkAttachmentReference2 *attach_ref, uint32_t count, bool is_read) {
4422 if (nullptr == attach_ref) return;
4423 for (uint32_t j = 0; j < count; ++j) {
4424 const auto attachment = attach_ref[j].attachment;
4425 if (attachment != VK_ATTACHMENT_UNUSED) {
John Zulauf8863c332020-03-20 10:34:33 -06004426 const auto layout = attach_ref[j].layout;
John Zulaufbb9f07f2020-03-19 16:53:06 -06004427 // Take advantage of the fact that insert won't overwrite, so we'll only write the first time.
4428 first_read.insert(std::make_pair(attachment, is_read));
John Zulauf2bc1fde2020-04-24 15:09:51 -06004429 if (first[attachment] == VK_SUBPASS_EXTERNAL) {
4430 first[attachment] = subpass;
4431 const auto initial_layout = rp->createInfo.pAttachments[attachment].initialLayout;
John Zulauf1507ee42020-05-18 11:33:09 -06004432 if (initial_layout != layout) {
4433 subpass_transitions[subpass].emplace_back(VK_SUBPASS_EXTERNAL, attachment, initial_layout, layout);
4434 first_is_transition[attachment] = true;
4435 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004436 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004437 last[attachment] = subpass;
John Zulauf8863c332020-03-20 10:34:33 -06004438
John Zulauf2bc1fde2020-04-24 15:09:51 -06004439 for (const auto &prev : rp->subpass_dependencies[subpass].prev) {
John Zulaufbaea94f2020-09-15 17:55:16 -06004440 const auto prev_pass = prev.first->pass;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004441 const auto prev_layout = subpass_attachment_layout[prev_pass][attachment];
4442 if ((prev_layout != kInvalidLayout) && (prev_layout != layout)) {
4443 subpass_transitions[subpass].emplace_back(prev_pass, attachment, prev_layout, layout);
4444 }
John Zulauf8863c332020-03-20 10:34:33 -06004445 }
John Zulauf2bc1fde2020-04-24 15:09:51 -06004446 attachment_layout[attachment] = layout;
John Zulauf8863c332020-03-20 10:34:33 -06004447 }
4448 }
4449 }
4450 void FinalTransitions() {
4451 auto &final_transitions = subpass_transitions[rp->createInfo.subpassCount];
4452
4453 for (uint32_t attachment = 0; attachment < attachment_count; ++attachment) {
4454 const auto final_layout = rp->createInfo.pAttachments[attachment].finalLayout;
John Zulauf2bc1fde2020-04-24 15:09:51 -06004455 // Add final transitions for attachments that were used and change layout.
4456 if ((last[attachment] != VK_SUBPASS_EXTERNAL) && final_layout != attachment_layout[attachment]) {
4457 final_transitions.emplace_back(last[attachment], attachment, attachment_layout[attachment], final_layout);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004458 }
locke-lunargd556cc32019-09-17 01:21:23 -06004459 }
4460 }
John Zulaufbb9f07f2020-03-19 16:53:06 -06004461 };
John Zulauf8863c332020-03-20 10:34:33 -06004462 AttachmentTracker attachment_tracker(render_pass);
John Zulaufbb9f07f2020-03-19 16:53:06 -06004463
4464 for (uint32_t subpass_index = 0; subpass_index < create_info->subpassCount; ++subpass_index) {
4465 const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[subpass_index];
John Zulauf8863c332020-03-20 10:34:33 -06004466 attachment_tracker.Update(subpass_index, subpass.pColorAttachments, subpass.colorAttachmentCount, false);
4467 attachment_tracker.Update(subpass_index, subpass.pResolveAttachments, subpass.colorAttachmentCount, false);
4468 attachment_tracker.Update(subpass_index, subpass.pDepthStencilAttachment, 1, false);
4469 attachment_tracker.Update(subpass_index, subpass.pInputAttachments, subpass.inputAttachmentCount, true);
John Zulauf4aff5d92020-02-21 08:29:35 -07004470 }
John Zulauf8863c332020-03-20 10:34:33 -06004471 attachment_tracker.FinalTransitions();
John Zulauf4aff5d92020-02-21 08:29:35 -07004472
John Zulaufbb9f07f2020-03-19 16:53:06 -06004473 // Add implicit dependencies
John Zulauf8863c332020-03-20 10:34:33 -06004474 for (uint32_t attachment = 0; attachment < attachment_tracker.attachment_count; attachment++) {
4475 const auto first_use = attachment_tracker.first[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004476 if (first_use != VK_SUBPASS_EXTERNAL) {
4477 auto &subpass_dep = render_pass->subpass_dependencies[first_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004478 if (subpass_dep.barrier_from_external.size() == 0) {
4479 // Add implicit from barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004480 subpass_dep.implicit_barrier_from_external.reset(
4481 new VkSubpassDependency2(ImplicitDependencyFromExternal(first_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004482 subpass_dep.barrier_from_external.emplace_back(subpass_dep.implicit_barrier_from_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004483 }
4484 }
4485
John Zulauf8863c332020-03-20 10:34:33 -06004486 const auto last_use = attachment_tracker.last[attachment];
John Zulauf4aff5d92020-02-21 08:29:35 -07004487 if (last_use != VK_SUBPASS_EXTERNAL) {
4488 auto &subpass_dep = render_pass->subpass_dependencies[last_use];
John Zulaufbaea94f2020-09-15 17:55:16 -06004489 if (render_pass->subpass_dependencies[last_use].barrier_to_external.size() == 0) {
4490 // Add implicit to barrier if they're aren't any
John Zulauf4aff5d92020-02-21 08:29:35 -07004491 subpass_dep.implicit_barrier_to_external.reset(new VkSubpassDependency2(ImplicitDependencyToExternal(last_use)));
John Zulaufbaea94f2020-09-15 17:55:16 -06004492 subpass_dep.barrier_to_external.emplace_back(subpass_dep.implicit_barrier_to_external.get());
John Zulauf4aff5d92020-02-21 08:29:35 -07004493 }
locke-lunargd556cc32019-09-17 01:21:23 -06004494 }
4495 }
4496
4497 // Even though render_pass is an rvalue-ref parameter, still must move s.t. move assignment is invoked.
4498 renderPassMap[*pRenderPass] = std::move(render_pass);
4499}
4500
// Style note:
// Use of rvalue reference exceeds recommended usage of rvalue refs in the google style guide, but intentionally forces the
// caller to move or copy. This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of
// shared_ptr copy construction or assignment.
4505void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
4506 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4507 VkResult result) {
4508 if (VK_SUCCESS != result) return;
4509 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4510 RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
4511}
4512
Tony-LunarG977448c2019-12-02 14:52:02 -07004513void ValidationStateTracker::RecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4514 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4515 VkResult result) {
locke-lunargd556cc32019-09-17 01:21:23 -06004516 if (VK_SUCCESS != result) return;
4517 auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
4518 RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
4519}
4520
Tony-LunarG977448c2019-12-02 14:52:02 -07004521void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4522 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4523 VkResult result) {
4524 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4525}
4526
4527void ValidationStateTracker::PostCallRecordCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
4528 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
4529 VkResult result) {
4530 RecordCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass, result);
4531}
4532
// Common state update for vkCmdBeginRenderPass/2/2KHR: caches the active render pass, framebuffer,
// subpass, device mask, and any imageless-framebuffer attachments on the command buffer state.
// No-op if the render pass handle does not resolve to tracked state.
void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
                                                           const VkSubpassContents contents) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
    // pRenderPassBegin may be null; guard both shared-state lookups
    auto render_pass_state = pRenderPassBegin ? GetShared<RENDER_PASS_STATE>(pRenderPassBegin->renderPass) : nullptr;
    auto framebuffer = pRenderPassBegin ? GetShared<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer) : nullptr;

    if (render_pass_state) {
        cb_state->activeFramebuffer = framebuffer;
        cb_state->activeRenderPass = render_pass_state;
        // Deep-copy the begin info so it outlives the caller's pointer
        cb_state->activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
        cb_state->activeSubpass = 0;  // A render pass always begins at subpass 0
        cb_state->activeSubpassContents = contents;
        if (framebuffer) cb_state->framebuffers.insert(framebuffer);
        // Connect this framebuffer and its children to this cmdBuffer
        AddFramebufferBinding(cb_state, framebuffer.get());
        // Connect this RP to cmdBuffer
        AddCommandBufferBinding(
            render_pass_state->cb_bindings,
            VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state.get()), cb_state);

        // Device-group rendering: take the mask from the chained struct, else inherit the command buffer's initial mask
        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
        if (chained_device_group_struct) {
            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
        } else {
            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
        }

        // Imageless framebuffer: attachments are supplied at begin time via VkRenderPassAttachmentBeginInfo
        cb_state->imagelessFramebufferAttachments.clear();
        auto attachment_info_struct = lvl_find_in_chain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
        if (attachment_info_struct) {
            for (uint32_t i = 0; i < attachment_info_struct->attachmentCount; i++) {
                IMAGE_VIEW_STATE *img_view_state = GetImageViewState(attachment_info_struct->pAttachments[i]);
                cb_state->imagelessFramebufferAttachments.push_back(img_view_state);
            }
        }
    }
}
4571
4572void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
4573 const VkRenderPassBeginInfo *pRenderPassBegin,
4574 VkSubpassContents contents) {
4575 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
4576}
4577
4578void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
4579 const VkRenderPassBeginInfo *pRenderPassBegin,
4580 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
4581 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4582}
4583
Jeremy Hayes9bda85a2020-05-21 16:36:17 -06004584void ValidationStateTracker::PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4585 uint32_t counterBufferCount,
4586 const VkBuffer *pCounterBuffers,
4587 const VkDeviceSize *pCounterBufferOffsets) {
4588 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4589
4590 cb_state->transform_feedback_active = true;
4591}
4592
4593void ValidationStateTracker::PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer,
4594 uint32_t counterBufferCount, const VkBuffer *pCounterBuffers,
4595 const VkDeviceSize *pCounterBufferOffsets) {
4596 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4597
4598 cb_state->transform_feedback_active = false;
4599}
4600
Tony-LunarG977448c2019-12-02 14:52:02 -07004601void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
4602 const VkRenderPassBeginInfo *pRenderPassBegin,
4603 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
4604 RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
4605}
4606
locke-lunargd556cc32019-09-17 01:21:23 -06004607void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4608 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4609 cb_state->activeSubpass++;
4610 cb_state->activeSubpassContents = contents;
4611}
4612
4613void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
4614 RecordCmdNextSubpass(commandBuffer, contents);
4615}
4616
4617void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
4618 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
4619 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4620 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4621}
4622
Tony-LunarG977448c2019-12-02 14:52:02 -07004623void ValidationStateTracker::PostCallRecordCmdNextSubpass2(VkCommandBuffer commandBuffer,
4624 const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
4625 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4626 RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
4627}
4628
locke-lunargd556cc32019-09-17 01:21:23 -06004629void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
4630 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
4631 cb_state->activeRenderPass = nullptr;
4632 cb_state->activeSubpass = 0;
4633 cb_state->activeFramebuffer = VK_NULL_HANDLE;
Lionel Landwerlin484d10f2020-04-24 01:34:47 +03004634 cb_state->imagelessFramebufferAttachments.clear();
locke-lunargd556cc32019-09-17 01:21:23 -06004635}
4636
4637void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
4638 RecordCmdEndRenderPassState(commandBuffer);
4639}
4640
4641void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
4642 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4643 RecordCmdEndRenderPassState(commandBuffer);
4644}
4645
Tony-LunarG977448c2019-12-02 14:52:02 -07004646void ValidationStateTracker::PostCallRecordCmdEndRenderPass2(VkCommandBuffer commandBuffer,
4647 const VkSubpassEndInfoKHR *pSubpassEndInfo) {
4648 RecordCmdEndRenderPassState(commandBuffer);
4649}
// vkCmdExecuteCommands: record execution of secondary command buffers inside a primary.
// Links primary/secondary state, propagates image layout maps, and forwards deferred
// query/queue-submit callbacks from each secondary to the primary.
void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
                                                             const VkCommandBuffer *pCommandBuffers) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    CMD_BUFFER_STATE *sub_cb_state = NULL;
    for (uint32_t i = 0; i < commandBuffersCount; i++) {
        sub_cb_state = GetCBState(pCommandBuffers[i]);
        assert(sub_cb_state);
        // If any executed secondary is not SIMULTANEOUS_USE, the primary loses that capability too
        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
                // from the validation step to the recording step
                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
            }
        }

        // Propagate initial layout and current layout state to the primary cmd buffer
        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
        // ValidationStateTracker these maps will be empty, so leaving the propagation in the the state tracker should be a no-op
        // for those other classes.
        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
            const auto image = sub_layout_map_entry.first;
            const auto *image_state = GetImageState(image);
            if (!image_state) continue;  // Can't set layouts of a dead image

            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
        }

        // Cross-link the primary and secondary command buffer state objects
        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
        cb_state->linkedCommandBuffers.insert(sub_cb_state);
        sub_cb_state->linkedCommandBuffers.insert(cb_state);
        // Deferred callbacks recorded on the secondary must now run when the primary does
        for (auto &function : sub_cb_state->queryUpdates) {
            cb_state->queryUpdates.push_back(function);
        }
        for (auto &function : sub_cb_state->queue_submit_functions) {
            cb_state->queue_submit_functions.push_back(function);
        }
    }
}
4692
4693void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
4694 VkFlags flags, void **ppData, VkResult result) {
4695 if (VK_SUCCESS != result) return;
4696 RecordMappedMemory(mem, offset, size, ppData);
4697}
4698
4699void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
4700 auto mem_info = GetDevMemState(mem);
4701 if (mem_info) {
4702 mem_info->mapped_range = MemRange();
4703 mem_info->p_driver_data = nullptr;
4704 }
4705}
4706
// Common recorder for vkBindImageMemory / vkBindImageMemory2: records either a swapchain
// binding (when VkBindImageMemorySwapchainInfoKHR is chained) or a device-memory binding,
// and registers aliasing for ALIAS_BIT or swapchain-bound images.
void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
    if (image_state) {
        // An Android special image cannot get VkSubresourceLayout until the image binds a memory.
        // See: VUID-vkGetImageSubresourceLayout-image-01895
        image_state->fragment_encoder =
            std::unique_ptr<const subresource_adapter::ImageRangeEncoder>(new subresource_adapter::ImageRangeEncoder(*image_state));
        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
        if (swapchain_info) {
            // Image is backed by a swapchain image rather than device memory
            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
            if (swapchain) {
                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
                image_state->bind_swapchain = swapchain_info->swapchain;
                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
            }
        } else {
            // Track bound memory range information
            auto mem_info = GetDevMemState(bindInfo.memory);
            if (mem_info) {
                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset);
            }

            // Track objects tied to memory
            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
        }
        // Aliasing is possible for explicitly-aliased images and for all swapchain-bound images
        if ((image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) || swapchain_info) {
            AddAliasingImage(image_state);
        }
    }
}
4738
4739void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
4740 VkDeviceSize memoryOffset, VkResult result) {
4741 if (VK_SUCCESS != result) return;
4742 VkBindImageMemoryInfo bindInfo = {};
4743 bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
4744 bindInfo.image = image;
4745 bindInfo.memory = mem;
4746 bindInfo.memoryOffset = memoryOffset;
4747 UpdateBindImageMemoryState(bindInfo);
4748}
4749
4750void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
4751 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4752 if (VK_SUCCESS != result) return;
4753 for (uint32_t i = 0; i < bindInfoCount; i++) {
4754 UpdateBindImageMemoryState(pBindInfos[i]);
4755 }
4756}
4757
4758void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
4759 const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
4760 if (VK_SUCCESS != result) return;
4761 for (uint32_t i = 0; i < bindInfoCount; i++) {
4762 UpdateBindImageMemoryState(pBindInfos[i]);
4763 }
4764}
4765
4766void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
4767 auto event_state = GetEventState(event);
4768 if (event_state) {
4769 event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
4770 }
locke-lunargd556cc32019-09-17 01:21:23 -06004771}
4772
4773void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
4774 const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
4775 VkResult result) {
4776 if (VK_SUCCESS != result) return;
4777 RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
4778 pImportSemaphoreFdInfo->flags);
4779}
4780
4781void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
4782 VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
4783 SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
4784 if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4785 // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
4786 semaphore_state->scope = kSyncScopeExternalPermanent;
4787 }
4788}
4789
4790#ifdef VK_USE_PLATFORM_WIN32_KHR
4791void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
4792 VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
4793 if (VK_SUCCESS != result) return;
4794 RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
4795 pImportSemaphoreWin32HandleInfo->flags);
4796}
4797
4798void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
4799 const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4800 HANDLE *pHandle, VkResult result) {
4801 if (VK_SUCCESS != result) return;
4802 RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
4803}
4804
4805void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
4806 VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
4807 if (VK_SUCCESS != result) return;
4808 RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
4809 pImportFenceWin32HandleInfo->flags);
4810}
4811
4812void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
4813 const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
4814 HANDLE *pHandle, VkResult result) {
4815 if (VK_SUCCESS != result) return;
4816 RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
4817}
4818#endif
4819
4820void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
4821 VkResult result) {
4822 if (VK_SUCCESS != result) return;
4823 RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
4824}
4825
4826void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
4827 VkFenceImportFlagsKHR flags) {
4828 FENCE_STATE *fence_node = GetFenceState(fence);
4829 if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
4830 if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
4831 fence_node->scope == kSyncScopeInternal) {
4832 fence_node->scope = kSyncScopeExternalTemporary;
4833 } else {
4834 fence_node->scope = kSyncScopeExternalPermanent;
4835 }
4836 }
4837}
4838
4839void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
4840 VkResult result) {
4841 if (VK_SUCCESS != result) return;
4842 RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
4843}
4844
4845void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
4846 FENCE_STATE *fence_state = GetFenceState(fence);
4847 if (fence_state) {
4848 if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
4849 // Export with reference transference becomes external
4850 fence_state->scope = kSyncScopeExternalPermanent;
4851 } else if (fence_state->scope == kSyncScopeInternal) {
4852 // Export with copy transference has a side effect of resetting the fence
4853 fence_state->state = FENCE_UNSIGNALED;
4854 }
4855 }
4856}
4857
4858void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
4859 VkResult result) {
4860 if (VK_SUCCESS != result) return;
4861 RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
4862}
4863
4864void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
4865 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
4866 if (VK_SUCCESS != result) return;
4867 eventMap[*pEvent].write_in_use = 0;
4868 eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
4869}
4870
4871void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
4872 VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
4873 SWAPCHAIN_NODE *old_swapchain_state) {
4874 if (VK_SUCCESS == result) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05004875 auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
locke-lunargd556cc32019-09-17 01:21:23 -06004876 if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
4877 VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
4878 swapchain_state->shared_presentable = true;
4879 }
4880 surface_state->swapchain = swapchain_state.get();
4881 swapchainMap[*pSwapchain] = std::move(swapchain_state);
4882 } else {
4883 surface_state->swapchain = nullptr;
4884 }
4885 // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
4886 if (old_swapchain_state) {
4887 old_swapchain_state->retired = true;
4888 }
4889 return;
4890}
4891
4892void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
4893 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
4894 VkResult result) {
4895 auto surface_state = GetSurfaceState(pCreateInfo->surface);
4896 auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
4897 RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
4898}
4899
// vkDestroySwapchainKHR: tear down tracking for the swapchain and every image it owned,
// unlink it from its surface, and remove it from the swapchain map.
void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                              const VkAllocationCallbacks *pAllocator) {
    if (!swapchain) return;
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data) {
        // Swapchain images are owned by the swapchain: drop their bindings and state first
        for (const auto &swapchain_image : swapchain_data->images) {
            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
            imageMap.erase(swapchain_image.image);
            RemoveAliasingImages(swapchain_image.bound_images);
        }

        // Detach from the surface if this swapchain is still the surface's current one
        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
        if (surface_state) {
            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
        }
        // Mark destroyed before erasing so outstanding shared_ptr holders can observe it
        swapchain_data->destroyed = true;
        swapchainMap.erase(swapchain);
    }
}
4919
// vkQueuePresentKHR: retire the wait semaphores and, for each swapchain whose present
// succeeded (VK_SUCCESS or VK_SUBOPTIMAL_KHR), mark the presented image as released to the WSI.
void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
        if (pSemaphore) {
            pSemaphore->signaler.first = VK_NULL_HANDLE;
            pSemaphore->signaled = false;
        }
    }

    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
        // confused itself just as much.
        // Per-swapchain results (if requested) override the aggregate result
        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
        // Mark the image as having been released to the WSI
        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
            auto image_state = GetImageState(image);
            if (image_state) {
                image_state->acquired = false;
                // NOTE(review): presumably shared-presentable images stay in a fixed layout after first present — confirm
                if (image_state->shared_presentable) {
                    image_state->layout_locked = true;
                }
            }
        }
    }
    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
    // its semaphore waits) /never/ participate in any completion proof.
}
4951
4952void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
4953 const VkSwapchainCreateInfoKHR *pCreateInfos,
4954 const VkAllocationCallbacks *pAllocator,
4955 VkSwapchainKHR *pSwapchains, VkResult result) {
4956 if (pCreateInfos) {
4957 for (uint32_t i = 0; i < swapchainCount; i++) {
4958 auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
4959 auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
4960 RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
4961 }
4962 }
4963}
4964
// Common recorder for vkAcquireNextImageKHR/2KHR: puts the app-provided fence in flight,
// treats the app-provided semaphore as signaled, and marks the acquired image.
void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    auto pFence = GetFenceState(fence);
    if (pFence && pFence->scope == kSyncScopeInternal) {
        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
        // import
        pFence->state = FENCE_INFLIGHT;
        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
    }

    auto pSemaphore = GetSemaphoreState(semaphore);
    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
        // temporary import
        pSemaphore->signaled = true;
        pSemaphore->signaler.first = VK_NULL_HANDLE;
    }

    // Mark the image as acquired.
    auto swapchain_data = GetSwapchainState(swapchain);
    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
        auto image = swapchain_data->images[*pImageIndex].image;
        auto image_state = GetImageState(image);
        if (image_state) {
            image_state->acquired = true;
            // Propagate the swapchain's shared-presentable property onto the image
            image_state->shared_presentable = swapchain_data->shared_presentable;
        }
    }
}
4994
4995void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
4996 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
4997 VkResult result) {
4998 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
4999 RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
5000}
5001
5002void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
5003 uint32_t *pImageIndex, VkResult result) {
5004 if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
5005 RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
5006 pAcquireInfo->fence, pImageIndex);
5007}
5008
5009void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
5010 VkPhysicalDevice *pPhysicalDevices, VkResult result) {
5011 if ((NULL != pPhysicalDevices) && ((result == VK_SUCCESS || result == VK_INCOMPLETE))) {
5012 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
5013 auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
5014 phys_device_state.phys_device = pPhysicalDevices[i];
5015 // Init actual features for each physical device
5016 DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
5017 }
5018 }
5019}
5020
5021// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
5022static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
5023 VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
5024 pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
5025
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005026 if (pQueueFamilyProperties) { // Save queue family properties
locke-lunargd556cc32019-09-17 01:21:23 -06005027 pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
5028 for (uint32_t i = 0; i < count; ++i) {
5029 pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
5030 }
5031 }
5032}
5033
5034void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
5035 uint32_t *pQueueFamilyPropertyCount,
5036 VkQueueFamilyProperties *pQueueFamilyProperties) {
5037 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5038 assert(physical_device_state);
5039 VkQueueFamilyProperties2KHR *pqfp = nullptr;
5040 std::vector<VkQueueFamilyProperties2KHR> qfp;
5041 qfp.resize(*pQueueFamilyPropertyCount);
5042 if (pQueueFamilyProperties) {
5043 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
5044 qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
5045 qfp[i].pNext = nullptr;
5046 qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
5047 }
5048 pqfp = qfp.data();
5049 }
5050 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
5051}
5052
5053void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
5054 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
5055 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5056 assert(physical_device_state);
5057 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
5058 pQueueFamilyProperties);
5059}
5060
5061void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
5062 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
5063 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5064 assert(physical_device_state);
5065 StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
5066 pQueueFamilyProperties);
5067}
5068void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
5069 const VkAllocationCallbacks *pAllocator) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005070 if (!surface) return;
5071 auto surface_state = GetSurfaceState(surface);
5072 surface_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005073 surface_map.erase(surface);
5074}
5075
5076void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005077 surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
locke-lunargd556cc32019-09-17 01:21:23 -06005078}
5079
5080void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
5081 const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
5082 const VkAllocationCallbacks *pAllocator,
5083 VkSurfaceKHR *pSurface, VkResult result) {
5084 if (VK_SUCCESS != result) return;
5085 RecordVulkanSurface(pSurface);
5086}
5087
#ifdef VK_USE_PLATFORM_ANDROID_KHR
// Each platform-specific surface-creation entry point performs the same state
// tracking: on success, register the newly created VkSurfaceKHR handle.
void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_ANDROID_KHR

#ifdef VK_USE_PLATFORM_IOS_MVK
void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_IOS_MVK

#ifdef VK_USE_PLATFORM_MACOS_MVK
void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_MACOS_MVK

#ifdef VK_USE_PLATFORM_METAL_EXT
void ValidationStateTracker::PostCallRecordCreateMetalSurfaceEXT(VkInstance instance,
                                                                 const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_METAL_EXT

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                   VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_WAYLAND_KHR

#ifdef VK_USE_PLATFORM_WIN32_KHR
void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                 VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                               VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_XCB_KHR

#ifdef VK_USE_PLATFORM_XLIB_KHR
void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
                                                                VkResult result) {
    if (result == VK_SUCCESS) {
        RecordVulkanSurface(pSurface);
    }
}
#endif  // VK_USE_PLATFORM_XLIB_KHR
5164
Niklas Haas8b84af12020-04-19 22:20:11 +02005165void ValidationStateTracker::PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance,
5166 const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
5167 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
5168 VkResult result) {
5169 if (VK_SUCCESS != result) return;
5170 RecordVulkanSurface(pSurface);
5171}
5172
Cort23cf2282019-09-20 18:58:18 +02005173void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005174 VkPhysicalDeviceFeatures *pFeatures) {
5175 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Yilong Li358152a2020-07-08 02:16:45 -07005176 // Reset the features2 safe struct before setting up the features field.
5177 physical_device_state->features2 = safe_VkPhysicalDeviceFeatures2();
Cortffba2642019-09-20 22:09:41 +02005178 physical_device_state->features2.features = *pFeatures;
Cort23cf2282019-09-20 18:58:18 +02005179}
5180
5181void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005182 VkPhysicalDeviceFeatures2 *pFeatures) {
5183 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005184 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005185}
5186
5187void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
Cortffba2642019-09-20 22:09:41 +02005188 VkPhysicalDeviceFeatures2 *pFeatures) {
5189 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
Cortffba2642019-09-20 22:09:41 +02005190 physical_device_state->features2.initialize(pFeatures);
Cort23cf2282019-09-20 18:58:18 +02005191}
5192
locke-lunargd556cc32019-09-17 01:21:23 -06005193void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
5194 VkSurfaceKHR surface,
5195 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
5196 VkResult result) {
5197 if (VK_SUCCESS != result) return;
5198 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005199 physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005200
5201 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5202 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005203}
5204
5205void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
5206 VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5207 VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
5208 if (VK_SUCCESS != result) return;
5209 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005210 physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005211
5212 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5213 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005214}
5215
5216void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
5217 VkSurfaceKHR surface,
5218 VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
5219 VkResult result) {
5220 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005221 physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
5222 physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
5223 physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
5224 physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
5225 physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
5226 physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
5227 physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
5228 physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
5229 physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
5230 physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005231
5232 // TODO May make sense to move this to BestPractices, but needs further refactoring in CoreChecks first
5233 physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005234}
5235
5236void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
5237 uint32_t queueFamilyIndex, VkSurfaceKHR surface,
5238 VkBool32 *pSupported, VkResult result) {
5239 if (VK_SUCCESS != result) return;
5240 auto surface_state = GetSurfaceState(surface);
5241 surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
5242}
5243
5244void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
5245 VkSurfaceKHR surface,
5246 uint32_t *pPresentModeCount,
5247 VkPresentModeKHR *pPresentModes,
5248 VkResult result) {
5249 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5250
5251 // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
5252 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005253
5254 if (*pPresentModeCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005255 if (*pPresentModeCount > physical_device_state->present_modes.size())
5256 physical_device_state->present_modes.resize(*pPresentModeCount);
5257 }
5258 if (pPresentModes) {
locke-lunargd556cc32019-09-17 01:21:23 -06005259 for (uint32_t i = 0; i < *pPresentModeCount; i++) {
5260 physical_device_state->present_modes[i] = pPresentModes[i];
5261 }
5262 }
5263}
5264
5265void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
5266 uint32_t *pSurfaceFormatCount,
5267 VkSurfaceFormatKHR *pSurfaceFormats,
5268 VkResult result) {
5269 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5270
5271 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
locke-lunargd556cc32019-09-17 01:21:23 -06005272
5273 if (*pSurfaceFormatCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005274 if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
5275 physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
5276 }
5277 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005278 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5279 physical_device_state->surface_formats[i] = pSurfaceFormats[i];
5280 }
5281 }
5282}
5283
5284void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
5285 const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
5286 uint32_t *pSurfaceFormatCount,
5287 VkSurfaceFormat2KHR *pSurfaceFormats,
5288 VkResult result) {
5289 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5290
5291 auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
5292 if (*pSurfaceFormatCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005293 if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
5294 physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
5295 }
5296 if (pSurfaceFormats) {
locke-lunargd556cc32019-09-17 01:21:23 -06005297 for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
5298 physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
5299 }
5300 }
5301}
5302
// Forward the label-begin to the shared debug-utils label tracker so messages
// recorded for this command buffer can be annotated with the active label.
void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
}
5307
// Forward the label-end to the shared debug-utils label tracker, closing the
// label region opened by PreCallRecordCmdBeginDebugUtilsLabelEXT.
void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
    EndCmdDebugUtilsLabel(report_data, commandBuffer);
}
5311
5312void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
5313 const VkDebugUtilsLabelEXT *pLabelInfo) {
5314 InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
5315
5316 // Squirrel away an easily accessible copy.
5317 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5318 cb_state->debug_label = LoggingLabel(pLabelInfo);
5319}
5320
5321void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
5322 uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
5323 if (NULL != pPhysicalDeviceGroupProperties) {
5324 for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
5325 for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
5326 VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
5327 auto &phys_device_state = physical_device_map[cur_phys_dev];
5328 phys_device_state.phys_device = cur_phys_dev;
5329 // Init actual features for each physical device
5330 DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
5331 }
5332 }
5333 }
5334}
5335
5336void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
5337 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5338 VkResult result) {
5339 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5340 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5341}
5342
5343void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
5344 VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
5345 VkResult result) {
5346 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5347 RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
5348}
5349
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005350void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
5351 uint32_t queueFamilyIndex,
5352 uint32_t *pCounterCount,
5353 VkPerformanceCounterKHR *pCounters) {
5354 if (NULL == pCounters) return;
5355
5356 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5357 assert(physical_device_state);
5358
5359 std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
5360 queueFamilyCounters->counters.resize(*pCounterCount);
5361 for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];
5362
5363 physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
5364}
5365
5366void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
5367 VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
5368 VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
5369 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5370 RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
5371}
5372
5373void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
5374 VkResult result) {
5375 if (result == VK_SUCCESS) performance_lock_acquired = true;
5376}
5377
Lionel Landwerlinc7420912019-05-23 00:33:42 +01005378void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
5379 performance_lock_acquired = false;
5380 for (auto &cmd_buffer : commandBufferMap) {
5381 cmd_buffer.second->performance_lock_released = true;
5382 }
5383}
5384
locke-lunargd556cc32019-09-17 01:21:23 -06005385void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
5386 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5387 const VkAllocationCallbacks *pAllocator) {
5388 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005389 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5390 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005391 desc_template_map.erase(descriptorUpdateTemplate);
5392}
5393
5394void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
5395 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5396 const VkAllocationCallbacks *pAllocator) {
5397 if (!descriptorUpdateTemplate) return;
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005398 auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
5399 template_state->destroyed = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005400 desc_template_map.erase(descriptorUpdateTemplate);
5401}
5402
5403void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
5404 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
5405 safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005406 auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
locke-lunargd556cc32019-09-17 01:21:23 -06005407 desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
5408}
5409
5410void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
5411 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5412 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5413 if (VK_SUCCESS != result) return;
5414 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5415}
5416
5417void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
5418 VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
5419 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
5420 if (VK_SUCCESS != result) return;
5421 RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
5422}
5423
5424void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
5425 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
5426 const void *pData) {
5427 auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
5428 if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
5429 assert(0);
5430 } else {
5431 const TEMPLATE_STATE *template_state = template_map_entry->second.get();
5432 // TODO: Record template push descriptor updates
5433 if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
5434 PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
5435 }
5436 }
5437}
5438
// Core-1.1 entry point: delegate templated descriptor-set updates to the
// shared recording helper.
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5444
// KHR extension entry point: delegate templated descriptor-set updates to the
// shared recording helper (same behavior as the core-1.1 variant above).
void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                                             const void *pData) {
    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
}
5450
// Record a templated push-descriptor update: decode the template blob into
// ordinary VkWriteDescriptorSet updates and feed them through the regular
// push-descriptor state-tracking path.
void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
    const void *pData) {
    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);

    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
    if (template_state) {
        auto layout_data = GetPipelineLayout(layout);
        // Resolve the descriptor set layout for this set index from the pipeline layout.
        auto dsl = GetDslFromPipelineLayout(layout_data, set);
        const auto &template_ci = template_state->create_info;
        // Skip recording if the set layout is missing or already destroyed.
        if (dsl && !dsl->destroyed) {
            // Decode the template into a set of write updates
            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
                                                                    dsl->GetDescriptorSetLayout());
            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
                                            decoded_template.desc_writes.data());
        }
    }
}
5471
5472void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
5473 uint32_t *pPropertyCount, void *pProperties) {
5474 auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
5475 if (*pPropertyCount) {
locke-lunargd556cc32019-09-17 01:21:23 -06005476 physical_device_state->display_plane_property_count = *pPropertyCount;
5477 }
Nathaniel Cesario24184fe2020-10-06 12:46:12 -06005478 if (*pPropertyCount || pProperties) {
5479 physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
locke-lunargd556cc32019-09-17 01:21:23 -06005480 }
5481}
5482
5483void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
5484 uint32_t *pPropertyCount,
5485 VkDisplayPlanePropertiesKHR *pProperties,
5486 VkResult result) {
5487 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5488 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5489}
5490
5491void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
5492 uint32_t *pPropertyCount,
5493 VkDisplayPlaneProperties2KHR *pProperties,
5494 VkResult result) {
5495 if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
5496 RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
5497}
5498
5499void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5500 uint32_t query, VkQueryControlFlags flags, uint32_t index) {
5501 QueryObject query_obj = {queryPool, query, index};
5502 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5503 RecordCmdBeginQuery(cb_state, query_obj);
5504}
5505
5506void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
5507 uint32_t query, uint32_t index) {
5508 QueryObject query_obj = {queryPool, query, index};
5509 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5510 RecordCmdEndQuery(cb_state, query_obj);
5511}
5512
5513void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
5514 VkSamplerYcbcrConversion ycbcr_conversion) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005515 auto ycbcr_state = std::make_shared<SAMPLER_YCBCR_CONVERSION_STATE>();
5516
locke-lunargd556cc32019-09-17 01:21:23 -06005517 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005518 RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion, ycbcr_state.get());
locke-lunargd556cc32019-09-17 01:21:23 -06005519 }
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005520
5521 const VkFormat conversion_format = create_info->format;
5522
5523 if (conversion_format != VK_FORMAT_UNDEFINED) {
5524 // If format is VK_FORMAT_UNDEFINED, will be set by external AHB features
5525 ycbcr_state->format_features = GetPotentialFormatFeatures(conversion_format);
5526 }
5527
5528 ycbcr_state->chromaFilter = create_info->chromaFilter;
5529 ycbcr_state->format = conversion_format;
5530 samplerYcbcrConversionMap[ycbcr_conversion] = std::move(ycbcr_state);
locke-lunargd556cc32019-09-17 01:21:23 -06005531}
5532
5533void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
5534 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5535 const VkAllocationCallbacks *pAllocator,
5536 VkSamplerYcbcrConversion *pYcbcrConversion,
5537 VkResult result) {
5538 if (VK_SUCCESS != result) return;
5539 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5540}
5541
5542void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
5543 const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
5544 const VkAllocationCallbacks *pAllocator,
5545 VkSamplerYcbcrConversion *pYcbcrConversion,
5546 VkResult result) {
5547 if (VK_SUCCESS != result) return;
5548 RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
5549}
5550
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005551void ValidationStateTracker::RecordDestroySamplerYcbcrConversionState(VkSamplerYcbcrConversion ycbcr_conversion) {
5552 if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
5553 RecordDestroySamplerYcbcrConversionANDROID(ycbcr_conversion);
5554 }
5555
5556 auto ycbcr_state = GetSamplerYcbcrConversionState(ycbcr_conversion);
5557 ycbcr_state->destroyed = true;
5558 samplerYcbcrConversionMap.erase(ycbcr_conversion);
5559}
5560
locke-lunargd556cc32019-09-17 01:21:23 -06005561void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
5562 const VkAllocationCallbacks *pAllocator) {
5563 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005564 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005565}
5566
5567void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
5568 VkSamplerYcbcrConversion ycbcrConversion,
5569 const VkAllocationCallbacks *pAllocator) {
5570 if (!ycbcrConversion) return;
sfricke-samsungbe3584f2020-04-22 14:58:06 -07005571 RecordDestroySamplerYcbcrConversionState(ycbcrConversion);
locke-lunargd556cc32019-09-17 01:21:23 -06005572}
5573
// Shared recorder for vkResetQueryPool / vkResetQueryPoolEXT (host query
// reset): marks the affected query slots QUERYSTATE_RESET, including one
// entry per counter pass for performance-query pools.
void ValidationStateTracker::RecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                  uint32_t queryCount) {
    // Do nothing if the feature is not enabled.
    if (!enabled_features.core12.hostQueryReset) return;

    // Do nothing if the query pool has been destroyed.
    auto query_pool_state = GetQueryPoolState(queryPool);
    if (!query_pool_state) return;

    // Reset the state of existing entries. Clamp to the pool's slot count so
    // an over-large queryCount cannot index past the end of the pool.
    QueryObject query_obj{queryPool, 0};
    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
    for (uint32_t i = 0; i < max_query_count; ++i) {
        query_obj.query = firstQuery + i;
        queryToStateMap[query_obj] = QUERYSTATE_RESET;
        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
            // Performance queries keep separate state per counter pass.
            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
                query_obj.perf_pass = passIndex;
                queryToStateMap[query_obj] = QUERYSTATE_RESET;
            }
        }
    }
}
5597
// EXT entry point for host query reset: delegate to the shared recorder.
void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                             uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5602
// Core-1.2 entry point for host query reset: delegate to the shared recorder.
void ValidationStateTracker::PostCallRecordResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount) {
    RecordResetQueryPool(device, queryPool, firstQuery, queryCount);
}
5607
locke-lunargd556cc32019-09-17 01:21:23 -06005608void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
5609 const TEMPLATE_STATE *template_state, const void *pData) {
5610 // Translate the templated update into a normal update for validation...
5611 cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
5612 cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
5613 decoded_update.desc_writes.data(), 0, NULL);
5614}
5615
5616// Update the common AllocateDescriptorSetsData
5617void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
Jeff Bolz46c0ea02019-10-09 13:06:29 -05005618 cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
locke-lunargd556cc32019-09-17 01:21:23 -06005619 for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
Jeff Bolz6ae39612019-10-11 20:57:36 -05005620 auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
locke-lunargd556cc32019-09-17 01:21:23 -06005621 if (layout) {
5622 ds_data->layout_nodes[i] = layout;
5623 // Count total descriptors required per type
5624 for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
5625 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
5626 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
5627 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
5628 }
5629 }
5630 // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
5631 }
5632}
5633
// Decrement allocated sets from the pool and insert new sets into set_map
void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                           const VkDescriptorSet *descriptor_sets,
                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
    // Account for sets and individual descriptors allocated from pool
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
    }

    // Variable descriptor counts only apply when the pNext chain carries a
    // matching per-set count array (one entry per allocated set).
    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;

    // Create tracking object for each descriptor set; insert into global map and the pool's set.
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;

        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
                                                                       variable_count, this);
        pool_state->sets.insert(new_ds.get());
        // New sets start with no in-flight command buffer references.
        new_ds->in_use.store(0);
        setMap[descriptor_sets[i]] = std::move(new_ds);
    }
}
5659
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type,
                                                            VkPipelineBindPoint bind_point, const char *function) {
    UpdateDrawState(cb_state, cmd_type, bind_point, function);
    // Record that at least one dispatch-class command was issued on this buffer.
    cb_state->hasDispatchCmd = true;
}
5666
// Generic function to handle state update for all CmdDraw* type functions
void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, CMD_TYPE cmd_type, VkPipelineBindPoint bind_point,
                                                    const char *function) {
    UpdateStateCmdDrawDispatchType(cb_state, cmd_type, bind_point, function);
    // Record that at least one draw-class command was issued on this buffer.
    cb_state->hasDrawCmd = true;
}
5673
5674void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
5675 uint32_t firstVertex, uint32_t firstInstance) {
5676 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005677 UpdateStateCmdDrawType(cb_state, CMD_DRAW, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDraw()");
locke-lunargd556cc32019-09-17 01:21:23 -06005678}
5679
5680void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
5681 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
5682 uint32_t firstInstance) {
5683 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005684 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXED, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexed()");
locke-lunargd556cc32019-09-17 01:21:23 -06005685}
5686
5687void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5688 uint32_t count, uint32_t stride) {
5689 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5690 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005691 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005692 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5693}
5694
5695void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5696 VkDeviceSize offset, uint32_t count, uint32_t stride) {
5697 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5698 BUFFER_STATE *buffer_state = GetBufferState(buffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005699 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECT, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawIndexedIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005700 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5701}
5702
5703void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
5704 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005705 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCH, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatch()");
locke-lunargd556cc32019-09-17 01:21:23 -06005706}
5707
5708void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
5709 VkDeviceSize offset) {
5710 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005711 UpdateStateCmdDrawDispatchType(cb_state, CMD_DISPATCHINDIRECT, VK_PIPELINE_BIND_POINT_COMPUTE, "vkCmdDispatchIndirect()");
locke-lunargd556cc32019-09-17 01:21:23 -06005712 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5713 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5714}
5715
Tony-LunarG977448c2019-12-02 14:52:02 -07005716void ValidationStateTracker::RecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5717 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
locke-lunarg540b2252020-08-03 13:23:36 -06005718 uint32_t stride, const char *function) {
Tony-LunarG977448c2019-12-02 14:52:02 -07005719 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5720 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5721 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005722 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
Tony-LunarG977448c2019-12-02 14:52:02 -07005723 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5724 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5725}
5726
locke-lunargd556cc32019-09-17 01:21:23 -06005727void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5728 VkDeviceSize offset, VkBuffer countBuffer,
5729 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5730 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005731 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5732 "vkCmdDrawIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005733}
5734
5735void ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5736 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
5737 uint32_t maxDrawCount, uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005738 RecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5739 "vkCmdDrawIndirectCount()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005740}
5741
5742void ValidationStateTracker::RecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
5743 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
locke-lunarg540b2252020-08-03 13:23:36 -06005744 uint32_t maxDrawCount, uint32_t stride, const char *function) {
locke-lunargd556cc32019-09-17 01:21:23 -06005745 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5746 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5747 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005748 UpdateStateCmdDrawType(cb_state, CMD_DRAWINDEXEDINDIRECTCOUNT, VK_PIPELINE_BIND_POINT_GRAPHICS, function);
locke-lunargd556cc32019-09-17 01:21:23 -06005749 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5750 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5751}
5752
5753void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
5754 VkDeviceSize offset, VkBuffer countBuffer,
5755 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5756 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005757 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5758 "vkCmdDrawIndexedIndirectCountKHR()");
Tony-LunarG977448c2019-12-02 14:52:02 -07005759}
5760
5761void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
5762 VkDeviceSize offset, VkBuffer countBuffer,
5763 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5764 uint32_t stride) {
locke-lunarg540b2252020-08-03 13:23:36 -06005765 RecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride,
5766 "vkCmdDrawIndexedIndirectCount()");
locke-lunargd556cc32019-09-17 01:21:23 -06005767}
5768
5769void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
5770 uint32_t firstTask) {
5771 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005772 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSNV, VK_PIPELINE_BIND_POINT_GRAPHICS, "vkCmdDrawMeshTasksNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005773}
5774
5775void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5776 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
5777 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005778 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5779 "vkCmdDrawMeshTasksIndirectNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005780 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5781 if (buffer_state) {
5782 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5783 }
5784}
5785
5786void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
5787 VkDeviceSize offset, VkBuffer countBuffer,
5788 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
5789 uint32_t stride) {
5790 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5791 BUFFER_STATE *buffer_state = GetBufferState(buffer);
5792 BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
locke-lunarg540b2252020-08-03 13:23:36 -06005793 UpdateStateCmdDrawType(cb_state, CMD_DRAWMESHTASKSINDIRECTCOUNTNV, VK_PIPELINE_BIND_POINT_GRAPHICS,
5794 "vkCmdDrawMeshTasksIndirectCountNV()");
locke-lunargd556cc32019-09-17 01:21:23 -06005795 if (buffer_state) {
5796 AddCommandBufferBindingBuffer(cb_state, buffer_state);
5797 }
5798 if (count_buffer_state) {
5799 AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
5800 }
5801}
5802
5803void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
5804 const VkAllocationCallbacks *pAllocator,
5805 VkShaderModule *pShaderModule, VkResult result,
5806 void *csm_state_data) {
5807 if (VK_SUCCESS != result) return;
5808 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
5809
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06005810 spv_target_env spirv_environment = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
locke-lunargd556cc32019-09-17 01:21:23 -06005811 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Jeff Bolze7fc67b2019-10-04 12:29:31 -05005812 auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
5813 csm_state->unique_shader_id)
5814 : std::make_shared<SHADER_MODULE_STATE>();
locke-lunargde3f0fa2020-09-10 11:55:31 -06005815 SetPushConstantUsedInShader(*new_shader_module);
locke-lunargd556cc32019-09-17 01:21:23 -06005816 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
5817}
5818
// Populates per-stage state (entry point, SPIR-V module, accessible ids, descriptor
// uses) and folds this stage's descriptor requirements into the pipeline's
// active_slots map. Bails out early if the module's SPIR-V or the entry point is
// invalid, leaving stage_state only partially filled — downstream validation must
// not rely on it in that case.
void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                                       PIPELINE_STATE::StageState *stage_state) const {
    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
    stage_state->entry_point_name = pStage->pName;
    stage_state->shader_state = GetShared<SHADER_MODULE_STATE>(pStage->module);
    auto module = stage_state->shader_state.get();
    if (!module->has_valid_spirv) return;

    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) return;

    stage_state->stage_flag = pStage->stage;

    // Mark accessible ids
    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    stage_state->descriptor_uses = CollectInterfaceByDescriptorSlot(
        module, stage_state->accessible_ids, &stage_state->has_writable_descriptor, &stage_state->has_atomic_descriptor);
    // Capture descriptor uses for the pipeline
    for (const auto &use : stage_state->descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        // use.first is a (set, binding) pair; accumulate requirements across all stages
        // that touch the same slot (|= so earlier stages' flags are preserved).
        const uint32_t slot = use.first.first;
        pipeline->active_slots[slot][use.first.second].is_writable |= use.second.is_writable;
        auto &reqs = pipeline->active_slots[slot][use.first.second].reqs;
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
        if (use.second.is_atomic_operation) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION);
        if (use.second.is_sampler_implicitLod_dref_proj) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ);
        if (use.second.is_sampler_bias_offset) reqs = descriptor_req(reqs | DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET);

        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
        if (use.second.samplers_used_by_image.size()) {
            // Grow (never shrink) the per-image sampler tracking to cover every image
            // index this stage uses, then seed each used sampler with an empty
            // descriptor map to be filled at draw time.
            auto &samplers_used_by_image = pipeline->active_slots[slot][use.first.second].samplers_used_by_image;
            if (use.second.samplers_used_by_image.size() > samplers_used_by_image.size()) {
                samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
            }
            std::map<VkDescriptorSet, const cvdescriptorset::Descriptor *> sampler_descriptors;
            uint32_t image_index = 0;
            for (const auto &samplers : use.second.samplers_used_by_image) {
                for (const auto &sampler : samplers) {
                    samplers_used_by_image[image_index].emplace(sampler, sampler_descriptors);
                }
                ++image_index;
            }
        }
    }

    // Fragment-stage-only bookkeeping: which output locations the shader writes.
    if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
        pipeline->fragmentShader_writable_output_location_list = CollectWritableOutputLocationinFS(*module, *pStage);
    }
}
5871
// If the command buffer's tracked push-constant ranges differ from those of 'layout',
// reset the tracked push-constant data and rebuild the per-stage update bitmaps.
// Per stage flag, each byte of push_constant_data_update is presumably a marker:
// -1 for bytes not covered by any range for that stage, 0 for covered-but-not-yet-
// written bytes — TODO(review): confirm against the consumers of this map.
void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
    if (cb_state == nullptr) {
        return;
    }

    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
    if (pipeline_layout_state == nullptr) {
        return;
    }

    // Only reset when the layout's ranges actually differ from what is tracked.
    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
        cb_state->push_constant_data.clear();
        cb_state->push_constant_data_update.clear();
        uint32_t size_needed = 0;
        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
            // 'size' is the end offset of this range; the data buffer must reach it.
            auto size = push_constant_range.offset + push_constant_range.size;
            size_needed = std::max(size_needed, size);

            // Walk the stage-flag bitmask one bit at a time; each set bit gets its own
            // per-stage byte map keyed by the single-bit VkShaderStageFlagBits value.
            auto stageFlags = push_constant_range.stageFlags;
            uint32_t bit_shift = 0;
            while (stageFlags) {
                if (stageFlags & 1) {
                    VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
                    const auto it = cb_state->push_constant_data_update.find(flag);

                    if (it != cb_state->push_constant_data_update.end()) {
                        // Existing map for this stage: extend it so [0, offset) is -1
                        // and [offset, size) is 0, without shrinking what is there.
                        if (it->second.size() < push_constant_range.offset) {
                            it->second.resize(push_constant_range.offset, -1);
                        }
                        if (it->second.size() < size) {
                            it->second.resize(size, 0);
                        }
                    } else {
                        // First range seen for this stage: build the byte map fresh.
                        std::vector<int8_t> bytes;
                        bytes.resize(push_constant_range.offset, -1);
                        bytes.resize(size, 0);
                        cb_state->push_constant_data_update[flag] = bytes;
                    }
                }
                stageFlags = stageFlags >> 1;
                ++bit_shift;
            }
        }
        // Zero-initialized backing store large enough for every declared range.
        cb_state->push_constant_data.resize(size_needed, 0);
    }
}
John Zulauf22b0fbe2019-10-15 06:26:16 -06005919
// After vkGetSwapchainImagesKHR, create IMAGE_STATE tracking entries for any swapchain
// images not seen before. Each image gets a synthesized VkImageCreateInfo reconstructed
// from the swapchain's create info (the app never creates these images itself).
// Runs for both VK_SUCCESS and VK_INCOMPLETE (a partial image list is still valid).
void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
                                                                 VkResult result) {
    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
    auto swapchain_state = GetSwapchainState(swapchain);

    // Grow (never shrink) the tracked image list to the reported count.
    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);

    // pSwapchainImages may be null (count-query call) — then there is nothing to record.
    if (pSwapchainImages) {
        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.

            // Add imageMap entries for each swapchain image
            VkImageCreateInfo image_ci;
            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
            image_ci.pNext = nullptr;  // to be set later
            image_ci.flags = 0;        // to be updated below
            image_ci.imageType = VK_IMAGE_TYPE_2D;
            image_ci.format = swapchain_state->createInfo.imageFormat;
            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
            image_ci.extent.depth = 1;
            image_ci.mipLevels = 1;
            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
            image_ci.usage = swapchain_state->createInfo.imageUsage;
            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

            // Propagate the format-list chain from the swapchain, if present.
            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);

            // Map swapchain create flags onto the equivalent image create flags.
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);

            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(device, pSwapchainImages[i], &image_ci);
            auto &image_state = imageMap[pSwapchainImages[i]];
            // Swapchain images start with no valid contents and stay tied to their
            // swapchain/index for bind tracking.
            image_state->valid = false;
            image_state->create_from_swapchain = swapchain;
            image_state->bind_swapchain = swapchain;
            image_state->bind_swapchain_imageIndex = i;
            image_state->is_swapchain_image = true;
            swapchain_state->images[i].image = pSwapchainImages[i];
            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);

            AddImageStateProps(*image_state, device, physical_device);
        }
    }

    if (*pSwapchainImageCount) {
        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
    }
}
sourav parmar35e7a002020-06-09 17:58:44 -07005979
5980void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureKHR(
5981 VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR *pInfos,
5982 const VkAccelerationStructureBuildOffsetInfoKHR *const *ppOffsetInfos) {
5983 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
5984 if (cb_state == nullptr) {
5985 return;
5986 }
5987 for (uint32_t i = 0; i < infoCount; ++i) {
5988 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfos[i].dstAccelerationStructure);
5989 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfos[i].srcAccelerationStructure);
5990 if (dst_as_state != nullptr) {
5991 dst_as_state->built = true;
5992 dst_as_state->build_info_khr.initialize(pInfos);
5993 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
5994 }
5995 if (src_as_state != nullptr) {
5996 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
5997 }
5998 }
5999 cb_state->hasBuildAccelerationStructureCmd = true;
6000}
6001
6002void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureKHR(VkCommandBuffer commandBuffer,
6003 const VkCopyAccelerationStructureInfoKHR *pInfo) {
6004 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6005 if (cb_state) {
6006 ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(pInfo->src);
6007 ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(pInfo->dst);
6008 if (dst_as_state != nullptr && src_as_state != nullptr) {
6009 dst_as_state->built = true;
6010 dst_as_state->build_info_khr = src_as_state->build_info_khr;
6011 AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
6012 AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
6013 }
6014 }
6015}
Piers Daniell39842ee2020-07-10 16:42:33 -06006016
6017void ValidationStateTracker::PreCallRecordCmdSetCullModeEXT(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode) {
6018 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6019 cb_state->status |= CBSTATUS_CULL_MODE_SET;
6020 cb_state->static_status &= ~CBSTATUS_CULL_MODE_SET;
6021}
6022
6023void ValidationStateTracker::PreCallRecordCmdSetFrontFaceEXT(VkCommandBuffer commandBuffer, VkFrontFace frontFace) {
6024 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6025 cb_state->status |= CBSTATUS_FRONT_FACE_SET;
6026 cb_state->static_status &= ~CBSTATUS_FRONT_FACE_SET;
6027}
6028
6029void ValidationStateTracker::PreCallRecordCmdSetPrimitiveTopologyEXT(VkCommandBuffer commandBuffer,
6030 VkPrimitiveTopology primitiveTopology) {
6031 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6032 cb_state->primitiveTopology = primitiveTopology;
6033 cb_state->status |= CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6034 cb_state->static_status &= ~CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
6035}
6036
6037void ValidationStateTracker::PreCallRecordCmdSetViewportWithCountEXT(VkCommandBuffer commandBuffer, uint32_t viewportCount,
6038 const VkViewport *pViewports) {
6039 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6040 cb_state->viewportWithCountMask |= (1u << viewportCount) - 1u;
6041 cb_state->status |= CBSTATUS_VIEWPORT_WITH_COUNT_SET;
6042 cb_state->static_status &= ~CBSTATUS_VIEWPORT_WITH_COUNT_SET;
6043}
6044
6045void ValidationStateTracker::PreCallRecordCmdSetScissorWithCountEXT(VkCommandBuffer commandBuffer, uint32_t scissorCount,
6046 const VkRect2D *pScissors) {
6047 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6048 cb_state->scissorWithCountMask |= (1u << scissorCount) - 1u;
6049 cb_state->status |= CBSTATUS_SCISSOR_WITH_COUNT_SET;
6050 cb_state->static_status &= ~CBSTATUS_SCISSOR_WITH_COUNT_SET;
6051}
6052
6053void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers2EXT(VkCommandBuffer commandBuffer, uint32_t firstBinding,
6054 uint32_t bindingCount, const VkBuffer *pBuffers,
6055 const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes,
6056 const VkDeviceSize *pStrides) {
6057 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6058 if (pStrides) {
6059 cb_state->status |= CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
6060 cb_state->static_status &= ~CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
6061 }
6062
6063 uint32_t end = firstBinding + bindingCount;
6064 if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
6065 cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
6066 }
6067
6068 for (uint32_t i = 0; i < bindingCount; ++i) {
6069 auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
locke-lunarg1ae57d62020-11-18 10:49:19 -07006070 vertex_buffer_binding.buffer_state = GetShared<BUFFER_STATE>(pBuffers[i]);
Piers Daniell39842ee2020-07-10 16:42:33 -06006071 vertex_buffer_binding.offset = pOffsets[i];
6072 vertex_buffer_binding.size = (pSizes) ? pSizes[i] : VK_WHOLE_SIZE;
6073 vertex_buffer_binding.stride = (pStrides) ? pStrides[i] : 0;
6074 // Add binding for this vertex buffer to this commandbuffer
6075 if (pBuffers[i]) {
locke-lunarg1ae57d62020-11-18 10:49:19 -07006076 AddCommandBufferBindingBuffer(cb_state, vertex_buffer_binding.buffer_state.get());
Piers Daniell39842ee2020-07-10 16:42:33 -06006077 }
6078 }
6079}
6080
6081void ValidationStateTracker::PreCallRecordCmdSetDepthTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable) {
6082 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6083 cb_state->status |= CBSTATUS_DEPTH_TEST_ENABLE_SET;
6084 cb_state->static_status &= ~CBSTATUS_DEPTH_TEST_ENABLE_SET;
6085}
6086
6087void ValidationStateTracker::PreCallRecordCmdSetDepthWriteEnableEXT(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable) {
6088 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6089 cb_state->status |= CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6090 cb_state->static_status &= ~CBSTATUS_DEPTH_WRITE_ENABLE_SET;
6091}
6092
6093void ValidationStateTracker::PreCallRecordCmdSetDepthCompareOpEXT(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp) {
6094 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6095 cb_state->status |= CBSTATUS_DEPTH_COMPARE_OP_SET;
6096 cb_state->static_status &= ~CBSTATUS_DEPTH_COMPARE_OP_SET;
6097}
6098
6099void ValidationStateTracker::PreCallRecordCmdSetDepthBoundsTestEnableEXT(VkCommandBuffer commandBuffer,
6100 VkBool32 depthBoundsTestEnable) {
6101 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6102 cb_state->status |= CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6103 cb_state->static_status &= ~CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
6104}
6105void ValidationStateTracker::PreCallRecordCmdSetStencilTestEnableEXT(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable) {
6106 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6107 cb_state->status |= CBSTATUS_STENCIL_TEST_ENABLE_SET;
6108 cb_state->static_status &= ~CBSTATUS_STENCIL_TEST_ENABLE_SET;
6109}
6110
6111void ValidationStateTracker::PreCallRecordCmdSetStencilOpEXT(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
6112 VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp,
6113 VkCompareOp compareOp) {
6114 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6115 cb_state->status |= CBSTATUS_STENCIL_OP_SET;
6116 cb_state->static_status &= ~CBSTATUS_STENCIL_OP_SET;
6117}
locke-lunarg4189aa22020-10-21 00:23:48 -06006118
6119void ValidationStateTracker::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
6120 uint32_t discardRectangleCount,
6121 const VkRect2D *pDiscardRectangles) {
6122 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6123 cb_state->status |= CBSTATUS_DISCARD_RECTANGLE_SET;
6124 cb_state->static_status &= ~CBSTATUS_DISCARD_RECTANGLE_SET;
6125}
6126
6127void ValidationStateTracker::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
6128 const VkSampleLocationsInfoEXT *pSampleLocationsInfo) {
6129 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6130 cb_state->status |= CBSTATUS_SAMPLE_LOCATIONS_SET;
6131 cb_state->static_status &= ~CBSTATUS_SAMPLE_LOCATIONS_SET;
6132}
6133
6134void ValidationStateTracker::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer,
6135 VkCoarseSampleOrderTypeNV sampleOrderType,
6136 uint32_t customSampleOrderCount,
6137 const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) {
6138 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
6139 cb_state->status |= CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6140 cb_state->static_status &= ~CBSTATUS_COARSE_SAMPLE_ORDER_SET;
6141}