/* Copyright (c) 2020-2021 The Khronos Group Inc.
 * Copyright (c) 2020-2021 Valve Corporation
 * Copyright (c) 2020-2021 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tony Barbour <tony@lunarg.com>
 */
#pragma once
#include "chassis.h"
#include "shader_validation.h"
class UtilDescriptorSetManager {
  public:
    UtilDescriptorSetManager(VkDevice device, uint32_t numBindingsInSet);
    ~UtilDescriptorSetManager();

    VkResult GetDescriptorSet(VkDescriptorPool *desc_pool, VkDescriptorSetLayout ds_layout, VkDescriptorSet *desc_sets);
    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, VkDescriptorSetLayout ds_layout,
                               std::vector<VkDescriptorSet> *desc_sets);
    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);

  private:
    static const uint32_t kItemsPerChunk = 512;
    struct PoolTracker {
        uint32_t size;
        uint32_t used;
    };
    VkDevice device;
    uint32_t numBindingsInSet;
    layer_data::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
};
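// Minimal usage sketch of the manager above (illustrative only; "device", "layout", and the
// single-binding assumption are examples, not part of this header):
//
//     UtilDescriptorSetManager desc_set_manager(device, 1 /* numBindingsInSet */);
//     VkDescriptorPool pool = VK_NULL_HANDLE;
//     VkDescriptorSet desc_set = VK_NULL_HANDLE;
//     if (desc_set_manager.GetDescriptorSet(&pool, layout, &desc_set) == VK_SUCCESS) {
//         // ... update and bind desc_set ...
//         desc_set_manager.PutBackDescriptorSet(pool, desc_set);
//     }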
struct UtilQueueBarrierCommandInfo {
    VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
    VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
};
VkResult UtilInitializeVma(VkPhysicalDevice physical_device, VkDevice device, VmaAllocator *pAllocator);
void UtilPreCallRecordCreateDevice(VkPhysicalDevice gpu, safe_VkDeviceCreateInfo *modified_create_info,
                                   VkPhysicalDeviceFeatures supported_features, VkPhysicalDeviceFeatures desired_features);
template <typename ObjectType>
void UtilPostCallRecordCreateDevice(const VkDeviceCreateInfo *pCreateInfo, std::vector<VkDescriptorSetLayoutBinding> bindings,
                                    ObjectType *object_ptr, VkPhysicalDeviceProperties physical_device_properties) {
    // If api version 1.1 or later, SetDeviceLoaderData will be in the loader
    auto chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
    assert(chain_info->u.pfnSetDeviceLoaderData);
    object_ptr->vkSetDeviceLoaderData = chain_info->u.pfnSetDeviceLoaderData;

    // Some devices have extremely high limits here, so set a reasonable max because we have to pad
    // the pipeline layout with dummy descriptor set layouts.
    object_ptr->adjusted_max_desc_sets = physical_device_properties.limits.maxBoundDescriptorSets;
    object_ptr->adjusted_max_desc_sets = std::min(33U, object_ptr->adjusted_max_desc_sets);

    // We can't do anything if there is only one.
    // The device is probably not a legitimate Vulkan device, since there should be at least 4. Protect ourselves.
    if (object_ptr->adjusted_max_desc_sets == 1) {
        object_ptr->ReportSetupProblem(object_ptr->device, "Device can bind only a single descriptor set.");
        object_ptr->aborted = true;
        return;
    }
    object_ptr->desc_set_bind_index = object_ptr->adjusted_max_desc_sets - 1;

    VkResult result1 = UtilInitializeVma(object_ptr->physicalDevice, object_ptr->device, &object_ptr->vmaAllocator);
    assert(result1 == VK_SUCCESS);
    std::unique_ptr<UtilDescriptorSetManager> desc_set_manager(
        new UtilDescriptorSetManager(object_ptr->device, static_cast<uint32_t>(bindings.size())));

    const VkDescriptorSetLayoutCreateInfo debug_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0,
                                                                    static_cast<uint32_t>(bindings.size()), bindings.data()};

    const VkDescriptorSetLayoutCreateInfo dummy_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0, 0,
                                                                    NULL};

    result1 = DispatchCreateDescriptorSetLayout(object_ptr->device, &debug_desc_layout_info, NULL, &object_ptr->debug_desc_layout);

    // This is a layout used to "pad" a pipeline layout to fill in any gaps to the selected bind index.
    VkResult result2 =
        DispatchCreateDescriptorSetLayout(object_ptr->device, &dummy_desc_layout_info, NULL, &object_ptr->dummy_desc_layout);

    assert((result1 == VK_SUCCESS) && (result2 == VK_SUCCESS));
    if ((result1 != VK_SUCCESS) || (result2 != VK_SUCCESS)) {
        object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create descriptor set layout.");
        if (result1 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->debug_desc_layout, NULL);
        }
        if (result2 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->dummy_desc_layout, NULL);
        }
        object_ptr->debug_desc_layout = VK_NULL_HANDLE;
        object_ptr->dummy_desc_layout = VK_NULL_HANDLE;
        object_ptr->aborted = true;
        return;
    }
    object_ptr->desc_set_manager = std::move(desc_set_manager);

    // Register callback to be called at any ResetCommandBuffer time
    object_ptr->SetCommandBufferResetCallback(
        [object_ptr](VkCommandBuffer command_buffer) -> void { object_ptr->ResetCommandBuffer(command_buffer); });
}
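// Illustrative caller sketch (hedged: the real GPU-AV and Debug Printf validation objects supply
// their own binding lists; the single storage-buffer binding below is only an example):
//
//     std::vector<VkDescriptorSetLayoutBinding> bindings = {
//         {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1,
//          VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
//     };
//     UtilPostCallRecordCreateDevice(pCreateInfo, bindings, this, physical_device_properties);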
template <typename ObjectType>
void UtilPreCallRecordDestroyDevice(ObjectType *object_ptr) {
    for (auto &queue_barrier_command_info_kv : object_ptr->queue_barrier_command_infos) {
        UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_kv.second;

        DispatchFreeCommandBuffers(object_ptr->device, queue_barrier_command_info.barrier_command_pool, 1,
                                   &queue_barrier_command_info.barrier_command_buffer);
        queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;

        DispatchDestroyCommandPool(object_ptr->device, queue_barrier_command_info.barrier_command_pool, NULL);
        queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
    }
    object_ptr->queue_barrier_command_infos.clear();
    if (object_ptr->debug_desc_layout) {
        DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->debug_desc_layout, NULL);
        object_ptr->debug_desc_layout = VK_NULL_HANDLE;
    }
    if (object_ptr->dummy_desc_layout) {
        DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->dummy_desc_layout, NULL);
        object_ptr->dummy_desc_layout = VK_NULL_HANDLE;
    }
}

template <typename ObjectType>
void UtilPreCallRecordCreatePipelineLayout(create_pipeline_layout_api_state *cpl_state, ObjectType *object_ptr,
                                           const VkPipelineLayoutCreateInfo *pCreateInfo) {
    // Modify the pipeline layout by:
    // 1. Copying the caller's descriptor set desc_layouts
    // 2. Filling in dummy descriptor layouts up to the max binding
    // 3. Filling in the debug descriptor layout at the max binding slot
    cpl_state->new_layouts.reserve(object_ptr->adjusted_max_desc_sets);
    cpl_state->new_layouts.insert(cpl_state->new_layouts.end(), &pCreateInfo->pSetLayouts[0],
                                  &pCreateInfo->pSetLayouts[pCreateInfo->setLayoutCount]);
    for (uint32_t i = pCreateInfo->setLayoutCount; i < object_ptr->adjusted_max_desc_sets - 1; ++i) {
        cpl_state->new_layouts.push_back(object_ptr->dummy_desc_layout);
    }
    cpl_state->new_layouts.push_back(object_ptr->debug_desc_layout);
    cpl_state->modified_create_info.pSetLayouts = cpl_state->new_layouts.data();
    cpl_state->modified_create_info.setLayoutCount = object_ptr->adjusted_max_desc_sets;
}
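// For example, with adjusted_max_desc_sets == 8 and an application-provided setLayoutCount of 2,
// the modified pSetLayouts array ends up as (sketch):
//
//     { app_layout_0, app_layout_1, dummy, dummy, dummy, dummy, dummy, debug_desc_layout }
//
// so the debug descriptor set always lands at set index adjusted_max_desc_sets - 1 (desc_set_bind_index).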

template <typename CreateInfo>
struct CreatePipelineTraits {};
template <>
struct CreatePipelineTraits<VkGraphicsPipelineCreateInfo> {
    using SafeType = safe_VkGraphicsPipelineCreateInfo;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->graphicsPipelineCI; }
    static uint32_t GetStageCount(const VkGraphicsPipelineCreateInfo &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkGraphicsPipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkComputePipelineCreateInfo> {
    using SafeType = safe_VkComputePipelineCreateInfo;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->computePipelineCI; }
    static uint32_t GetStageCount(const VkComputePipelineCreateInfo &createInfo) { return 1; }
    static VkShaderModule GetShaderModule(const VkComputePipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.stage.module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        assert(stage == 0);
        createInfo->stage.module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoNV> {
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoNV &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoNV &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoKHR> {
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoKHR &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoKHR &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};
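// Generic usage sketch of the traits above (illustrative; "ci" stands in for a
// VkGraphicsPipelineCreateInfo available at the call site):
//
//     using Accessor = CreatePipelineTraits<VkGraphicsPipelineCreateInfo>;
//     for (uint32_t stage = 0; stage < Accessor::GetStageCount(ci); ++stage) {
//         VkShaderModule module = Accessor::GetShaderModule(ci, stage);
//         // ... inspect or replace "module" ...
//     }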

// Examine the pipelines to see if they use the debug descriptor set binding index.
// If any do, create new non-instrumented shader modules and use them to replace the instrumented
// shaders in the pipeline. Return the (possibly) modified create infos to the caller.
template <typename CreateInfo, typename SafeCreateInfo, typename ObjectType>
void UtilPreCallRecordPipelineCreations(uint32_t count, const CreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
                                        VkPipeline *pPipelines, std::vector<std::shared_ptr<PIPELINE_STATE>> &pipe_state,
                                        std::vector<SafeCreateInfo> *new_pipeline_create_infos,
                                        const VkPipelineBindPoint bind_point, ObjectType *object_ptr) {
    using Accessor = CreatePipelineTraits<CreateInfo>;
    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
        return;
    }

    // Walk through all the pipelines, make a copy of each, and flag each pipeline that contains a shader that uses the debug
    // descriptor set index.
    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
        uint32_t stageCount = Accessor::GetStageCount(pCreateInfos[pipeline]);
        new_pipeline_create_infos->push_back(Accessor::GetPipelineCI(pipe_state[pipeline].get()));

        bool replace_shaders = false;
        if (pipe_state[pipeline]->active_slots.find(object_ptr->desc_set_bind_index) != pipe_state[pipeline]->active_slots.end()) {
            replace_shaders = true;
        }
        // If the app requests all available sets, the pipeline layout was not modified at pipeline layout creation, and the
        // already-instrumented shaders need to be replaced with uninstrumented shaders.
        if (pipe_state[pipeline]->pipeline_layout->set_layouts.size() >= object_ptr->adjusted_max_desc_sets) {
            replace_shaders = true;
        }

        if (replace_shaders) {
            for (uint32_t stage = 0; stage < stageCount; ++stage) {
                const SHADER_MODULE_STATE *shader =
                    object_ptr->GetShaderModuleState(Accessor::GetShaderModule(pCreateInfos[pipeline], stage));

                VkShaderModuleCreateInfo create_info = {};
                VkShaderModule shader_module;
                create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
                create_info.pCode = shader->words.data();
                create_info.codeSize = shader->words.size() * sizeof(uint32_t);
                VkResult result = DispatchCreateShaderModule(object_ptr->device, &create_info, pAllocator, &shader_module);
                if (result == VK_SUCCESS) {
                    Accessor::SetShaderModule(&(*new_pipeline_create_infos)[pipeline], shader_module, stage);
                } else {
                    object_ptr->ReportSetupProblem(object_ptr->device,
                                                   "Unable to replace instrumented shader with non-instrumented one. "
                                                   "Device could become unstable.");
                }
            }
        }
    }
}
// For every pipeline:
// - For every shader in a pipeline:
//   - If the shader had to be replaced in PreCallRecord (because the pipeline is using the debug desc set index):
//     - Destroy it since it has been bound into the pipeline by now. This is our only chance to delete it.
//   - Track the shader in the shader_map
//   - Save the shader binary if it contains debug code
template <typename CreateInfo, typename ObjectType>
void UtilPostCallRecordPipelineCreations(const uint32_t count, const CreateInfo *pCreateInfos,
                                         const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                         const VkPipelineBindPoint bind_point, ObjectType *object_ptr) {
    using Accessor = CreatePipelineTraits<CreateInfo>;
    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
        return;
    }
    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
        auto pipeline_state = object_ptr->ValidationStateTracker::GetPipelineState(pPipelines[pipeline]);
        if (nullptr == pipeline_state) continue;

        uint32_t stageCount = 0;
        if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
            stageCount = pipeline_state->graphicsPipelineCI.stageCount;
        } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
            stageCount = 1;
        } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
            stageCount = pipeline_state->raytracingPipelineCI.stageCount;
        } else {
            assert(false);
        }

        for (uint32_t stage = 0; stage < stageCount; ++stage) {
            if (pipeline_state->active_slots.find(object_ptr->desc_set_bind_index) != pipeline_state->active_slots.end()) {
                DispatchDestroyShaderModule(object_ptr->device, Accessor::GetShaderModule(pCreateInfos[pipeline], stage),
                                            pAllocator);
            }

            const SHADER_MODULE_STATE *shader_state = nullptr;
            if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->graphicsPipelineCI.pStages[stage].module);
            } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                assert(stage == 0);
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->computePipelineCI.stage.module);
            } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->raytracingPipelineCI.pStages[stage].module);
            } else {
                assert(false);
            }

            std::vector<unsigned int> code;
            // Save the shader binary.
            // The core_validation ShaderModule tracker saves the binary too, but discards it when the ShaderModule
            // is destroyed. Applications may destroy ShaderModules after they are placed in a pipeline and before
            // the pipeline is used, so we have to keep another copy.
            if (shader_state && shader_state->has_valid_spirv) code = shader_state->words;

            object_ptr->shader_map[shader_state->gpu_validation_shader_id].pipeline = pipeline_state->pipeline;
            // Be careful to use the originally bound (instrumented) shader here, even if PreCallRecord had to back it
            // out with a non-instrumented shader. The non-instrumented shader (found in pCreateInfo) was destroyed above.
            VkShaderModule shader_module = VK_NULL_HANDLE;
            if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                shader_module = pipeline_state->graphicsPipelineCI.pStages[stage].module;
            } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                assert(stage == 0);
                shader_module = pipeline_state->computePipelineCI.stage.module;
            } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                shader_module = pipeline_state->raytracingPipelineCI.pStages[stage].module;
            } else {
                assert(false);
            }
            object_ptr->shader_map[shader_state->gpu_validation_shader_id].shader_module = shader_module;
            object_ptr->shader_map[shader_state->gpu_validation_shader_id].pgm = std::move(code);
        }
    }
}
template <typename CreateInfos, typename SafeCreateInfos>
void UtilCopyCreatePipelineFeedbackData(const uint32_t count, CreateInfos *pCreateInfos, SafeCreateInfos *pSafeCreateInfos) {
    for (uint32_t i = 0; i < count; i++) {
        auto src_feedback_struct = LvlFindInChain<VkPipelineCreationFeedbackCreateInfoEXT>(pSafeCreateInfos[i].pNext);
        if (!src_feedback_struct) return;
        auto dst_feedback_struct = const_cast<VkPipelineCreationFeedbackCreateInfoEXT *>(
            LvlFindInChain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext));
        *dst_feedback_struct->pPipelineCreationFeedback = *src_feedback_struct->pPipelineCreationFeedback;
        for (uint32_t j = 0; j < src_feedback_struct->pipelineStageCreationFeedbackCount; j++) {
            dst_feedback_struct->pPipelineStageCreationFeedbacks[j] = src_feedback_struct->pPipelineStageCreationFeedbacks[j];
        }
    }
}

// For the given command buffer, map its debug data buffers and read their contents for analysis.
template <typename ObjectType>
void UtilProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE *cb_node, ObjectType *object_ptr) {
    if (cb_node && (cb_node->hasDrawCmd || cb_node->hasTraceRaysCmd || cb_node->hasDispatchCmd)) {
        auto gpu_buffer_list = object_ptr->GetBufferInfo(cb_node->commandBuffer);
        uint32_t draw_index = 0;
        uint32_t compute_index = 0;
        uint32_t ray_trace_index = 0;

        for (auto &buffer_info : gpu_buffer_list) {
            char *pData;

            uint32_t operation_index = 0;
            if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                operation_index = draw_index;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                operation_index = compute_index;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                operation_index = ray_trace_index;
            } else {
                assert(false);
            }

            VkResult result = vmaMapMemory(object_ptr->vmaAllocator, buffer_info.output_mem_block.allocation, (void **)&pData);
            if (result == VK_SUCCESS) {
                object_ptr->AnalyzeAndGenerateMessages(cb_node->commandBuffer, queue, buffer_info, operation_index,
                                                       (uint32_t *)pData);
                vmaUnmapMemory(object_ptr->vmaAllocator, buffer_info.output_mem_block.allocation);
            }

            if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                draw_index++;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                compute_index++;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                ray_trace_index++;
            } else {
                assert(false);
            }
        }
    }
}
// Submit a memory barrier on graphics queues.
// Lazy-create and record the needed command buffer.
template <typename ObjectType>
void UtilSubmitBarrier(VkQueue queue, ObjectType *object_ptr) {
    auto queue_barrier_command_info_it = object_ptr->queue_barrier_command_infos.emplace(queue, UtilQueueBarrierCommandInfo{});
    if (queue_barrier_command_info_it.second) {
        UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;

        uint32_t queue_family_index = 0;

        auto queue_state_it = object_ptr->queueMap.find(queue);
        if (queue_state_it != object_ptr->queueMap.end()) {
            queue_family_index = queue_state_it->second.queueFamilyIndex;
        }

        VkResult result = VK_SUCCESS;

        VkCommandPoolCreateInfo pool_create_info = {};
        pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        pool_create_info.queueFamilyIndex = queue_family_index;
        result = DispatchCreateCommandPool(object_ptr->device, &pool_create_info, nullptr,
                                           &queue_barrier_command_info.barrier_command_pool);
        if (result != VK_SUCCESS) {
            object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create command pool for barrier CB.");
            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
            return;
        }

        VkCommandBufferAllocateInfo buffer_alloc_info = {};
        buffer_alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        buffer_alloc_info.commandPool = queue_barrier_command_info.barrier_command_pool;
        buffer_alloc_info.commandBufferCount = 1;
        buffer_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        result = DispatchAllocateCommandBuffers(object_ptr->device, &buffer_alloc_info,
                                                &queue_barrier_command_info.barrier_command_buffer);
        if (result != VK_SUCCESS) {
            object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create barrier command buffer.");
            DispatchDestroyCommandPool(object_ptr->device, queue_barrier_command_info.barrier_command_pool, nullptr);
            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
            queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;
            return;
        }

        // Hook up command buffer dispatch
        object_ptr->vkSetDeviceLoaderData(object_ptr->device, queue_barrier_command_info.barrier_command_buffer);

        // Record a global memory barrier to force availability of device memory operations to the host domain.
        VkCommandBufferBeginInfo command_buffer_begin_info = {};
        command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        result = DispatchBeginCommandBuffer(queue_barrier_command_info.barrier_command_buffer, &command_buffer_begin_info);
        if (result == VK_SUCCESS) {
            VkMemoryBarrier memory_barrier = {};
            memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
            memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
            memory_barrier.dstAccessMask = VK_ACCESS_HOST_READ_BIT;

            DispatchCmdPipelineBarrier(queue_barrier_command_info.barrier_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                       VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr);
            DispatchEndCommandBuffer(queue_barrier_command_info.barrier_command_buffer);
        }
    }

    UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;
    if (queue_barrier_command_info.barrier_command_buffer != VK_NULL_HANDLE) {
        VkSubmitInfo submit_info = {};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &queue_barrier_command_info.barrier_command_buffer;
        DispatchQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
    }
}
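// A hedged sketch of how the helpers above are expected to compose around a queue submission
// (the exact call sites live in the validation objects, not in this header):
//
//     UtilSubmitBarrier(queue, object_ptr);                          // flush device writes toward the host
//     DispatchQueueWaitIdle(queue);                                  // wait for the submitted work
//     UtilProcessInstrumentationBuffer(queue, cb_node, object_ptr);  // read back and report results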
void UtilGenerateStageMessage(const uint32_t *debug_record, std::string &msg);
void UtilGenerateCommonMessage(const debug_report_data *report_data, const VkCommandBuffer commandBuffer,
                               const uint32_t *debug_record, const VkShaderModule shader_module_handle,
                               const VkPipeline pipeline_handle, const VkPipelineBindPoint pipeline_bind_point,
                               const uint32_t operation_index, std::string &msg);
void UtilGenerateSourceMessages(const std::vector<unsigned int> &pgm, const uint32_t *debug_record, bool from_printf,
                                std::string &filename_msg, std::string &source_msg);
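
// A hedged sketch of how the three message helpers are typically combined once a debug record has
// been read back (every name other than the Util* functions is illustrative):
//
//     std::string stage_msg, common_msg, filename_msg, source_msg;
//     UtilGenerateStageMessage(debug_record, stage_msg);
//     UtilGenerateCommonMessage(report_data, cmd_buffer, debug_record, shader_module_handle, pipeline_handle,
//                               pipeline_bind_point, operation_index, common_msg);
//     UtilGenerateSourceMessages(pgm, debug_record, false /* from_printf */, filename_msg, source_msg);
//     // The caller concatenates these pieces into the final validation error message.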