/* Copyright (c) 2020 The Khronos Group Inc.
 * Copyright (c) 2020 Valve Corporation
 * Copyright (c) 2020 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tony Barbour <tony@lunarg.com>
 */
#pragma once
#include "chassis.h"
#include "shader_validation.h"
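
// Hands out descriptor sets for the layer's own use from descriptor pools it creates and
// tracks in desc_pool_map_; PutBackDescriptorSet() releases a set back to the pool it came from.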
class UtilDescriptorSetManager {
  public:
    UtilDescriptorSetManager(VkDevice device, uint32_t numBindingsInSet);
    ~UtilDescriptorSetManager();

    VkResult GetDescriptorSet(VkDescriptorPool *desc_pool, VkDescriptorSetLayout ds_layout, VkDescriptorSet *desc_sets);
    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, VkDescriptorSetLayout ds_layout,
                               std::vector<VkDescriptorSet> *desc_sets);
    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);

  private:
    static const uint32_t kItemsPerChunk = 512;
    struct PoolTracker {
        uint32_t size;
        uint32_t used;
    };
    VkDevice device;
    uint32_t numBindingsInSet;
    std::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
};
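
// Per-queue command pool and command buffer, lazily created by UtilSubmitBarrier() below to
// record a global memory barrier that makes device writes available to the host.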
struct UtilQueueBarrierCommandInfo {
    VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
    VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
};
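
// Creates the VMA allocator these utilities use for their internal buffer allocations
// (see UtilProcessInstrumentationBuffer below).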
VkResult UtilInitializeVma(VkPhysicalDevice physical_device, VkDevice device, VmaAllocator *pAllocator);
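
// Called before vkCreateDevice to adjust the app's device create info, based on which of the
// desired features the physical device actually supports.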
void UtilPreCallRecordCreateDevice(VkPhysicalDevice gpu, safe_VkDeviceCreateInfo *modified_create_info,
                                   VkPhysicalDeviceFeatures supported_features, VkPhysicalDeviceFeatures desired_features);
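
// Per-device setup for a validation object (ObjectType): store the loader's SetDeviceLoaderData
// callback, choose the descriptor set bind index the layer will use, create the debug and dummy
// descriptor set layouts, the descriptor set manager, and the VMA allocator, and register a
// command buffer reset callback on the object.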
template <typename ObjectType>
void UtilPostCallRecordCreateDevice(const VkDeviceCreateInfo *pCreateInfo, std::vector<VkDescriptorSetLayoutBinding> bindings,
                                    ObjectType *object_ptr, VkPhysicalDeviceProperties physical_device_properties) {
    // If the API version is 1.1 or later, SetDeviceLoaderData will be in the loader
    auto chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
    assert(chain_info->u.pfnSetDeviceLoaderData);
    object_ptr->vkSetDeviceLoaderData = chain_info->u.pfnSetDeviceLoaderData;

    // Some devices have extremely high limits here, so set a reasonable max because we have to pad
    // the pipeline layout with dummy descriptor set layouts.
    object_ptr->adjusted_max_desc_sets = physical_device_properties.limits.maxBoundDescriptorSets;
    object_ptr->adjusted_max_desc_sets = std::min(33U, object_ptr->adjusted_max_desc_sets);

    // We can't do anything if the device exposes only one descriptor set.
    // A conformant Vulkan implementation must support at least 4, so this is probably not a
    // legitimate Vulkan device; protect ourselves and abort.
    if (object_ptr->adjusted_max_desc_sets == 1) {
        object_ptr->ReportSetupProblem(object_ptr->device, "Device can bind only a single descriptor set.");
        object_ptr->aborted = true;
        return;
    }
    object_ptr->desc_set_bind_index = object_ptr->adjusted_max_desc_sets - 1;

    VkResult result1 = UtilInitializeVma(object_ptr->physicalDevice, object_ptr->device, &object_ptr->vmaAllocator);
    assert(result1 == VK_SUCCESS);
    std::unique_ptr<UtilDescriptorSetManager> desc_set_manager(
        new UtilDescriptorSetManager(object_ptr->device, static_cast<uint32_t>(bindings.size())));

    const VkDescriptorSetLayoutCreateInfo debug_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0,
                                                                    static_cast<uint32_t>(bindings.size()), bindings.data()};

    const VkDescriptorSetLayoutCreateInfo dummy_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0, 0,
                                                                    NULL};

    result1 = DispatchCreateDescriptorSetLayout(object_ptr->device, &debug_desc_layout_info, NULL, &object_ptr->debug_desc_layout);

    // This is a layout used to "pad" a pipeline layout to fill in any gaps to the selected bind index.
    VkResult result2 =
        DispatchCreateDescriptorSetLayout(object_ptr->device, &dummy_desc_layout_info, NULL, &object_ptr->dummy_desc_layout);

    assert((result1 == VK_SUCCESS) && (result2 == VK_SUCCESS));
    if ((result1 != VK_SUCCESS) || (result2 != VK_SUCCESS)) {
        object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create descriptor set layout.");
        if (result1 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->debug_desc_layout, NULL);
        }
        if (result2 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->dummy_desc_layout, NULL);
        }
        object_ptr->debug_desc_layout = VK_NULL_HANDLE;
        object_ptr->dummy_desc_layout = VK_NULL_HANDLE;
        object_ptr->aborted = true;
        return;
    }
    object_ptr->desc_set_manager = std::move(desc_set_manager);

    // Register callback to be called at any ResetCommandBuffer time
    object_ptr->SetCommandBufferResetCallback(
        [object_ptr](VkCommandBuffer command_buffer) -> void { object_ptr->ResetCommandBuffer(command_buffer); });
}
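
// Device-destroy teardown for a validation object: free the per-queue barrier command buffers
// and pools, destroy the debug and dummy descriptor set layouts, release the descriptor set
// manager, and destroy the VMA allocator.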
template <typename ObjectType>
void UtilPreCallRecordDestroyDevice(ObjectType *object_ptr) {
    for (auto &queue_barrier_command_info_kv : object_ptr->queue_barrier_command_infos) {
        UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_kv.second;

        DispatchFreeCommandBuffers(object_ptr->device, queue_barrier_command_info.barrier_command_pool, 1,
                                   &queue_barrier_command_info.barrier_command_buffer);
        queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;

        DispatchDestroyCommandPool(object_ptr->device, queue_barrier_command_info.barrier_command_pool, NULL);
        queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
    }
    object_ptr->queue_barrier_command_infos.clear();
    if (object_ptr->debug_desc_layout) {
        DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->debug_desc_layout, NULL);
        object_ptr->debug_desc_layout = VK_NULL_HANDLE;
    }
    if (object_ptr->dummy_desc_layout) {
        DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->dummy_desc_layout, NULL);
        object_ptr->dummy_desc_layout = VK_NULL_HANDLE;
    }
    object_ptr->desc_set_manager.reset();

    if (object_ptr->vmaAllocator) {
        vmaDestroyAllocator(object_ptr->vmaAllocator);
    }
}

template <typename ObjectType>
void UtilPreCallRecordCreatePipelineLayout(create_pipeline_layout_api_state *cpl_state, ObjectType *object_ptr,
                                           const VkPipelineLayoutCreateInfo *pCreateInfo) {
    // Modify the pipeline layout by:
    // 1. Copying the caller's descriptor set desc_layouts
    // 2. Filling in dummy descriptor layouts up to the max binding
    // 3. Filling in the debug descriptor layout at the max binding slot
    cpl_state->new_layouts.reserve(object_ptr->adjusted_max_desc_sets);
    cpl_state->new_layouts.insert(cpl_state->new_layouts.end(), &pCreateInfo->pSetLayouts[0],
                                  &pCreateInfo->pSetLayouts[pCreateInfo->setLayoutCount]);
    for (uint32_t i = pCreateInfo->setLayoutCount; i < object_ptr->adjusted_max_desc_sets - 1; ++i) {
        cpl_state->new_layouts.push_back(object_ptr->dummy_desc_layout);
    }
    cpl_state->new_layouts.push_back(object_ptr->debug_desc_layout);
    cpl_state->modified_create_info.pSetLayouts = cpl_state->new_layouts.data();
    cpl_state->modified_create_info.setLayoutCount = object_ptr->adjusted_max_desc_sets;
}

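// Traits that give the pipeline-creation helpers below uniform access to each create-info type:
// the safe copy stored in PIPELINE_STATE, the stage count, and per-stage get/set of the shader
// module, specialized for graphics, compute, and NV/KHR ray tracing pipelines.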
template <typename CreateInfo>
struct CreatePipelineTraits {};
template <>
struct CreatePipelineTraits<VkGraphicsPipelineCreateInfo> {
    using SafeType = safe_VkGraphicsPipelineCreateInfo;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->graphicsPipelineCI; }
    static uint32_t GetStageCount(const VkGraphicsPipelineCreateInfo &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkGraphicsPipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkComputePipelineCreateInfo> {
    using SafeType = safe_VkComputePipelineCreateInfo;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->computePipelineCI; }
    static uint32_t GetStageCount(const VkComputePipelineCreateInfo &createInfo) { return 1; }
    static VkShaderModule GetShaderModule(const VkComputePipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.stage.module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        assert(stage == 0);
        createInfo->stage.module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoNV> {
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoNV &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoNV &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoKHR> {
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoKHR &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoKHR &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

// Examine the pipelines to see if they use the debug descriptor set binding index.
// If any do, create new non-instrumented shader modules and use them to replace the instrumented
// shaders in the pipeline. Return the (possibly) modified create infos to the caller.
template <typename CreateInfo, typename SafeCreateInfo, typename ObjectType>
void UtilPreCallRecordPipelineCreations(uint32_t count, const CreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
                                        VkPipeline *pPipelines, std::vector<std::shared_ptr<PIPELINE_STATE>> &pipe_state,
                                        std::vector<SafeCreateInfo> *new_pipeline_create_infos,
                                        const VkPipelineBindPoint bind_point, ObjectType *object_ptr) {
    using Accessor = CreatePipelineTraits<CreateInfo>;
    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
        return;
    }

    // Walk through all the pipelines, make a copy of each, and flag each pipeline that contains a shader that uses the debug
    // descriptor set index.
    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
        uint32_t stageCount = Accessor::GetStageCount(pCreateInfos[pipeline]);
        new_pipeline_create_infos->push_back(Accessor::GetPipelineCI(pipe_state[pipeline].get()));

        bool replace_shaders = false;
        if (pipe_state[pipeline]->active_slots.find(object_ptr->desc_set_bind_index) != pipe_state[pipeline]->active_slots.end()) {
            replace_shaders = true;
        }
        // If the app requests all available sets, the pipeline layout was not modified at pipeline layout creation time, so the
        // already-instrumented shaders need to be replaced with uninstrumented shaders.
        if (pipe_state[pipeline]->pipeline_layout->set_layouts.size() >= object_ptr->adjusted_max_desc_sets) {
            replace_shaders = true;
        }

        if (replace_shaders) {
            for (uint32_t stage = 0; stage < stageCount; ++stage) {
                const SHADER_MODULE_STATE *shader =
                    object_ptr->GetShaderModuleState(Accessor::GetShaderModule(pCreateInfos[pipeline], stage));

                VkShaderModuleCreateInfo create_info = {};
                VkShaderModule shader_module;
                create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
                create_info.pCode = shader->words.data();
                create_info.codeSize = shader->words.size() * sizeof(uint32_t);
                VkResult result = DispatchCreateShaderModule(object_ptr->device, &create_info, pAllocator, &shader_module);
                if (result == VK_SUCCESS) {
                    Accessor::SetShaderModule(&(*new_pipeline_create_infos)[pipeline], shader_module, stage);
                } else {
                    object_ptr->ReportSetupProblem(object_ptr->device,
                                                   "Unable to replace instrumented shader with non-instrumented one. "
                                                   "Device could become unstable.");
                }
            }
        }
    }
}
// For every pipeline:
// - For every shader in a pipeline:
//   - If the shader had to be replaced in PreCallRecord (because the pipeline is using the debug desc set index):
//     - Destroy it since it has been bound into the pipeline by now. This is our only chance to delete it.
//   - Track the shader in the shader_map
//   - Save the shader binary if it contains debug code
template <typename CreateInfo, typename ObjectType>
void UtilPostCallRecordPipelineCreations(const uint32_t count, const CreateInfo *pCreateInfos,
                                         const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                         const VkPipelineBindPoint bind_point, ObjectType *object_ptr) {
    using Accessor = CreatePipelineTraits<CreateInfo>;
    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
        return;
    }
    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
        auto pipeline_state = object_ptr->ValidationStateTracker::GetPipelineState(pPipelines[pipeline]);
        if (nullptr == pipeline_state) continue;

        uint32_t stageCount = 0;
        if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
            stageCount = pipeline_state->graphicsPipelineCI.stageCount;
        } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
            stageCount = 1;
        } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
            stageCount = pipeline_state->raytracingPipelineCI.stageCount;
        } else {
            assert(false);
        }

        for (uint32_t stage = 0; stage < stageCount; ++stage) {
            if (pipeline_state->active_slots.find(object_ptr->desc_set_bind_index) != pipeline_state->active_slots.end()) {
                DispatchDestroyShaderModule(object_ptr->device, Accessor::GetShaderModule(pCreateInfos[pipeline], stage),
                                            pAllocator);
            }

            const SHADER_MODULE_STATE *shader_state = nullptr;
            if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->graphicsPipelineCI.pStages[stage].module);
            } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                assert(stage == 0);
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->computePipelineCI.stage.module);
            } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->raytracingPipelineCI.pStages[stage].module);
            } else {
                assert(false);
            }

            // Skip untracked modules so the shader_map updates below never dereference a null shader_state.
            if (!shader_state) continue;

            std::vector<unsigned int> code;
            // Save the shader binary
            // The core_validation ShaderModule tracker saves the binary too, but discards it when the ShaderModule
            // is destroyed. Applications may destroy ShaderModules after they are placed in a pipeline and before
            // the pipeline is used, so we have to keep another copy.
            if (shader_state->has_valid_spirv) code = shader_state->words;

            object_ptr->shader_map[shader_state->gpu_validation_shader_id].pipeline = pipeline_state->pipeline;
            // Be careful to use the originally bound (instrumented) shader here, even if PreCallRecord had to back it
            // out with a non-instrumented shader. The non-instrumented shader (found in pCreateInfo) was destroyed above.
            VkShaderModule shader_module = VK_NULL_HANDLE;
            if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                shader_module = pipeline_state->graphicsPipelineCI.pStages[stage].module;
            } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                assert(stage == 0);
                shader_module = pipeline_state->computePipelineCI.stage.module;
            } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                shader_module = pipeline_state->raytracingPipelineCI.pStages[stage].module;
            } else {
                assert(false);
            }
            object_ptr->shader_map[shader_state->gpu_validation_shader_id].shader_module = shader_module;
            object_ptr->shader_map[shader_state->gpu_validation_shader_id].pgm = std::move(code);
        }
    }
}
// For the given command buffer, map its debug data buffers and read their contents for analysis.
template <typename ObjectType>
void UtilProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE *cb_node, ObjectType *object_ptr) {
    if (cb_node && (cb_node->hasDrawCmd || cb_node->hasTraceRaysCmd || cb_node->hasDispatchCmd)) {
        auto gpu_buffer_list = object_ptr->GetBufferInfo(cb_node->commandBuffer);
        uint32_t draw_index = 0;
        uint32_t compute_index = 0;
        uint32_t ray_trace_index = 0;

        for (auto &buffer_info : gpu_buffer_list) {
            char *pData;

            uint32_t operation_index = 0;
            if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                operation_index = draw_index;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                operation_index = compute_index;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                operation_index = ray_trace_index;
            } else {
                assert(false);
            }

            VkResult result = vmaMapMemory(object_ptr->vmaAllocator, buffer_info.output_mem_block.allocation, (void **)&pData);
            if (result == VK_SUCCESS) {
                object_ptr->AnalyzeAndGenerateMessages(cb_node->commandBuffer, queue, buffer_info.pipeline_bind_point,
                                                       operation_index, (uint32_t *)pData);
                vmaUnmapMemory(object_ptr->vmaAllocator, buffer_info.output_mem_block.allocation);
            }

            if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                draw_index++;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                compute_index++;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                ray_trace_index++;
            } else {
                assert(false);
            }
        }
    }
}
// Submit a memory barrier on graphics queues.
// Lazy-create and record the needed command buffer.
template <typename ObjectType>
void UtilSubmitBarrier(VkQueue queue, ObjectType *object_ptr) {
    auto queue_barrier_command_info_it = object_ptr->queue_barrier_command_infos.emplace(queue, UtilQueueBarrierCommandInfo{});
    if (queue_barrier_command_info_it.second) {
        UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;

        uint32_t queue_family_index = 0;

        auto queue_state_it = object_ptr->queueMap.find(queue);
        if (queue_state_it != object_ptr->queueMap.end()) {
            queue_family_index = queue_state_it->second.queueFamilyIndex;
        }

        VkResult result = VK_SUCCESS;

        VkCommandPoolCreateInfo pool_create_info = {};
        pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        pool_create_info.queueFamilyIndex = queue_family_index;
        result = DispatchCreateCommandPool(object_ptr->device, &pool_create_info, nullptr,
                                           &queue_barrier_command_info.barrier_command_pool);
        if (result != VK_SUCCESS) {
            object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create command pool for barrier CB.");
            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
            return;
        }

        VkCommandBufferAllocateInfo buffer_alloc_info = {};
        buffer_alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        buffer_alloc_info.commandPool = queue_barrier_command_info.barrier_command_pool;
        buffer_alloc_info.commandBufferCount = 1;
        buffer_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        result = DispatchAllocateCommandBuffers(object_ptr->device, &buffer_alloc_info,
                                                &queue_barrier_command_info.barrier_command_buffer);
        if (result != VK_SUCCESS) {
            object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create barrier command buffer.");
            DispatchDestroyCommandPool(object_ptr->device, queue_barrier_command_info.barrier_command_pool, nullptr);
            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
            queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;
            return;
        }

        // Hook up command buffer dispatch
        object_ptr->vkSetDeviceLoaderData(object_ptr->device, queue_barrier_command_info.barrier_command_buffer);

        // Record a global memory barrier to force availability of device memory operations to the host domain.
        VkCommandBufferBeginInfo command_buffer_begin_info = {};
        command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        result = DispatchBeginCommandBuffer(queue_barrier_command_info.barrier_command_buffer, &command_buffer_begin_info);
        if (result == VK_SUCCESS) {
            VkMemoryBarrier memory_barrier = {};
            memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
            memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
            memory_barrier.dstAccessMask = VK_ACCESS_HOST_READ_BIT;

            DispatchCmdPipelineBarrier(queue_barrier_command_info.barrier_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                       VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr);
            DispatchEndCommandBuffer(queue_barrier_command_info.barrier_command_buffer);
        }
    }

    UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;
    if (queue_barrier_command_info.barrier_command_buffer != VK_NULL_HANDLE) {
        VkSubmitInfo submit_info = {};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &queue_barrier_command_info.barrier_command_buffer;
        DispatchQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
    }
}
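
// Helpers that decode a raw debug record into human-readable message strings: the shader stage
// portion, the common portion (command buffer, pipeline, shader module, bind point, and operation
// index), and the filename/source portion derived from the shader binary (pgm).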
void UtilGenerateStageMessage(const uint32_t *debug_record, std::string &msg);
void UtilGenerateCommonMessage(const debug_report_data *report_data, const VkCommandBuffer commandBuffer,
                               const uint32_t *debug_record, const VkShaderModule shader_module_handle,
                               const VkPipeline pipeline_handle, const VkPipelineBindPoint pipeline_bind_point,
                               const uint32_t operation_index, std::string &msg);
void UtilGenerateSourceMessages(const std::vector<unsigned int> &pgm, const uint32_t *debug_record, bool from_printf,
                                std::string &filename_msg, std::string &source_msg);
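
// Rough usage sketch (illustrative only, not part of this header): a validation object that
// derives from ValidationStateTracker and provides the members referenced above (vmaAllocator,
// desc_set_manager, adjusted_max_desc_sets, shader_map, queue_barrier_command_infos, ...) can
// drive these utilities from its intercept points roughly like this. The class name, binding
// contents, and the SubmittedCommandBuffers() helper below are hypothetical.
//
//   void MyGpuValidation::PostCallRecordCreateDevice(...) {
//       std::vector<VkDescriptorSetLayoutBinding> bindings = { /* layer-owned output buffer binding */ };
//       UtilPostCallRecordCreateDevice(pCreateInfo, bindings, this, physical_device_properties);
//   }
//
//   void MyGpuValidation::PostCallRecordQueueSubmit(VkQueue queue, ...) {
//       UtilSubmitBarrier(queue, this);      // make device writes available to the host
//       DispatchQueueWaitIdle(queue);        // wait so the output buffers are safe to read
//       for (CMD_BUFFER_STATE *cb_node : SubmittedCommandBuffers(...)) {
//           UtilProcessInstrumentationBuffer(queue, cb_node, this);
//       }
//   }
//
//   void MyGpuValidation::PreCallRecordDestroyDevice(...) { UtilPreCallRecordDestroyDevice(this); }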