/* Copyright (c) 2020-2021 The Khronos Group Inc.
 * Copyright (c) 2020-2021 Valve Corporation
 * Copyright (c) 2020-2021 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tony Barbour <tony@lunarg.com>
 */
#pragma once
#include "chassis.h"
#include "shader_validation.h"
#include "cmd_buffer_state.h"

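// Small pool-backed allocator for the descriptor sets that GPU-assisted validation and debug printf bind at the
// reserved (highest) set index. A rough usage sketch follows (illustrative only; 'layout' stands for a previously
// created VkDescriptorSetLayout and error handling is omitted):
//
//     VkDescriptorPool pool = VK_NULL_HANDLE;
//     VkDescriptorSet set = VK_NULL_HANDLE;
//     if (desc_set_manager->GetDescriptorSet(&pool, layout, &set) == VK_SUCCESS) {
//         // ... update and bind the set ...
//         desc_set_manager->PutBackDescriptorSet(pool, set);
//     }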
class UtilDescriptorSetManager {
  public:
    UtilDescriptorSetManager(VkDevice device, uint32_t numBindingsInSet);
    ~UtilDescriptorSetManager();

    VkResult GetDescriptorSet(VkDescriptorPool *desc_pool, VkDescriptorSetLayout ds_layout, VkDescriptorSet *desc_sets);
    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, VkDescriptorSetLayout ds_layout,
                               std::vector<VkDescriptorSet> *desc_sets);
    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);

  private:
    static const uint32_t kItemsPerChunk = 512;
    struct PoolTracker {
        uint32_t size;
        uint32_t used;
    };
    VkDevice device;
    uint32_t numBindingsInSet;
    layer_data::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
};
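// Per-queue command pool/buffer used to submit the end-of-queue memory barrier recorded by UtilSubmitBarrier().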
struct UtilQueueBarrierCommandInfo {
    VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
    VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
};
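// Shared setup helpers: creation of the layer's VMA allocator, and adjustment of the device create info with the
// features the instrumentation wants (as far as the device supports them).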
VkResult UtilInitializeVma(VkPhysicalDevice physical_device, VkDevice device, VmaAllocator *pAllocator);
void UtilPreCallRecordCreateDevice(VkPhysicalDevice gpu, safe_VkDeviceCreateInfo *modified_create_info,
                                   VkPhysicalDeviceFeatures supported_features, VkPhysicalDeviceFeatures desired_features);
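// Common device-creation bookkeeping for the GPU-AV/debug-printf validation objects: caches the loader callback,
// reserves the highest descriptor set index for instrumentation, creates the debug and dummy descriptor set layouts,
// and initializes VMA and the descriptor set manager.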
template <typename ObjectType>
void UtilPostCallRecordCreateDevice(const VkDeviceCreateInfo *pCreateInfo, std::vector<VkDescriptorSetLayoutBinding> bindings,
                                    ObjectType *object_ptr, VkPhysicalDeviceProperties physical_device_properties) {
    // If api version 1.1 or later, SetDeviceLoaderData will be in the loader
    auto chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
    assert(chain_info->u.pfnSetDeviceLoaderData);
    object_ptr->vkSetDeviceLoaderData = chain_info->u.pfnSetDeviceLoaderData;

    // Some devices have extremely high limits here, so set a reasonable max because we have to pad
    // the pipeline layout with dummy descriptor set layouts.
    object_ptr->adjusted_max_desc_sets = physical_device_properties.limits.maxBoundDescriptorSets;
    object_ptr->adjusted_max_desc_sets = std::min(33U, object_ptr->adjusted_max_desc_sets);

    // We can't do anything if there is only one.
    // Device probably not a legit Vulkan device, since there should be at least 4. Protect ourselves.
    if (object_ptr->adjusted_max_desc_sets == 1) {
        object_ptr->ReportSetupProblem(object_ptr->device, "Device can bind only a single descriptor set.");
        object_ptr->aborted = true;
        return;
    }
    object_ptr->desc_set_bind_index = object_ptr->adjusted_max_desc_sets - 1;

    VkResult result1 = UtilInitializeVma(object_ptr->physicalDevice, object_ptr->device, &object_ptr->vmaAllocator);
    assert(result1 == VK_SUCCESS);
    std::unique_ptr<UtilDescriptorSetManager> desc_set_manager(
        new UtilDescriptorSetManager(object_ptr->device, static_cast<uint32_t>(bindings.size())));

    const VkDescriptorSetLayoutCreateInfo debug_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0,
                                                                    static_cast<uint32_t>(bindings.size()), bindings.data()};

    const VkDescriptorSetLayoutCreateInfo dummy_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0,
                                                                    0, NULL};

    result1 = DispatchCreateDescriptorSetLayout(object_ptr->device, &debug_desc_layout_info, NULL, &object_ptr->debug_desc_layout);

    // This is a layout used to "pad" a pipeline layout to fill in any gaps to the selected bind index.
    VkResult result2 =
        DispatchCreateDescriptorSetLayout(object_ptr->device, &dummy_desc_layout_info, NULL, &object_ptr->dummy_desc_layout);

    assert((result1 == VK_SUCCESS) && (result2 == VK_SUCCESS));
    if ((result1 != VK_SUCCESS) || (result2 != VK_SUCCESS)) {
        object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create descriptor set layout.");
        if (result1 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->debug_desc_layout, NULL);
        }
        if (result2 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->dummy_desc_layout, NULL);
        }
        object_ptr->debug_desc_layout = VK_NULL_HANDLE;
        object_ptr->dummy_desc_layout = VK_NULL_HANDLE;
        object_ptr->aborted = true;
        return;
    }
    object_ptr->desc_set_manager = std::move(desc_set_manager);

    // Register callback to be called at any ResetCommandBuffer time
    object_ptr->SetCommandBufferResetCallback(
        [object_ptr](VkCommandBuffer command_buffer) -> void { object_ptr->ResetCommandBuffer(command_buffer); });
}
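// Tear down the per-queue barrier command buffers/pools and the debug/dummy descriptor set layouts created in
// UtilPostCallRecordCreateDevice.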
template <typename ObjectType>
void UtilPreCallRecordDestroyDevice(ObjectType *object_ptr) {
    for (auto &queue_barrier_command_info_kv : object_ptr->queue_barrier_command_infos) {
        UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_kv.second;

        DispatchFreeCommandBuffers(object_ptr->device, queue_barrier_command_info.barrier_command_pool, 1,
                                   &queue_barrier_command_info.barrier_command_buffer);
        queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;

        DispatchDestroyCommandPool(object_ptr->device, queue_barrier_command_info.barrier_command_pool, NULL);
        queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
    }
    object_ptr->queue_barrier_command_infos.clear();
    if (object_ptr->debug_desc_layout) {
        DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->debug_desc_layout, NULL);
        object_ptr->debug_desc_layout = VK_NULL_HANDLE;
    }
    if (object_ptr->dummy_desc_layout) {
        DispatchDestroyDescriptorSetLayout(object_ptr->device, object_ptr->dummy_desc_layout, NULL);
        object_ptr->dummy_desc_layout = VK_NULL_HANDLE;
    }
}

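// Pad the application's pipeline layout out to adjusted_max_desc_sets so the debug descriptor set can always be
// bound at the last (reserved) set index.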
template <typename ObjectType>
void UtilPreCallRecordCreatePipelineLayout(create_pipeline_layout_api_state *cpl_state, ObjectType *object_ptr,
                                           const VkPipelineLayoutCreateInfo *pCreateInfo) {
    // Modify the pipeline layout by:
    // 1. Copying the caller's descriptor set desc_layouts
    // 2. Filling in dummy descriptor layouts up to the max binding
    // 3. Filling in the debug descriptor layout at the max binding slot
    cpl_state->new_layouts.reserve(object_ptr->adjusted_max_desc_sets);
    cpl_state->new_layouts.insert(cpl_state->new_layouts.end(), &pCreateInfo->pSetLayouts[0],
                                  &pCreateInfo->pSetLayouts[pCreateInfo->setLayoutCount]);
    for (uint32_t i = pCreateInfo->setLayoutCount; i < object_ptr->adjusted_max_desc_sets - 1; ++i) {
        cpl_state->new_layouts.push_back(object_ptr->dummy_desc_layout);
    }
    cpl_state->new_layouts.push_back(object_ptr->debug_desc_layout);
    cpl_state->modified_create_info.pSetLayouts = cpl_state->new_layouts.data();
    cpl_state->modified_create_info.setLayoutCount = object_ptr->adjusted_max_desc_sets;
}

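// Compile-time traits that give the pipeline-creation helpers below uniform access to the stage count and shader
// modules of graphics, compute, and ray tracing pipeline create infos. Illustrative use, mirroring
// UtilPreCallRecordPipelineCreations ('create_info' stands for a VkGraphicsPipelineCreateInfo):
//
//     using Accessor = CreatePipelineTraits<VkGraphicsPipelineCreateInfo>;
//     const uint32_t stage_count = Accessor::GetStageCount(create_info);
//     VkShaderModule module = Accessor::GetShaderModule(create_info, 0);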
template <typename CreateInfo>
struct CreatePipelineTraits {};
template <>
struct CreatePipelineTraits<VkGraphicsPipelineCreateInfo> {
    using SafeType = safe_VkGraphicsPipelineCreateInfo;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->graphicsPipelineCI; }
    static uint32_t GetStageCount(const VkGraphicsPipelineCreateInfo &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkGraphicsPipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkComputePipelineCreateInfo> {
    using SafeType = safe_VkComputePipelineCreateInfo;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->computePipelineCI; }
    static uint32_t GetStageCount(const VkComputePipelineCreateInfo &createInfo) { return 1; }
    static VkShaderModule GetShaderModule(const VkComputePipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.stage.module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        assert(stage == 0);
        createInfo->stage.module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoNV> {
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoNV &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoNV &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoKHR> {
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoKHR &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoKHR &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};

// Examine the pipelines to see if they use the debug descriptor set binding index.
// If any do, create new non-instrumented shader modules and use them to replace the instrumented
// shaders in the pipeline. Return the (possibly) modified create infos to the caller.
template <typename CreateInfo, typename SafeCreateInfo, typename ObjectType>
void UtilPreCallRecordPipelineCreations(uint32_t count, const CreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
                                        VkPipeline *pPipelines, std::vector<std::shared_ptr<PIPELINE_STATE>> &pipe_state,
                                        std::vector<SafeCreateInfo> *new_pipeline_create_infos,
                                        const VkPipelineBindPoint bind_point, ObjectType *object_ptr) {
    using Accessor = CreatePipelineTraits<CreateInfo>;
    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
        return;
    }

    // Walk through all the pipelines, make a copy of each, and flag each pipeline that contains a shader that uses the debug
    // descriptor set index.
    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
        uint32_t stageCount = Accessor::GetStageCount(pCreateInfos[pipeline]);
        new_pipeline_create_infos->push_back(Accessor::GetPipelineCI(pipe_state[pipeline].get()));

        bool replace_shaders = false;
        if (pipe_state[pipeline]->active_slots.find(object_ptr->desc_set_bind_index) != pipe_state[pipeline]->active_slots.end()) {
            replace_shaders = true;
        }
        // If the app requests all available sets, the pipeline layout was not modified at pipeline layout creation and the
        // already-instrumented shaders need to be replaced with uninstrumented shaders.
        if (pipe_state[pipeline]->pipeline_layout->set_layouts.size() >= object_ptr->adjusted_max_desc_sets) {
            replace_shaders = true;
        }

        if (replace_shaders) {
            for (uint32_t stage = 0; stage < stageCount; ++stage) {
                const SHADER_MODULE_STATE *shader =
                    object_ptr->GetShaderModuleState(Accessor::GetShaderModule(pCreateInfos[pipeline], stage));

                VkShaderModule shader_module;
                auto create_info = LvlInitStruct<VkShaderModuleCreateInfo>();
                create_info.pCode = shader->words.data();
                create_info.codeSize = shader->words.size() * sizeof(uint32_t);
                VkResult result = DispatchCreateShaderModule(object_ptr->device, &create_info, pAllocator, &shader_module);
                if (result == VK_SUCCESS) {
                    Accessor::SetShaderModule(&(*new_pipeline_create_infos)[pipeline], shader_module, stage);
                } else {
                    object_ptr->ReportSetupProblem(object_ptr->device,
                                                   "Unable to replace instrumented shader with non-instrumented one. "
                                                   "Device could become unstable.");
                }
            }
        }
    }
}
// For every pipeline:
// - For every shader in a pipeline:
//   - If the shader had to be replaced in PreCallRecord (because the pipeline is using the debug desc set index):
//     - Destroy it since it has been bound into the pipeline by now. This is our only chance to delete it.
//   - Track the shader in the shader_map
//   - Save the shader binary if it contains debug code
template <typename CreateInfo, typename ObjectType>
void UtilPostCallRecordPipelineCreations(const uint32_t count, const CreateInfo *pCreateInfos,
                                         const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                         const VkPipelineBindPoint bind_point, ObjectType *object_ptr) {
    using Accessor = CreatePipelineTraits<CreateInfo>;
    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
        return;
    }
    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
        auto pipeline_state = object_ptr->ValidationStateTracker::GetPipelineState(pPipelines[pipeline]);
        if (nullptr == pipeline_state) continue;

        uint32_t stageCount = 0;
        if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
            stageCount = pipeline_state->graphicsPipelineCI.stageCount;
        } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
            stageCount = 1;
        } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
            stageCount = pipeline_state->raytracingPipelineCI.stageCount;
        } else {
            assert(false);
        }

        for (uint32_t stage = 0; stage < stageCount; ++stage) {
            if (pipeline_state->active_slots.find(object_ptr->desc_set_bind_index) != pipeline_state->active_slots.end()) {
                DispatchDestroyShaderModule(object_ptr->device, Accessor::GetShaderModule(pCreateInfos[pipeline], stage),
                                            pAllocator);
            }

            const SHADER_MODULE_STATE *shader_state = nullptr;
            if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->graphicsPipelineCI.pStages[stage].module);
            } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                assert(stage == 0);
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->computePipelineCI.stage.module);
            } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                shader_state = object_ptr->GetShaderModuleState(pipeline_state->raytracingPipelineCI.pStages[stage].module);
            } else {
                assert(false);
            }

            std::vector<unsigned int> code;
            // Save the shader binary
            // The core_validation ShaderModule tracker saves the binary too, but discards it when the ShaderModule
            // is destroyed. Applications may destroy ShaderModules after they are placed in a pipeline and before
            // the pipeline is used, so we have to keep another copy.
            if (shader_state && shader_state->has_valid_spirv) code = shader_state->words;

            object_ptr->shader_map[shader_state->gpu_validation_shader_id].pipeline = pipeline_state->pipeline();
            // Be careful to use the originally bound (instrumented) shader here, even if PreCallRecord had to back it
            // out with a non-instrumented shader. The non-instrumented shader (found in pCreateInfo) was destroyed above.
            VkShaderModule shader_module = VK_NULL_HANDLE;
            if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                shader_module = pipeline_state->graphicsPipelineCI.pStages[stage].module;
            } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                assert(stage == 0);
                shader_module = pipeline_state->computePipelineCI.stage.module;
            } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                shader_module = pipeline_state->raytracingPipelineCI.pStages[stage].module;
            } else {
                assert(false);
            }
            object_ptr->shader_map[shader_state->gpu_validation_shader_id].shader_module = shader_module;
            object_ptr->shader_map[shader_state->gpu_validation_shader_id].pgm = std::move(code);
        }
    }
}
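// Copy pipeline creation feedback from the layer's (safe) create infos back into the structures the application
// chained on its own create infos, since the driver wrote the feedback into the layer's modified copies.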
template <typename CreateInfos, typename SafeCreateInfos>
void UtilCopyCreatePipelineFeedbackData(const uint32_t count, CreateInfos *pCreateInfos, SafeCreateInfos *pSafeCreateInfos) {
    for (uint32_t i = 0; i < count; i++) {
        auto src_feedback_struct = LvlFindInChain<VkPipelineCreationFeedbackCreateInfoEXT>(pSafeCreateInfos[i].pNext);
        if (!src_feedback_struct) return;
        auto dst_feedback_struct = const_cast<VkPipelineCreationFeedbackCreateInfoEXT *>(
            LvlFindInChain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext));
        *dst_feedback_struct->pPipelineCreationFeedback = *src_feedback_struct->pPipelineCreationFeedback;
        for (uint32_t j = 0; j < src_feedback_struct->pipelineStageCreationFeedbackCount; j++) {
            dst_feedback_struct->pPipelineStageCreationFeedbacks[j] = src_feedback_struct->pPipelineStageCreationFeedbacks[j];
        }
    }
}

template <typename ObjectType>
// For the given command buffer, map its debug data buffers and read their contents for analysis.
void UtilProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE *cb_node, ObjectType *object_ptr) {
    if (cb_node && (cb_node->hasDrawCmd || cb_node->hasTraceRaysCmd || cb_node->hasDispatchCmd)) {
        auto gpu_buffer_list = object_ptr->GetBufferInfo(cb_node->commandBuffer());
        uint32_t draw_index = 0;
        uint32_t compute_index = 0;
        uint32_t ray_trace_index = 0;

        for (auto &buffer_info : gpu_buffer_list) {
            char *pData;

            uint32_t operation_index = 0;
            if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                operation_index = draw_index;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                operation_index = compute_index;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                operation_index = ray_trace_index;
            } else {
                assert(false);
            }

            VkResult result = vmaMapMemory(object_ptr->vmaAllocator, buffer_info.output_mem_block.allocation, (void **)&pData);
            if (result == VK_SUCCESS) {
                object_ptr->AnalyzeAndGenerateMessages(cb_node->commandBuffer(), queue, buffer_info, operation_index,
                                                       (uint32_t *)pData);
                vmaUnmapMemory(object_ptr->vmaAllocator, buffer_info.output_mem_block.allocation);
            }

            if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
                draw_index++;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
                compute_index++;
            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
                ray_trace_index++;
            } else {
                assert(false);
            }
        }
    }
}
template <typename ObjectType>
// Submit a memory barrier on graphics queues.
// Lazy-create and record the needed command buffer.
void UtilSubmitBarrier(VkQueue queue, ObjectType *object_ptr) {
    auto queue_barrier_command_info_it = object_ptr->queue_barrier_command_infos.emplace(queue, UtilQueueBarrierCommandInfo{});
    if (queue_barrier_command_info_it.second) {
        UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;

        uint32_t queue_family_index = 0;

        auto queue_state_it = object_ptr->queueMap.find(queue);
        if (queue_state_it != object_ptr->queueMap.end()) {
            queue_family_index = queue_state_it->second.queueFamilyIndex;
        }

        VkResult result = VK_SUCCESS;

        auto pool_create_info = LvlInitStruct<VkCommandPoolCreateInfo>();
        pool_create_info.queueFamilyIndex = queue_family_index;
        result = DispatchCreateCommandPool(object_ptr->device, &pool_create_info, nullptr,
                                           &queue_barrier_command_info.barrier_command_pool);
        if (result != VK_SUCCESS) {
            object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create command pool for barrier CB.");
            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
            return;
        }

        auto buffer_alloc_info = LvlInitStruct<VkCommandBufferAllocateInfo>();
        buffer_alloc_info.commandPool = queue_barrier_command_info.barrier_command_pool;
        buffer_alloc_info.commandBufferCount = 1;
        buffer_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        result = DispatchAllocateCommandBuffers(object_ptr->device, &buffer_alloc_info,
                                                &queue_barrier_command_info.barrier_command_buffer);
        if (result != VK_SUCCESS) {
            object_ptr->ReportSetupProblem(object_ptr->device, "Unable to create barrier command buffer.");
            DispatchDestroyCommandPool(object_ptr->device, queue_barrier_command_info.barrier_command_pool, nullptr);
            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
            queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;
            return;
        }

        // Hook up command buffer dispatch
        object_ptr->vkSetDeviceLoaderData(object_ptr->device, queue_barrier_command_info.barrier_command_buffer);

        // Record a global memory barrier to force availability of device memory operations to the host domain.
        auto command_buffer_begin_info = LvlInitStruct<VkCommandBufferBeginInfo>();
        result = DispatchBeginCommandBuffer(queue_barrier_command_info.barrier_command_buffer, &command_buffer_begin_info);
        if (result == VK_SUCCESS) {
            auto memory_barrier = LvlInitStruct<VkMemoryBarrier>();
            memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
            memory_barrier.dstAccessMask = VK_ACCESS_HOST_READ_BIT;

            DispatchCmdPipelineBarrier(queue_barrier_command_info.barrier_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                                       VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr);
            DispatchEndCommandBuffer(queue_barrier_command_info.barrier_command_buffer);
        }
    }

    UtilQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;
    if (queue_barrier_command_info.barrier_command_buffer != VK_NULL_HANDLE) {
        auto submit_info = LvlInitStruct<VkSubmitInfo>();
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &queue_barrier_command_info.barrier_command_buffer;
        DispatchQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
    }
}
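// Helpers that turn the raw debug records written by instrumented shaders into human-readable messages: the shader
// stage, the offending command buffer/pipeline/shader handles, and (when the SPIR-V carries debug info) the source
// file and line.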
void UtilGenerateStageMessage(const uint32_t *debug_record, std::string &msg);
void UtilGenerateCommonMessage(const debug_report_data *report_data, const VkCommandBuffer commandBuffer,
                               const uint32_t *debug_record, const VkShaderModule shader_module_handle,
                               const VkPipeline pipeline_handle, const VkPipelineBindPoint pipeline_bind_point,
                               const uint32_t operation_index, std::string &msg);
void UtilGenerateSourceMessages(const std::vector<unsigned int> &pgm, const uint32_t *debug_record, bool from_printf,
                                std::string &filename_msg, std::string &source_msg);