blob: 3fbda654104b31cb85e3fb15f29f3b5731be9bd3 [file] [log] [blame]
sfricke-samsungef15e482022-01-26 11:32:49 -08001/* Copyright (c) 2020-2022 The Khronos Group Inc.
2 * Copyright (c) 2020-2022 Valve Corporation
3 * Copyright (c) 2020-2022 LunarG, Inc.
Tony-LunarG1dce2392019-10-23 16:49:29 -06004 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 *
17 * Author: Tony Barbour <tony@lunarg.com>
18 */
19
#include "gpu_utils.h"
#include "descriptor_sets.h"
#include "spirv-tools/libspirv.h"
#include "spirv-tools/optimizer.hpp"
#include "spirv-tools/instrument.hpp"
#include <spirv/unified1/spirv.hpp>
#include <algorithm>
#include <regex>
#include <utility>
28
John Zulauf088a54d2022-06-09 10:12:07 -060029#ifdef _MSC_VER
30#pragma warning(push)
31#pragma warning(disable : 4189)
32#endif
33
Tony-LunarG1dce2392019-10-23 16:49:29 -060034#define VMA_IMPLEMENTATION
35// This define indicates that we will supply Vulkan function pointers at initialization
36#define VMA_STATIC_VULKAN_FUNCTIONS 0
37#include "vk_mem_alloc.h"
38
John Zulauf088a54d2022-06-09 10:12:07 -060039#ifdef _MSC_VER
40#pragma warning(pop)
41#endif
42
// Implementation for Descriptor Set Manager class
// Manages a growing set of descriptor pools for the layer's internal (debug) descriptor sets.
// 'numBindingsInSet' is the number of bindings in each set handed out, used to size pools.
UtilDescriptorSetManager::UtilDescriptorSetManager(VkDevice device, uint32_t numBindingsInSet)
    : device(device), numBindingsInSet(numBindingsInSet) {}
46
Tony-LunarGb5fae462020-03-05 12:43:25 -070047UtilDescriptorSetManager::~UtilDescriptorSetManager() {
Tony-LunarG1dce2392019-10-23 16:49:29 -060048 for (auto &pool : desc_pool_map_) {
49 DispatchDestroyDescriptorPool(device, pool.first, NULL);
50 }
51 desc_pool_map_.clear();
52}
53
Tony-LunarGb5fae462020-03-05 12:43:25 -070054VkResult UtilDescriptorSetManager::GetDescriptorSet(VkDescriptorPool *desc_pool, VkDescriptorSetLayout ds_layout,
55 VkDescriptorSet *desc_set) {
Tony-LunarG1dce2392019-10-23 16:49:29 -060056 std::vector<VkDescriptorSet> desc_sets;
57 VkResult result = GetDescriptorSets(1, desc_pool, ds_layout, &desc_sets);
Jeremy Gebbenefd97802022-03-28 16:45:05 -060058 assert(result == VK_SUCCESS);
Tony-LunarG1dce2392019-10-23 16:49:29 -060059 if (result == VK_SUCCESS) {
60 *desc_set = desc_sets[0];
61 }
62 return result;
63}
64
Tony-LunarGb5fae462020-03-05 12:43:25 -070065VkResult UtilDescriptorSetManager::GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, VkDescriptorSetLayout ds_layout,
66 std::vector<VkDescriptorSet> *desc_sets) {
Jeremy Gebbenfcfc33c2022-03-28 15:31:29 -060067 auto guard = Lock();
Tony-LunarG1dce2392019-10-23 16:49:29 -060068 const uint32_t default_pool_size = kItemsPerChunk;
69 VkResult result = VK_SUCCESS;
70 VkDescriptorPool pool_to_use = VK_NULL_HANDLE;
71
Jeremy Gebbenefd97802022-03-28 16:45:05 -060072 assert(count > 0);
Tony-LunarG1dce2392019-10-23 16:49:29 -060073 if (0 == count) {
74 return result;
75 }
76 desc_sets->clear();
77 desc_sets->resize(count);
78
79 for (auto &pool : desc_pool_map_) {
80 if (pool.second.used + count < pool.second.size) {
81 pool_to_use = pool.first;
82 break;
83 }
84 }
85 if (VK_NULL_HANDLE == pool_to_use) {
86 uint32_t pool_count = default_pool_size;
87 if (count > default_pool_size) {
88 pool_count = count;
89 }
90 const VkDescriptorPoolSize size_counts = {
91 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
92 pool_count * numBindingsInSet,
93 };
Nathaniel Cesariofc6291e2021-04-06 00:22:15 -060094 auto desc_pool_info = LvlInitStruct<VkDescriptorPoolCreateInfo>();
Tony-LunarG1dce2392019-10-23 16:49:29 -060095 desc_pool_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
96 desc_pool_info.maxSets = pool_count;
97 desc_pool_info.poolSizeCount = 1;
98 desc_pool_info.pPoolSizes = &size_counts;
99 result = DispatchCreateDescriptorPool(device, &desc_pool_info, NULL, &pool_to_use);
100 assert(result == VK_SUCCESS);
101 if (result != VK_SUCCESS) {
102 return result;
103 }
104 desc_pool_map_[pool_to_use].size = desc_pool_info.maxSets;
105 desc_pool_map_[pool_to_use].used = 0;
106 }
107 std::vector<VkDescriptorSetLayout> desc_layouts(count, ds_layout);
108
109 VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, NULL, pool_to_use, count,
110 desc_layouts.data()};
111
112 result = DispatchAllocateDescriptorSets(device, &alloc_info, desc_sets->data());
113 assert(result == VK_SUCCESS);
114 if (result != VK_SUCCESS) {
115 return result;
116 }
117 *pool = pool_to_use;
118 desc_pool_map_[pool_to_use].used += count;
119 return result;
120}
121
Tony-LunarGb5fae462020-03-05 12:43:25 -0700122void UtilDescriptorSetManager::PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set) {
Jeremy Gebbenfcfc33c2022-03-28 15:31:29 -0600123 auto guard = Lock();
Tony-LunarG1dce2392019-10-23 16:49:29 -0600124 auto iter = desc_pool_map_.find(desc_pool);
125 if (iter != desc_pool_map_.end()) {
126 VkResult result = DispatchFreeDescriptorSets(device, desc_pool, 1, &desc_set);
127 assert(result == VK_SUCCESS);
128 if (result != VK_SUCCESS) {
129 return;
130 }
131 desc_pool_map_[desc_pool].used--;
132 if (0 == desc_pool_map_[desc_pool].used) {
133 DispatchDestroyDescriptorPool(device, desc_pool, NULL);
134 desc_pool_map_.erase(desc_pool);
135 }
136 }
137 return;
138}
139
// Trampolines to make VMA call Dispatch for Vulkan calls
// VMA is compiled into the validation layer, so it must not call the loader's exported
// entry points directly; each wrapper below forwards its call through the layer's
// dispatch table so the next layer/driver in the chain is invoked correctly.
static VKAPI_ATTR void VKAPI_CALL gpuVkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
                                                                   VkPhysicalDeviceProperties *pProperties) {
    DispatchGetPhysicalDeviceProperties(physicalDevice, pProperties);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
                                                                         VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
    DispatchGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
}
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    return DispatchAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
    DispatchFreeMemory(device, memory, pAllocator);
}
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
                                                     VkMemoryMapFlags flags, void **ppData) {
    return DispatchMapMemory(device, memory, offset, size, flags, ppData);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkUnmapMemory(VkDevice device, VkDeviceMemory memory) { DispatchUnmapMemory(device, memory); }
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                                   const VkMappedMemoryRange *pMemoryRanges) {
    return DispatchFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
}
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                                        const VkMappedMemoryRange *pMemoryRanges) {
    return DispatchInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
}
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
                                                            VkDeviceSize memoryOffset) {
    return DispatchBindBufferMemory(device, buffer, memory, memoryOffset);
}
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory,
                                                           VkDeviceSize memoryOffset) {
    return DispatchBindImageMemory(device, image, memory, memoryOffset);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                                   VkMemoryRequirements *pMemoryRequirements) {
    DispatchGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkGetImageMemoryRequirements(VkDevice device, VkImage image,
                                                                  VkMemoryRequirements *pMemoryRequirements) {
    DispatchGetImageMemoryRequirements(device, image, pMemoryRequirements);
}
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
    return DispatchCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    return DispatchDestroyBuffer(device, buffer, pAllocator);
}
static VKAPI_ATTR VkResult VKAPI_CALL gpuVkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
    return DispatchCreateImage(device, pCreateInfo, pAllocator, pImage);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    DispatchDestroyImage(device, image, pAllocator);
}
static VKAPI_ATTR void VKAPI_CALL gpuVkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                                     uint32_t regionCount, const VkBufferCopy *pRegions) {
    DispatchCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
}
203
Tony-LunarGb5fae462020-03-05 12:43:25 -0700204VkResult UtilInitializeVma(VkPhysicalDevice physical_device, VkDevice device, VmaAllocator *pAllocator) {
Tony-LunarG1dce2392019-10-23 16:49:29 -0600205 VmaVulkanFunctions functions;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700206 VmaAllocatorCreateInfo allocator_info = {};
207 allocator_info.device = device;
208 allocator_info.physicalDevice = physical_device;
Tony-LunarG1dce2392019-10-23 16:49:29 -0600209
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700210 functions.vkGetPhysicalDeviceProperties = static_cast<PFN_vkGetPhysicalDeviceProperties>(gpuVkGetPhysicalDeviceProperties);
211 functions.vkGetPhysicalDeviceMemoryProperties =
212 static_cast<PFN_vkGetPhysicalDeviceMemoryProperties>(gpuVkGetPhysicalDeviceMemoryProperties);
213 functions.vkAllocateMemory = static_cast<PFN_vkAllocateMemory>(gpuVkAllocateMemory);
214 functions.vkFreeMemory = static_cast<PFN_vkFreeMemory>(gpuVkFreeMemory);
215 functions.vkMapMemory = static_cast<PFN_vkMapMemory>(gpuVkMapMemory);
216 functions.vkUnmapMemory = static_cast<PFN_vkUnmapMemory>(gpuVkUnmapMemory);
217 functions.vkFlushMappedMemoryRanges = static_cast<PFN_vkFlushMappedMemoryRanges>(gpuVkFlushMappedMemoryRanges);
218 functions.vkInvalidateMappedMemoryRanges = static_cast<PFN_vkInvalidateMappedMemoryRanges>(gpuVkInvalidateMappedMemoryRanges);
219 functions.vkBindBufferMemory = static_cast<PFN_vkBindBufferMemory>(gpuVkBindBufferMemory);
220 functions.vkBindImageMemory = static_cast<PFN_vkBindImageMemory>(gpuVkBindImageMemory);
221 functions.vkGetBufferMemoryRequirements = static_cast<PFN_vkGetBufferMemoryRequirements>(gpuVkGetBufferMemoryRequirements);
222 functions.vkGetImageMemoryRequirements = static_cast<PFN_vkGetImageMemoryRequirements>(gpuVkGetImageMemoryRequirements);
223 functions.vkCreateBuffer = static_cast<PFN_vkCreateBuffer>(gpuVkCreateBuffer);
224 functions.vkDestroyBuffer = static_cast<PFN_vkDestroyBuffer>(gpuVkDestroyBuffer);
225 functions.vkCreateImage = static_cast<PFN_vkCreateImage>(gpuVkCreateImage);
226 functions.vkDestroyImage = static_cast<PFN_vkDestroyImage>(gpuVkDestroyImage);
227 functions.vkCmdCopyBuffer = static_cast<PFN_vkCmdCopyBuffer>(gpuVkCmdCopyBuffer);
228 allocator_info.pVulkanFunctions = &functions;
Tony-LunarG1dce2392019-10-23 16:49:29 -0600229
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700230 return vmaCreateAllocator(&allocator_info, pAllocator);
Tony-LunarG1dce2392019-10-23 16:49:29 -0600231}
232
// Thin constructor: forwards all arguments to the tracked CMD_BUFFER_STATE base;
// the GpuAssistedBase pointer doubles as the validation-object owner.
gpu_utils_state::CommandBuffer::CommandBuffer(GpuAssistedBase *ga, VkCommandBuffer cb,
                                              const VkCommandBufferAllocateInfo *pCreateInfo, const COMMAND_POOL_STATE *pool)
    : CMD_BUFFER_STATE(ga, cb, pCreateInfo, pool) {}
236
Jeremy Gebben04697b02022-03-23 16:18:12 -0600237ReadLockGuard GpuAssistedBase::ReadLock() {
238 if (fine_grained_locking) {
239 return ReadLockGuard(validation_object_mutex, std::defer_lock);
240 } else {
241 return ReadLockGuard(validation_object_mutex);
242 }
243}
244
245WriteLockGuard GpuAssistedBase::WriteLock() {
246 if (fine_grained_locking) {
247 return WriteLockGuard(validation_object_mutex, std::defer_lock);
248 } else {
249 return WriteLockGuard(validation_object_mutex);
250 }
251}
252
// Before the driver sees vkCreateDevice, enable every physical-device feature that GPU-AV
// wants (desired_features) and the device supports, by editing the modified create info
// the layer will actually pass down ('modified_ci').
void GpuAssistedBase::PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                                const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, void *modified_ci) {
    ValidationStateTracker::PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, modified_ci);
    VkPhysicalDeviceFeatures *features = nullptr;
    // Use a local variable to query features since this method runs in the instance validation object.
    // To avoid confusion and race conditions about which physical device's features are stored in the
    // 'supported_devices' member variable, it will only be set in the device validation objects.
    // See CreateDevice() below.
    VkPhysicalDeviceFeatures gpu_supported_features;
    DispatchGetPhysicalDeviceFeatures(gpu, &gpu_supported_features);
    auto modified_create_info = static_cast<VkDeviceCreateInfo *>(modified_ci);
    if (modified_create_info->pEnabledFeatures) {
        // If pEnabledFeatures, VkPhysicalDeviceFeatures2 in pNext chain is not allowed
        features = const_cast<VkPhysicalDeviceFeatures *>(modified_create_info->pEnabledFeatures);
    } else {
        // The app may instead have chained a VkPhysicalDeviceFeatures2; edit that in place.
        VkPhysicalDeviceFeatures2 *features2 = nullptr;
        features2 = const_cast<VkPhysicalDeviceFeatures2 *>(LvlFindInChain<VkPhysicalDeviceFeatures2>(modified_create_info->pNext));
        if (features2) features = &features2->features;
    }
    // If the app supplied no feature struct at all, build one locally ('new_features') and
    // attach it to the modified create info at the end.
    VkPhysicalDeviceFeatures new_features = {};
    VkBool32 *desired = reinterpret_cast<VkBool32 *>(&desired_features);
    VkBool32 *feature_ptr;
    if (features) {
        feature_ptr = reinterpret_cast<VkBool32 *>(features);
    } else {
        feature_ptr = reinterpret_cast<VkBool32 *>(&new_features);
    }
    VkBool32 *supported = reinterpret_cast<VkBool32 *>(&supported_features);
    // Walk the feature structs as flat arrays of VkBool32, enabling each feature that is
    // both desired and supported. NOTE(review): assumes VkPhysicalDeviceFeatures is laid out
    // as a contiguous sequence of VkBool32 members with no padding — true for this struct's
    // definition, but worth re-checking if the loop is ever reused for another type.
    for (size_t i = 0; i < sizeof(VkPhysicalDeviceFeatures); i += (sizeof(VkBool32))) {
        if (*supported && *desired) {
            *feature_ptr = true;
        }
        supported++;
        desired++;
        feature_ptr++;
    }
    if (!features) {
        // No app-provided feature struct: install our locally-built one. The existing
        // pEnabledFeatures is null here, so the delete is a no-op kept for safety.
        delete modified_create_info->pEnabledFeatures;
        modified_create_info->pEnabledFeatures = new VkPhysicalDeviceFeatures(new_features);
    }
}
294
// Device-object initialization for GPU-AV: caches the loader callback, clamps the usable
// descriptor-set count, creates the VMA allocator, the descriptor set manager, and the
// debug/dummy descriptor set layouts. Any failure sets 'aborted' and disables GPU-AV.
void GpuAssistedBase::CreateDevice(const VkDeviceCreateInfo *pCreateInfo) {
    ValidationStateTracker::CreateDevice(pCreateInfo);
    // If api version 1.1 or later, SetDeviceLoaderData will be in the loader
    auto chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
    assert(chain_info->u.pfnSetDeviceLoaderData);
    vkSetDeviceLoaderData = chain_info->u.pfnSetDeviceLoaderData;

    // Some devices have extremely high limits here, so set a reasonable max because we have to pad
    // the pipeline layout with dummy descriptor set layouts.
    adjusted_max_desc_sets = phys_dev_props.limits.maxBoundDescriptorSets;
    adjusted_max_desc_sets = std::min(33U, adjusted_max_desc_sets);

    // We can't do anything if there is only one.
    // Device probably not a legit Vulkan device, since there should be at least 4. Protect ourselves.
    if (adjusted_max_desc_sets == 1) {
        ReportSetupProblem(device, "Device can bind only a single descriptor set.");
        aborted = true;
        return;
    }
    // GPU-AV claims the highest usable set slot for its own debug descriptor set.
    desc_set_bind_index = adjusted_max_desc_sets - 1;

    VkResult result1 = UtilInitializeVma(physical_device, device, &vmaAllocator);
    assert(result1 == VK_SUCCESS);
    desc_set_manager = layer_data::make_unique<UtilDescriptorSetManager>(device, static_cast<uint32_t>(bindings_.size()));

    // Layout describing the layer's debug bindings ('bindings_' is populated by the derived class).
    const VkDescriptorSetLayoutCreateInfo debug_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0,
                                                                    static_cast<uint32_t>(bindings_.size()), bindings_.data()};

    const VkDescriptorSetLayoutCreateInfo dummy_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0, 0,
                                                                    NULL};

    result1 = DispatchCreateDescriptorSetLayout(device, &debug_desc_layout_info, NULL, &debug_desc_layout);

    // This is a layout used to "pad" a pipeline layout to fill in any gaps to the selected bind index.
    VkResult result2 = DispatchCreateDescriptorSetLayout(device, &dummy_desc_layout_info, NULL, &dummy_desc_layout);

    assert((result1 == VK_SUCCESS) && (result2 == VK_SUCCESS));
    if ((result1 != VK_SUCCESS) || (result2 != VK_SUCCESS)) {
        // Partial failure: destroy whichever layout *did* get created, then bail out.
        ReportSetupProblem(device, "Unable to create descriptor set layout.");
        if (result1 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(device, debug_desc_layout, NULL);
        }
        if (result2 == VK_SUCCESS) {
            DispatchDestroyDescriptorSetLayout(device, dummy_desc_layout, NULL);
        }
        debug_desc_layout = VK_NULL_HANDLE;
        dummy_desc_layout = VK_NULL_HANDLE;
        aborted = true;
        return;
    }
}
346
// Tear down GPU-AV device resources in the reverse order of CreateDevice().
void GpuAssistedBase::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    if (debug_desc_layout) {
        DispatchDestroyDescriptorSetLayout(device, debug_desc_layout, NULL);
        debug_desc_layout = VK_NULL_HANDLE;
    }
    if (dummy_desc_layout) {
        DispatchDestroyDescriptorSetLayout(device, dummy_desc_layout, NULL);
        dummy_desc_layout = VK_NULL_HANDLE;
    }
    ValidationStateTracker::PreCallRecordDestroyDevice(device, pAllocator);
    // State Tracker can end up making vma calls through callbacks - don't destroy allocator until ST is done
    if (vmaAllocator) {
        vmaDestroyAllocator(vmaAllocator);
    }
    desc_set_manager.reset();
}
363
// Queue wrapper that keeps a back-reference to the owning GpuAssistedBase so the
// lazily-created barrier command buffer (see SubmitBarrier) can be cleaned up.
gpu_utils_state::Queue::Queue(GpuAssistedBase &state, VkQueue q, uint32_t index, VkDeviceQueueCreateFlags flags)
    : QUEUE_STATE(q, index, flags), state_(state) {}
366
// Release the lazily-created barrier resources: the command buffer must be freed
// before its pool is destroyed.
gpu_utils_state::Queue::~Queue() {
    if (barrier_command_buffer_) {
        DispatchFreeCommandBuffers(state_.device, barrier_command_pool_, 1, &barrier_command_buffer_);
        barrier_command_buffer_ = VK_NULL_HANDLE;
    }
    if (barrier_command_pool_) {
        DispatchDestroyCommandPool(state_.device, barrier_command_pool_, NULL);
        barrier_command_pool_ = VK_NULL_HANDLE;
    }
}
377
// Submit a memory barrier on graphics queues.
// Lazy-create and record the needed command buffer.
// The recorded command buffer is reusable, so it is built once per queue and then
// resubmitted on every call; on any setup failure the pool/buffer are torn down and
// subsequent calls become no-ops for the record step.
void gpu_utils_state::Queue::SubmitBarrier() {
    if (barrier_command_pool_ == VK_NULL_HANDLE) {
        VkResult result = VK_SUCCESS;

        auto pool_create_info = LvlInitStruct<VkCommandPoolCreateInfo>();
        pool_create_info.queueFamilyIndex = queueFamilyIndex;
        result = DispatchCreateCommandPool(state_.device, &pool_create_info, nullptr, &barrier_command_pool_);
        if (result != VK_SUCCESS) {
            state_.ReportSetupProblem(state_.device, "Unable to create command pool for barrier CB.");
            barrier_command_pool_ = VK_NULL_HANDLE;
            return;
        }

        auto buffer_alloc_info = LvlInitStruct<VkCommandBufferAllocateInfo>();
        buffer_alloc_info.commandPool = barrier_command_pool_;
        buffer_alloc_info.commandBufferCount = 1;
        buffer_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        result = DispatchAllocateCommandBuffers(state_.device, &buffer_alloc_info, &barrier_command_buffer_);
        if (result != VK_SUCCESS) {
            // Allocation failed: roll back the pool so the next call retries from scratch.
            state_.ReportSetupProblem(state_.device, "Unable to create barrier command buffer.");
            DispatchDestroyCommandPool(state_.device, barrier_command_pool_, nullptr);
            barrier_command_pool_ = VK_NULL_HANDLE;
            barrier_command_buffer_ = VK_NULL_HANDLE;
            return;
        }

        // Hook up command buffer dispatch
        state_.vkSetDeviceLoaderData(state_.device, barrier_command_buffer_);

        // Record a global memory barrier to force availability of device memory operations to the host domain.
        auto command_buffer_begin_info = LvlInitStruct<VkCommandBufferBeginInfo>();
        result = DispatchBeginCommandBuffer(barrier_command_buffer_, &command_buffer_begin_info);
        if (result == VK_SUCCESS) {
            auto memory_barrier = LvlInitStruct<VkMemoryBarrier>();
            memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
            memory_barrier.dstAccessMask = VK_ACCESS_HOST_READ_BIT;
            DispatchCmdPipelineBarrier(barrier_command_buffer_, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0,
                                       1, &memory_barrier, 0, nullptr, 0, nullptr);
            DispatchEndCommandBuffer(barrier_command_buffer_);
        }
        // NOTE(review): if BeginCommandBuffer fails, the (unrecorded) command buffer is still
        // submitted below — confirm whether that case should also reset barrier_command_buffer_.
    }
    if (barrier_command_buffer_ != VK_NULL_HANDLE) {
        auto submit_info = LvlInitStruct<VkSubmitInfo>();
        submit_info.commandBufferCount = 1;
        submit_info.pCommandBuffers = &barrier_command_buffer_;
        DispatchQueueSubmit(QUEUE_STATE::Queue(), 1, &submit_info, VK_NULL_HANDLE);
    }
}
428
Jeremy Gebben5ca80b32022-04-11 10:58:39 -0600429bool GpuAssistedBase::CommandBufferNeedsProcessing(VkCommandBuffer command_buffer) const {
Jeremy Gebben04697b02022-03-23 16:18:12 -0600430 auto cb_node = GetRead<gpu_utils_state::CommandBuffer>(command_buffer);
Jeremy Gebben5ca80b32022-04-11 10:58:39 -0600431 if (cb_node->NeedsProcessing()) {
432 return true;
433 }
434 for (const auto *secondary_cb : cb_node->linkedCommandBuffers) {
435 auto secondary_cb_node = static_cast<const gpu_utils_state::CommandBuffer *>(secondary_cb);
Jeremy Gebben04697b02022-03-23 16:18:12 -0600436 auto guard = secondary_cb_node->ReadLock();
Jeremy Gebben5ca80b32022-04-11 10:58:39 -0600437 if (secondary_cb_node->NeedsProcessing()) {
438 return true;
439 }
440 }
441 return false;
442}
443
444void GpuAssistedBase::ProcessCommandBuffer(VkQueue queue, VkCommandBuffer command_buffer) {
Jeremy Gebben04697b02022-03-23 16:18:12 -0600445 auto cb_node = GetWrite<gpu_utils_state::CommandBuffer>(command_buffer);
Jeremy Gebben5ca80b32022-04-11 10:58:39 -0600446
447 cb_node->Process(queue);
448 for (auto *secondary_cmd_base : cb_node->linkedCommandBuffers) {
449 auto *secondary_cb_node = static_cast<gpu_utils_state::CommandBuffer *>(secondary_cmd_base);
Jeremy Gebben04697b02022-03-23 16:18:12 -0600450 auto guard = secondary_cb_node->WriteLock();
Jeremy Gebben5ca80b32022-04-11 10:58:39 -0600451 secondary_cb_node->Process(queue);
452 }
453}
454
Jeremy Gebbenfcfc33c2022-03-28 15:31:29 -0600455// Issue a memory barrier to make GPU-written data available to host.
456// Wait for the queue to complete execution.
457// Check the debug buffers for all the command buffers that were submitted.
458void GpuAssistedBase::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
459 VkResult result) {
460 ValidationStateTracker::PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
461
462 if (aborted || (result != VK_SUCCESS)) return;
463 bool buffers_present = false;
464 // Don't QueueWaitIdle if there's nothing to process
465 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
466 const VkSubmitInfo *submit = &pSubmits[submit_idx];
467 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
468 buffers_present |= CommandBufferNeedsProcessing(submit->pCommandBuffers[i]);
469 }
470 }
471 if (!buffers_present) return;
472
473 SubmitBarrier(queue);
474
475 DispatchQueueWaitIdle(queue);
476
477 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
478 const VkSubmitInfo *submit = &pSubmits[submit_idx];
479 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
480 ProcessCommandBuffer(queue, submit->pCommandBuffers[i]);
481 }
482 }
483}
484
485void GpuAssistedBase::RecordQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 *pSubmits, VkFence fence,
486 VkResult result) {
487 if (aborted || (result != VK_SUCCESS)) return;
488 bool buffers_present = false;
489 // Don't QueueWaitIdle if there's nothing to process
490 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
491 const VkSubmitInfo2 *submit = &pSubmits[submit_idx];
492 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
493 buffers_present |= CommandBufferNeedsProcessing(submit->pCommandBufferInfos[i].commandBuffer);
494 }
495 }
496 if (!buffers_present) return;
497
498 SubmitBarrier(queue);
499
500 DispatchQueueWaitIdle(queue);
501
502 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
503 const VkSubmitInfo2 *submit = &pSubmits[submit_idx];
504 for (uint32_t i = 0; i < submit->commandBufferInfoCount; i++) {
505 ProcessCommandBuffer(queue, submit->pCommandBufferInfos[i].commandBuffer);
506 }
507 }
508}
509
// VK_KHR_synchronization2 entry point; state tracking first, then the shared
// RecordQueueSubmit2() readback path.
void GpuAssistedBase::PostCallRecordQueueSubmit2KHR(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2KHR *pSubmits,
                                                    VkFence fence, VkResult result) {
    ValidationStateTracker::PostCallRecordQueueSubmit2KHR(queue, submitCount, pSubmits, fence, result);
    RecordQueueSubmit2(queue, submitCount, pSubmits, fence, result);
}
515
// Vulkan 1.3 core entry point; state tracking first, then the shared
// RecordQueueSubmit2() readback path.
void GpuAssistedBase::PostCallRecordQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 *pSubmits, VkFence fence,
                                                 VkResult result) {
    ValidationStateTracker::PostCallRecordQueueSubmit2(queue, submitCount, pSubmits, fence, result);
    RecordQueueSubmit2(queue, submitCount, pSubmits, fence, result);
}
521
// Pad the application's pipeline layout out to 'adjusted_max_desc_sets' so GPU-AV's debug
// descriptor set can always be bound at the last slot. If the app already uses too many
// sets, leave the layout untouched and fall back to uninstrumented shaders.
void GpuAssistedBase::PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                        const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
                                                        void *cpl_state_data) {
    if (aborted) {
        return;
    }
    auto cpl_state = static_cast<create_pipeline_layout_api_state *>(cpl_state_data);
    if (cpl_state->modified_create_info.setLayoutCount >= adjusted_max_desc_sets) {
        std::ostringstream strm;
        strm << "Pipeline Layout conflict with validation's descriptor set at slot " << desc_set_bind_index << ". "
             << "Application has too many descriptor sets in the pipeline layout to continue with gpu validation. "
             << "Validation is not modifying the pipeline layout. "
             << "Instrumented shaders are replaced with non-instrumented shaders.";
        ReportSetupProblem(device, strm.str().c_str());
    } else {
        // Modify the pipeline layout by:
        // 1. Copying the caller's descriptor set desc_layouts
        // 2. Fill in dummy descriptor layouts up to the max binding
        // 3. Fill in with the debug descriptor layout at the max binding slot
        cpl_state->new_layouts.reserve(adjusted_max_desc_sets);
        cpl_state->new_layouts.insert(cpl_state->new_layouts.end(), &pCreateInfo->pSetLayouts[0],
                                      &pCreateInfo->pSetLayouts[pCreateInfo->setLayoutCount]);
        for (uint32_t i = pCreateInfo->setLayoutCount; i < adjusted_max_desc_sets - 1; ++i) {
            cpl_state->new_layouts.push_back(dummy_desc_layout);
        }
        cpl_state->new_layouts.push_back(debug_desc_layout);
        cpl_state->modified_create_info.pSetLayouts = cpl_state->new_layouts.data();
        cpl_state->modified_create_info.setLayoutCount = adjusted_max_desc_sets;
    }
    ValidationStateTracker::PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, cpl_state_data);
}
553
554void GpuAssistedBase::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
555 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
556 VkResult result) {
557 if (result != VK_SUCCESS) {
558 ReportSetupProblem(device, "Unable to create pipeline layout. Device could become unstable.");
559 aborted = true;
560 }
561 ValidationStateTracker::PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
562}
563
564void GpuAssistedBase::PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
565 const VkGraphicsPipelineCreateInfo *pCreateInfos,
566 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
567 void *cgpl_state_data) {
568 if (aborted) return;
569 std::vector<safe_VkGraphicsPipelineCreateInfo> new_pipeline_create_infos;
570 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
571 PreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, cgpl_state->pipe_state, &new_pipeline_create_infos,
572 VK_PIPELINE_BIND_POINT_GRAPHICS);
Tony-LunarG806cddb2022-05-11 15:32:48 -0600573 cgpl_state->modified_create_infos = new_pipeline_create_infos;
574 cgpl_state->pCreateInfos = reinterpret_cast<VkGraphicsPipelineCreateInfo *>(cgpl_state->modified_create_infos.data());
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600575}
576
577void GpuAssistedBase::PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
578 const VkComputePipelineCreateInfo *pCreateInfos,
579 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
580 void *ccpl_state_data) {
581 if (aborted) return;
582 std::vector<safe_VkComputePipelineCreateInfo> new_pipeline_create_infos;
583 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
584 PreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, ccpl_state->pipe_state, &new_pipeline_create_infos,
585 VK_PIPELINE_BIND_POINT_COMPUTE);
Tony-LunarG806cddb2022-05-11 15:32:48 -0600586 ccpl_state->modified_create_infos = new_pipeline_create_infos;
587 ccpl_state->pCreateInfos = reinterpret_cast<VkComputePipelineCreateInfo *>(ccpl_state->modified_create_infos.data());
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600588}
589
590void GpuAssistedBase::PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
591 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
592 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
593 void *crtpl_state_data) {
594 if (aborted) return;
595 std::vector<safe_VkRayTracingPipelineCreateInfoCommon> new_pipeline_create_infos;
596 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
597 PreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, crtpl_state->pipe_state, &new_pipeline_create_infos,
598 VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
Tony-LunarG806cddb2022-05-11 15:32:48 -0600599 crtpl_state->modified_create_infos = new_pipeline_create_infos;
600 crtpl_state->pCreateInfos = reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>(crtpl_state->modified_create_infos.data());
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600601}
602
603void GpuAssistedBase::PreCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
604 VkPipelineCache pipelineCache, uint32_t count,
605 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
606 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
607 void *crtpl_state_data) {
608 if (aborted) return;
609 std::vector<safe_VkRayTracingPipelineCreateInfoCommon> new_pipeline_create_infos;
610 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
611 PreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, crtpl_state->pipe_state, &new_pipeline_create_infos,
612 VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR);
Tony-LunarG806cddb2022-05-11 15:32:48 -0600613 crtpl_state->modified_create_infos = new_pipeline_create_infos;
614 crtpl_state->pCreateInfos = reinterpret_cast<VkRayTracingPipelineCreateInfoKHR *>(crtpl_state->modified_create_infos.data());
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600615}
616
617template <typename CreateInfos, typename SafeCreateInfos>
618static void UtilCopyCreatePipelineFeedbackData(const uint32_t count, CreateInfos *pCreateInfos, SafeCreateInfos *pSafeCreateInfos) {
619 for (uint32_t i = 0; i < count; i++) {
620 auto src_feedback_struct = LvlFindInChain<VkPipelineCreationFeedbackCreateInfoEXT>(pSafeCreateInfos[i].pNext);
621 if (!src_feedback_struct) return;
622 auto dst_feedback_struct = const_cast<VkPipelineCreationFeedbackCreateInfoEXT *>(
623 LvlFindInChain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext));
624 *dst_feedback_struct->pPipelineCreationFeedback = *src_feedback_struct->pPipelineCreationFeedback;
625 for (uint32_t j = 0; j < src_feedback_struct->pipelineStageCreationFeedbackCount; j++) {
626 dst_feedback_struct->pPipelineStageCreationFeedbacks[j] = src_feedback_struct->pPipelineStageCreationFeedbacks[j];
627 }
628 }
629}
630
631void GpuAssistedBase::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
632 const VkGraphicsPipelineCreateInfo *pCreateInfos,
633 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
634 VkResult result, void *cgpl_state_data) {
635 ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
636 pPipelines, result, cgpl_state_data);
637 if (aborted) return;
638 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
Tony-LunarG806cddb2022-05-11 15:32:48 -0600639 UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, cgpl_state->modified_create_infos.data());
Tony-LunarGabe71bd2022-05-11 15:58:16 -0600640 PostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_GRAPHICS,
641 cgpl_state->modified_create_infos.data());
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600642}
643
644void GpuAssistedBase::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
645 const VkComputePipelineCreateInfo *pCreateInfos,
646 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
647 VkResult result, void *ccpl_state_data) {
648 ValidationStateTracker::PostCallRecordCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines,
649 result, ccpl_state_data);
650 if (aborted) return;
651 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
Tony-LunarG806cddb2022-05-11 15:32:48 -0600652 UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, ccpl_state->modified_create_infos.data());
Tony-LunarGabe71bd2022-05-11 15:58:16 -0600653 PostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_COMPUTE,
654 ccpl_state->modified_create_infos.data());
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600655}
656
// Post-creation hook for NV ray tracing pipelines: propagate creation-feedback data back
// to the app's structs, then track/clean up the shaders used by the new pipelines.
void GpuAssistedBase::PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                                const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
                                                                const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                                VkResult result, void *crtpl_state_data) {
    // NOTE(review): this NV entry point casts the chassis state to
    // create_ray_tracing_pipeline_khr_api_state, while the matching PreCall (above) uses
    // create_ray_tracing_pipeline_api_state. Only modified_create_infos is read here, so this
    // presumably relies on both structs declaring that member compatibly — confirm against the
    // chassis state definitions before relying on any other member through this pointer.
    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
    ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator,
                                                                      pPipelines, result, crtpl_state_data);
    if (aborted) return;
    UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, crtpl_state->modified_create_infos.data());
    PostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
                                    crtpl_state->modified_create_infos.data());
}
669
670void GpuAssistedBase::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
671 VkPipelineCache pipelineCache, uint32_t count,
672 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
673 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
674 VkResult result, void *crtpl_state_data) {
675 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
676 ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
677 device, deferredOperation, pipelineCache, count, pCreateInfos, pAllocator, pPipelines, result, crtpl_state_data);
678 if (aborted) return;
Tony-LunarG806cddb2022-05-11 15:32:48 -0600679 UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, crtpl_state->modified_create_infos.data());
Tony-LunarGabe71bd2022-05-11 15:58:16 -0600680 PostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
681 crtpl_state->modified_create_infos.data());
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600682}
683
684// Remove all the shader trackers associated with this destroyed pipeline.
685void GpuAssistedBase::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
Jeremy Gebben58cc9d12022-03-23 17:04:57 -0600686 auto to_erase = shader_map.snapshot([pipeline](const GpuAssistedShaderTracker &entry) { return entry.pipeline == pipeline; });
687 for (const auto &entry : to_erase) {
688 shader_map.erase(entry.first);
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600689 }
690 ValidationStateTracker::PreCallRecordDestroyPipeline(device, pipeline, pAllocator);
691}
692
// Accessor shims that let the Pre/PostCallRecordPipelineCreations templates read and write
// shader-module handles uniformly across the four pipeline create-info types. Specialized
// per create-info type below; the primary template is intentionally empty.
template <typename CreateInfo>
struct CreatePipelineTraits {};
template <>
struct CreatePipelineTraits<VkGraphicsPipelineCreateInfo> {
    using SafeType = safe_VkGraphicsPipelineCreateInfo;
    // Graphics pipelines carry one shader module per entry of pStages.
    static uint32_t GetStageCount(const VkGraphicsPipelineCreateInfo &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkGraphicsPipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    // Writes into the safe (deep-copied) create info, never the app's original struct.
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};
706
template <>
struct CreatePipelineTraits<VkComputePipelineCreateInfo> {
    using SafeType = safe_VkComputePipelineCreateInfo;
    // Compute pipelines always have exactly one stage, embedded directly in the create info,
    // so the stage index is ignored (and asserted to be 0 on write).
    static uint32_t GetStageCount(const VkComputePipelineCreateInfo &createInfo) { return 1; }
    static VkShaderModule GetShaderModule(const VkComputePipelineCreateInfo &createInfo, uint32_t stage) {
        return createInfo.stage.module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        assert(stage == 0);
        createInfo->stage.module = shader_module;
    }
};
719
template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoNV> {
    // NV and KHR ray tracing share one "common" safe create-info type.
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoNV &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoNV &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};
731
template <>
struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoKHR> {
    // NV and KHR ray tracing share one "common" safe create-info type.
    using SafeType = safe_VkRayTracingPipelineCreateInfoCommon;
    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoKHR &createInfo) { return createInfo.stageCount; }
    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoKHR &createInfo, uint32_t stage) {
        return createInfo.pStages[stage].module;
    }
    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
        createInfo->pStages[stage].module = shader_module;
    }
};
743
// Examine the pipelines to see if they use the debug descriptor set binding index.
// If any do, create new non-instrumented shader modules and use them to replace the instrumented
// shaders in the pipeline. Return the (possibly) modified create infos to the caller.
//
// count/pCreateInfos: the app's pipeline batch. pipe_state: state objects already created by the
// state tracker for this batch. new_pipeline_create_infos: out-param receiving one safe copy per
// pipeline (always filled, modified only when shaders must be replaced).
template <typename CreateInfo, typename SafeCreateInfo>
void GpuAssistedBase::PreCallRecordPipelineCreations(uint32_t count, const CreateInfo *pCreateInfos,
                                                     const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
                                                     std::vector<std::shared_ptr<PIPELINE_STATE>> &pipe_state,
                                                     std::vector<SafeCreateInfo> *new_pipeline_create_infos,
                                                     const VkPipelineBindPoint bind_point) {
    using Accessor = CreatePipelineTraits<CreateInfo>;
    // VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR has the same value as _NV, so this check
    // admits both ray tracing entry points.
    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
        return;
    }

    // Walk through all the pipelines, make a copy of each and flag each pipeline that contains a shader that uses the debug
    // descriptor set index.
    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
        uint32_t stageCount = Accessor::GetStageCount(pCreateInfos[pipeline]);
        const auto &pipe = pipe_state[pipeline];
        // Always push a safe copy so indices in new_pipeline_create_infos line up with the batch.
        new_pipeline_create_infos->push_back(pipe->GetCreateInfo<CreateInfo>());

        // Graphics libraries get handled when they are linked into a full pipeline.
        if (!pipe->IsGraphicsLibrary()) {
            bool replace_shaders = false;
            // The pipeline uses the descriptor set index reserved for GPU-AV's debug buffer.
            if (pipe->active_slots.find(desc_set_bind_index) != pipe->active_slots.end()) {
                replace_shaders = true;
            }
            // If the app requests all available sets, the pipeline layout was not modified at pipeline layout creation and the
            // already instrumented shaders need to be replaced with uninstrumented shaders
            const auto pipeline_layout = pipe->PipelineLayoutState();
            if (pipeline_layout->set_layouts.size() >= adjusted_max_desc_sets) {
                replace_shaders = true;
            }

            if (replace_shaders) {
                for (uint32_t stage = 0; stage < stageCount; ++stage) {
                    // NOTE(review): module_state is dereferenced below without a null check;
                    // presumably the module is guaranteed to still be tracked at PreCall time —
                    // confirm (PostCallRecordPipelineCreations treats it as nullable).
                    const auto module_state = Get<SHADER_MODULE_STATE>(pipe->GetShaderModuleByCIIndex<CreateInfo>(stage));

                    // Recreate a module from the tracked SPIR-V words and splice it into the safe
                    // copy in place of the instrumented module.
                    VkShaderModule shader_module;
                    auto create_info = LvlInitStruct<VkShaderModuleCreateInfo>();
                    create_info.pCode = module_state->words.data();
                    create_info.codeSize = module_state->words.size() * sizeof(uint32_t);
                    VkResult result = DispatchCreateShaderModule(device, &create_info, pAllocator, &shader_module);
                    if (result == VK_SUCCESS) {
                        Accessor::SetShaderModule(&(*new_pipeline_create_infos)[pipeline], shader_module, stage);
                    } else {
                        // Best-effort: fall back to the instrumented shader rather than failing the call.
                        ReportSetupProblem(device,
                                           "Unable to replace instrumented shader with non-instrumented one. "
                                           "Device could become unstable.");
                    }
                }
            }
        }
    }
}
799// For every pipeline:
800// - For every shader in a pipeline:
801// - If the shader had to be replaced in PreCallRecord (because the pipeline is using the debug desc set index):
802// - Destroy it since it has been bound into the pipeline by now. This is our only chance to delete it.
803// - Track the shader in the shader_map
804// - Save the shader binary if it contains debug code
Tony-LunarGabe71bd2022-05-11 15:58:16 -0600805template <typename CreateInfo, typename SafeCreateInfo>
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600806void GpuAssistedBase::PostCallRecordPipelineCreations(const uint32_t count, const CreateInfo *pCreateInfos,
807 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
Tony-LunarG0f647382022-05-12 15:11:59 -0600808 const VkPipelineBindPoint bind_point, const SafeCreateInfo &modified_create_infos) {
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600809 if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
810 bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
811 return;
812 }
813 for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
814 auto pipeline_state = Get<PIPELINE_STATE>(pPipelines[pipeline]);
815 if (!pipeline_state || pipeline_state->IsGraphicsLibrary()) continue;
816
817 const uint32_t stageCount = static_cast<uint32_t>(pipeline_state->stage_state.size());
818 assert(stageCount > 0);
819
Tony-LunarG0f647382022-05-12 15:11:59 -0600820 const auto pipeline_layout = pipeline_state->PipelineLayoutState();
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600821 for (uint32_t stage = 0; stage < stageCount; ++stage) {
Nathaniel Cesariod4d8fce2022-05-06 15:01:10 -0600822 assert((bind_point != VK_PIPELINE_BIND_POINT_COMPUTE) || (stage == 0));
823 auto shader_module = pipeline_state->GetShaderModuleByCIIndex<CreateInfo>(stage);
824 auto module_state = Get<SHADER_MODULE_STATE>(shader_module);
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600825
Tony-LunarG0f647382022-05-12 15:11:59 -0600826 if (pipeline_state->active_slots.find(desc_set_bind_index) != pipeline_state->active_slots.end() ||
827 (pipeline_layout->set_layouts.size() >= adjusted_max_desc_sets)) {
828 auto *modified_ci = reinterpret_cast<const CreateInfo *>(modified_create_infos[pipeline].ptr());
829 auto uninstrumented_module = CreatePipelineTraits<CreateInfo>::GetShaderModule(*modified_ci, stage);
830 assert(uninstrumented_module != shader_module);
831 DispatchDestroyShaderModule(device, uninstrumented_module, pAllocator);
832 }
833
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600834 std::vector<unsigned int> code;
835 // Save the shader binary
836 // The core_validation ShaderModule tracker saves the binary too, but discards it when the ShaderModule
837 // is destroyed. Applications may destroy ShaderModules after they are placed in a pipeline and before
838 // the pipeline is used, so we have to keep another copy.
839 if (module_state && module_state->has_valid_spirv) code = module_state->words;
840
Jeremy Gebben58cc9d12022-03-23 17:04:57 -0600841 shader_map.insert_or_assign(module_state->gpu_validation_shader_id, pipeline_state->pipeline(), shader_module,
842 std::move(code));
Jeremy Gebbenefd97802022-03-28 16:45:05 -0600843 }
844 }
845}
846
// Generate the stage-specific part of the message.
// debug_record is the raw record written back by the instrumented shader; the kInst* word
// indices are defined by spirv-tools' instrument.hpp. The NV execution-model enums used in
// the ray tracing cases also cover the equivalent KHR stages (same SPIR-V values).
void UtilGenerateStageMessage(const uint32_t *debug_record, std::string &msg) {
    using namespace spvtools;
    std::ostringstream strm;
    switch (debug_record[kInstCommonOutStageIdx]) {
        case spv::ExecutionModelVertex: {
            strm << "Stage = Vertex. Vertex Index = " << debug_record[kInstVertOutVertexIndex]
                 << " Instance Index = " << debug_record[kInstVertOutInstanceIndex] << ". ";
        } break;
        case spv::ExecutionModelTessellationControl: {
            strm << "Stage = Tessellation Control. Invocation ID = " << debug_record[kInstTessCtlOutInvocationId]
                 << ", Primitive ID = " << debug_record[kInstTessCtlOutPrimitiveId];
        } break;
        case spv::ExecutionModelTessellationEvaluation: {
            strm << "Stage = Tessellation Eval. Primitive ID = " << debug_record[kInstTessEvalOutPrimitiveId]
                 << ", TessCoord (u, v) = (" << debug_record[kInstTessEvalOutTessCoordU] << ", "
                 << debug_record[kInstTessEvalOutTessCoordV] << "). ";
        } break;
        case spv::ExecutionModelGeometry: {
            strm << "Stage = Geometry. Primitive ID = " << debug_record[kInstGeomOutPrimitiveId]
                 << " Invocation ID = " << debug_record[kInstGeomOutInvocationId] << ". ";
        } break;
        case spv::ExecutionModelFragment: {
            // FragCoord is stored as raw float bits in the record; reinterpret, don't convert.
            strm << "Stage = Fragment. Fragment coord (x,y) = ("
                 << *reinterpret_cast<const float *>(&debug_record[kInstFragOutFragCoordX]) << ", "
                 << *reinterpret_cast<const float *>(&debug_record[kInstFragOutFragCoordY]) << "). ";
        } break;
        case spv::ExecutionModelGLCompute: {
            strm << "Stage = Compute. Global invocation ID (x, y, z) = (" << debug_record[kInstCompOutGlobalInvocationIdX] << ", "
                 << debug_record[kInstCompOutGlobalInvocationIdY] << ", " << debug_record[kInstCompOutGlobalInvocationIdZ] << " )";
        } break;
        case spv::ExecutionModelRayGenerationNV: {
            strm << "Stage = Ray Generation. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
        } break;
        case spv::ExecutionModelIntersectionNV: {
            strm << "Stage = Intersection. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
        } break;
        case spv::ExecutionModelAnyHitNV: {
            strm << "Stage = Any Hit. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
        } break;
        case spv::ExecutionModelClosestHitNV: {
            strm << "Stage = Closest Hit. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
        } break;
        case spv::ExecutionModelMissNV: {
            strm << "Stage = Miss. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
        } break;
        case spv::ExecutionModelCallableNV: {
            strm << "Stage = Callable. Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
        } break;
        case spv::ExecutionModelTaskNV: {
            strm << "Stage = Task. Global invocation ID (x, y, z) = (" << debug_record[kInstTaskOutGlobalInvocationIdX] << ", "
                 << debug_record[kInstTaskOutGlobalInvocationIdY] << ", " << debug_record[kInstTaskOutGlobalInvocationIdZ] << " )";
        } break;
        case spv::ExecutionModelMeshNV: {
            // NOTE(review): "Mesh.Global" is missing a space after the period; message text is
            // left untouched here in case anything parses these strings.
            strm << "Stage = Mesh.Global invocation ID (x, y, z) = (" << debug_record[kInstMeshOutGlobalInvocationIdX] << ", "
                 << debug_record[kInstMeshOutGlobalInvocationIdY] << ", " << debug_record[kInstMeshOutGlobalInvocationIdZ] << " )";
        } break;
        default: {
            strm << "Internal Error (unexpected stage = " << debug_record[kInstCommonOutStageIdx] << "). ";
            assert(false);
        } break;
    }
    msg = strm.str();
}
917
918std::string LookupDebugUtilsName(const debug_report_data *report_data, const uint64_t object) {
919 auto object_label = report_data->DebugReportGetUtilsObjectName(object);
920 if (object_label != "") {
921 object_label = "(" + object_label + ")";
922 }
923 return object_label;
924}
925
926// Generate message from the common portion of the debug report record.
Tony-LunarGb5fae462020-03-05 12:43:25 -0700927void UtilGenerateCommonMessage(const debug_report_data *report_data, const VkCommandBuffer commandBuffer,
928 const uint32_t *debug_record, const VkShaderModule shader_module_handle,
929 const VkPipeline pipeline_handle, const VkPipelineBindPoint pipeline_bind_point,
930 const uint32_t operation_index, std::string &msg) {
Tony-LunarG1dce2392019-10-23 16:49:29 -0600931 using namespace spvtools;
932 std::ostringstream strm;
933 if (shader_module_handle == VK_NULL_HANDLE) {
934 strm << std::hex << std::showbase << "Internal Error: Unable to locate information for shader used in command buffer "
935 << LookupDebugUtilsName(report_data, HandleToUint64(commandBuffer)) << "(" << HandleToUint64(commandBuffer) << "). ";
936 assert(true);
937 } else {
938 strm << std::hex << std::showbase << "Command buffer " << LookupDebugUtilsName(report_data, HandleToUint64(commandBuffer))
939 << "(" << HandleToUint64(commandBuffer) << "). ";
940 if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
941 strm << "Draw ";
942 } else if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
943 strm << "Compute ";
944 } else if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
945 strm << "Ray Trace ";
946 } else {
947 assert(false);
948 strm << "Unknown Pipeline Operation ";
949 }
950 strm << "Index " << operation_index << ". "
951 << "Pipeline " << LookupDebugUtilsName(report_data, HandleToUint64(pipeline_handle)) << "("
952 << HandleToUint64(pipeline_handle) << "). "
953 << "Shader Module " << LookupDebugUtilsName(report_data, HandleToUint64(shader_module_handle)) << "("
954 << HandleToUint64(shader_module_handle) << "). ";
955 }
956 strm << std::dec << std::noshowbase;
957 strm << "Shader Instruction Index = " << debug_record[kInstCommonOutInstructionIdx] << ". ";
958 msg = strm.str();
959}
960
// Read the contents of the SPIR-V OpSource instruction and any following continuation instructions.
// Split the single string into a vector of strings, one for each line, for easier processing.
void ReadOpSource(const SHADER_MODULE_STATE &module_state, const uint32_t reported_file_id,
                  std::vector<std::string> &opsource_lines) {
    for (auto insn : module_state) {
        // OpSource layout: word 3 is the (optional) file id, the embedded source text starts at word 4.
        if ((insn.opcode() == spv::OpSource) && (insn.len() >= 5) && (insn.word(3) == reported_file_id)) {
            std::istringstream in_stream;
            std::string cur_line;
            in_stream.str((char *)&insn.word(4));
            while (std::getline(in_stream, cur_line)) {
                opsource_lines.push_back(cur_line);
            }
            // Long sources are split across OpSourceContinued instructions that must
            // immediately follow the OpSource; keep appending until a different opcode appears.
            while ((++insn).opcode() == spv::OpSourceContinued) {
                in_stream.str((char *)&insn.word(1));
                while (std::getline(in_stream, cur_line)) {
                    opsource_lines.push_back(cur_line);
                }
            }
            break;
        }
    }
}
983
984// The task here is to search the OpSource content to find the #line directive with the
985// line number that is closest to, but still prior to the reported error line number and
986// still within the reported filename.
987// From this known position in the OpSource content we can add the difference between
988// the #line line number and the reported error line number to determine the location
989// in the OpSource content of the reported error line.
990//
991// Considerations:
992// - Look only at #line directives that specify the reported_filename since
993// the reported error line number refers to its location in the reported filename.
994// - If a #line directive does not have a filename, the file is the reported filename, or
995// the filename found in a prior #line directive. (This is C-preprocessor behavior)
996// - It is possible (e.g., inlining) for blocks of code to get shuffled out of their
997// original order and the #line directives are used to keep the numbering correct. This
998// is why we need to examine the entire contents of the source, instead of leaving early
999// when finding a #line line number larger than the reported error line number.
1000//
1001
1002// GCC 4.8 has a problem with std::regex that is fixed in GCC 4.9. Provide fallback code for 4.8
1003#define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
1004
1005#if defined(__GNUC__) && GCC_VERSION < 40900
1006bool GetLineAndFilename(const std::string string, uint32_t *linenumber, std::string &filename) {
1007 // # line <linenumber> "<filename>" or
1008 // #line <linenumber> "<filename>"
1009 std::vector<std::string> tokens;
1010 std::stringstream stream(string);
1011 std::string temp;
1012 uint32_t line_index = 0;
1013
1014 while (stream >> temp) tokens.push_back(temp);
1015 auto size = tokens.size();
1016 if (size > 1) {
1017 if (tokens[0] == "#" && tokens[1] == "line") {
1018 line_index = 2;
1019 } else if (tokens[0] == "#line") {
1020 line_index = 1;
1021 }
1022 }
1023 if (0 == line_index) return false;
Mark Young0ec6b062020-11-19 15:32:17 -07001024 *linenumber = static_cast<uint32_t>(std::stoul(tokens[line_index]));
Tony-LunarG1dce2392019-10-23 16:49:29 -06001025 uint32_t filename_index = line_index + 1;
1026 // Remove enclosing double quotes around filename
1027 if (size > filename_index) filename = tokens[filename_index].substr(1, tokens[filename_index].size() - 2);
1028 return true;
1029}
1030#else
1031bool GetLineAndFilename(const std::string string, uint32_t *linenumber, std::string &filename) {
1032 static const std::regex line_regex( // matches #line directives
1033 "^" // beginning of line
1034 "\\s*" // optional whitespace
1035 "#" // required text
1036 "\\s*" // optional whitespace
1037 "line" // required text
1038 "\\s+" // required whitespace
1039 "([0-9]+)" // required first capture - line number
1040 "(\\s+)?" // optional second capture - whitespace
1041 "(\".+\")?" // optional third capture - quoted filename with at least one char inside
1042 ".*"); // rest of line (needed when using std::regex_match since the entire line is tested)
1043
1044 std::smatch captures;
1045
1046 bool found_line = std::regex_match(string, captures, line_regex);
1047 if (!found_line) return false;
1048
1049 // filename is optional and considered found only if the whitespace and the filename are captured
1050 if (captures[2].matched && captures[3].matched) {
1051 // Remove enclosing double quotes. The regex guarantees the quotes and at least one char.
1052 filename = captures[3].str().substr(1, captures[3].str().size() - 2);
1053 }
Artem Bolgar82d08362021-06-03 13:11:13 -07001054 *linenumber = (uint32_t)std::stoul(captures[1]);
Tony-LunarG1dce2392019-10-23 16:49:29 -06001055 return true;
1056}
1057#endif // GCC_VERSION
1058
// Extract the filename, line number, and column number from the correct OpLine and build a message string from it.
// Scan the source (from OpSource) to find the line of source at the reported line number and place it in another message string.
// pgm: the SPIR-V words of the offending shader. debug_record: record written by the
// instrumented shader (kInst* indices from spirv-tools' instrument.hpp). from_printf selects
// the debug-printf vs validation-error message prefix.
void UtilGenerateSourceMessages(const std::vector<uint32_t> &pgm, const uint32_t *debug_record, bool from_printf,
                                std::string &filename_msg, std::string &source_msg) {
    using namespace spvtools;
    std::ostringstream filename_stream;
    std::ostringstream source_stream;
    SHADER_MODULE_STATE shader(pgm);
    // Find the OpLine just before the failing instruction indicated by the debug info.
    // SPIR-V can only be iterated in the forward direction due to its opcode/length encoding.
    uint32_t instruction_index = 0;
    uint32_t reported_file_id = 0;
    uint32_t reported_line_number = 0;
    uint32_t reported_column_number = 0;
    if (shader.words.size() > 0) {
        for (const auto &insn : shader) {
            // Remember the most recent OpLine seen; it applies to subsequent instructions.
            if (insn.opcode() == spv::OpLine) {
                reported_file_id = insn.word(1);
                reported_line_number = insn.word(2);
                reported_column_number = insn.word(3);
            }
            if (instruction_index == debug_record[kInstCommonOutInstructionIdx]) {
                break;
            }
            instruction_index++;
        }
    }
    // Create message with file information obtained from the OpString pointed to by the discovered OpLine.
    std::string reported_filename;
    if (reported_file_id == 0) {
        // No OpLine found: the shader was built without debug info.
        filename_stream
            << "Unable to find SPIR-V OpLine for source information. Build shader with debug info to get source information.";
    } else {
        bool found_opstring = false;
        std::string prefix;
        if (from_printf) {
            prefix = "Debug shader printf message generated ";
        } else {
            prefix = "Shader validation error occurred ";
        }
        for (const auto &insn : shader) {
            // OpString: word 1 is the result id referenced by OpLine, word 2 starts the string.
            if ((insn.opcode() == spv::OpString) && (insn.len() >= 3) && (insn.word(1) == reported_file_id)) {
                found_opstring = true;
                reported_filename = (char *)&insn.word(2);
                if (reported_filename.empty()) {
                    filename_stream << prefix << "at line " << reported_line_number;
                } else {
                    filename_stream << prefix << "in file " << reported_filename << " at line " << reported_line_number;
                }
                if (reported_column_number > 0) {
                    filename_stream << ", column " << reported_column_number;
                }
                filename_stream << ".";
                break;
            }
        }
        if (!found_opstring) {
            filename_stream << "Unable to find SPIR-V OpString for file id " << reported_file_id << " from OpLine instruction."
                            << std::endl;
            filename_stream << "File ID = " << reported_file_id << ", Line Number = " << reported_line_number
                            << ", Column = " << reported_column_number << std::endl;
        }
    }
    filename_msg = filename_stream.str();

    // Create message to display source code line containing error.
    if ((reported_file_id != 0)) {
        // Read the source code and split it up into separate lines.
        std::vector<std::string> opsource_lines;
        ReadOpSource(shader, reported_file_id, opsource_lines);
        // Find the line in the OpSource content that corresponds to the reported error file and line.
        // See the block comment above GetLineAndFilename for why every #line directive must be
        // examined rather than stopping at the first candidate.
        if (!opsource_lines.empty()) {
            uint32_t saved_line_number = 0;
            std::string current_filename = reported_filename;  // current "preprocessor" filename state.
            std::vector<std::string>::size_type saved_opsource_offset = 0;
            bool found_best_line = false;
            for (auto it = opsource_lines.begin(); it != opsource_lines.end(); ++it) {
                uint32_t parsed_line_number;
                std::string parsed_filename;
                bool found_line = GetLineAndFilename(*it, &parsed_line_number, parsed_filename);
                if (!found_line) continue;

                bool found_filename = parsed_filename.size() > 0;
                if (found_filename) {
                    current_filename = parsed_filename;
                }
                if ((!found_filename) || (current_filename == reported_filename)) {
                    // Update the candidate best line directive, if the current one is prior and closer to the reported line
                    if (reported_line_number >= parsed_line_number) {
                        if (!found_best_line ||
                            (reported_line_number - parsed_line_number <= reported_line_number - saved_line_number)) {
                            saved_line_number = parsed_line_number;
                            saved_opsource_offset = std::distance(opsource_lines.begin(), it);
                            found_best_line = true;
                        }
                    }
                }
            }
            if (found_best_line) {
                assert(reported_line_number >= saved_line_number);
                // Offset from the #line directive to the reported line, +1 to skip the directive itself.
                std::vector<std::string>::size_type opsource_index =
                    (reported_line_number - saved_line_number) + 1 + saved_opsource_offset;
                if (opsource_index < opsource_lines.size()) {
                    source_stream << "\n" << reported_line_number << ": " << opsource_lines[opsource_index].c_str();
                } else {
                    source_stream << "Internal error: calculated source line of " << opsource_index << " for source size of "
                                  << opsource_lines.size() << " lines.";
                }
            } else {
                source_stream << "Unable to find suitable #line directive in SPIR-V OpSource.";
            }
        } else {
            source_stream << "Unable to find SPIR-V OpSource.";
        }
    }
    source_msg = source_stream.str();
}