/* Copyright (c) 2018-2019 The Khronos Group Inc.
 * Copyright (c) 2018-2019 Valve Corporation
 * Copyright (c) 2018-2019 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Karl Schultz <karl@lunarg.com>
 * Author: Tony Barbour <tony@lunarg.com>
 */

#pragma once

#include "chassis.h"
#include "state_tracker.h"
#include "vk_mem_alloc.h"

class GpuAssisted;

struct GpuAssistedDeviceMemoryBlock {
    VkBuffer buffer;
    VmaAllocation allocation;
    std::unordered_map<uint32_t, const cvdescriptorset::Descriptor*> update_at_submit;
};

struct GpuAssistedBufferInfo {
    GpuAssistedDeviceMemoryBlock output_mem_block;
    GpuAssistedDeviceMemoryBlock di_input_mem_block;   // Descriptor Indexing input
    GpuAssistedDeviceMemoryBlock bda_input_mem_block;  // Buffer Device Address input
    VkDescriptorSet desc_set;
    VkDescriptorPool desc_pool;
    VkPipelineBindPoint pipeline_bind_point;
    GpuAssistedBufferInfo(GpuAssistedDeviceMemoryBlock output_mem_block, GpuAssistedDeviceMemoryBlock di_input_mem_block,
                          GpuAssistedDeviceMemoryBlock bda_input_mem_block, VkDescriptorSet desc_set, VkDescriptorPool desc_pool,
                          VkPipelineBindPoint pipeline_bind_point)
        : output_mem_block(output_mem_block),
          di_input_mem_block(di_input_mem_block),
          bda_input_mem_block(bda_input_mem_block),
          desc_set(desc_set),
          desc_pool(desc_pool),
          pipeline_bind_point(pipeline_bind_point){};
};

struct GpuAssistedQueueBarrierCommandInfo {
    VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
    VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
};

// Class to encapsulate Descriptor Set allocation. This manager creates and destroys Descriptor Pools
// as needed to satisfy requests for descriptor sets.
class GpuAssistedDescriptorSetManager {
  public:
    GpuAssistedDescriptorSetManager(GpuAssisted* dev_data);
    ~GpuAssistedDescriptorSetManager();

    VkResult GetDescriptorSet(VkDescriptorPool* desc_pool, VkDescriptorSet* desc_sets);
    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool* pool, std::vector<VkDescriptorSet>* desc_sets);
    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);

  private:
    static const uint32_t kItemsPerChunk = 512;
    struct PoolTracker {
        uint32_t size;
        uint32_t used;
    };

    GpuAssisted* dev_data_;
    std::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
};
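
// Illustrative usage sketch (not part of this header beyond the declarations above): a caller
// typically acquires a pool/set pair from the manager, uses it for one instrumented draw or
// dispatch, and returns it afterwards. The helper name and flow below are assumptions for
// illustration only.
#if 0  // Example only -- not compiled.
static void ExampleAcquireAndRelease(GpuAssistedDescriptorSetManager& desc_set_manager) {
    VkDescriptorPool pool = VK_NULL_HANDLE;
    VkDescriptorSet set = VK_NULL_HANDLE;
    // The manager creates (or reuses) a descriptor pool as needed to satisfy the request.
    if (desc_set_manager.GetDescriptorSet(&pool, &set) == VK_SUCCESS) {
        // ... update and bind `set` for the instrumented command ...
        // Hand the set back so its pool slot can be reclaimed.
        desc_set_manager.PutBackDescriptorSet(pool, set);
    }
}
#endif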

struct GpuAssistedShaderTracker {
    VkPipeline pipeline;
    VkShaderModule shader_module;
    std::vector<unsigned int> pgm;
};

struct GpuAssistedAccelerationStructureBuildValidationBufferInfo {
    // The acceleration structure that is being built.
    VkAccelerationStructureNV acceleration_structure = VK_NULL_HANDLE;

    // The descriptor pool and descriptor set being used to validate a given build.
    VkDescriptorPool descriptor_pool = VK_NULL_HANDLE;
    VkDescriptorSet descriptor_set = VK_NULL_HANDLE;

    // The storage buffer used by the validating compute shader, which contains info about
    // the valid handles and which is written to communicate any invalid handles that are found.
    VkBuffer validation_buffer = VK_NULL_HANDLE;
    VmaAllocation validation_buffer_allocation = VK_NULL_HANDLE;
};
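
// Illustrative sketch (an assumption for clarity, not code from this layer): a validation storage
// buffer like the one above could be created through the Vulkan Memory Allocator declared in
// vk_mem_alloc.h. The helper name, size, and memory-usage choice are placeholders.
#if 0  // Example only -- not compiled.
static VkResult ExampleCreateValidationBuffer(VmaAllocator allocator, VkDeviceSize size, VkBuffer* buffer,
                                              VmaAllocation* allocation) {
    VkBufferCreateInfo buffer_info = {};
    buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffer_info.size = size;
    buffer_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo alloc_info = {};
    // Host-visible memory lets the CPU seed the valid handles and read back any invalid ones.
    alloc_info.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    // vmaCreateBuffer creates the VkBuffer and binds it to a suitable allocation in one call.
    return vmaCreateBuffer(allocator, &buffer_info, &alloc_info, buffer, allocation, nullptr);
}
#endif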

struct GpuAssistedAccelerationStructureBuildValidationState {
    bool initialized = false;

    VkPipeline pipeline = VK_NULL_HANDLE;
    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;

    VkAccelerationStructureNV replacement_as = VK_NULL_HANDLE;
    VmaAllocation replacement_as_allocation = VK_NULL_HANDLE;
    uint64_t replacement_as_handle = 0;

    std::unordered_map<VkCommandBuffer, std::vector<GpuAssistedAccelerationStructureBuildValidationBufferInfo>> validation_buffers;
};

struct GpuAssistedValidationState {
    bool aborted;
    VkBool32 shaderInt64;
    VkDescriptorSetLayout debug_desc_layout;
    VkDescriptorSetLayout dummy_desc_layout;
    uint32_t adjusted_max_desc_sets;
    uint32_t desc_set_bind_index;
    uint32_t unique_shader_module_id;
    std::unordered_map<uint32_t, GpuAssistedShaderTracker> shader_map;
    std::unique_ptr<GpuAssistedDescriptorSetManager> desc_set_manager;
    std::map<VkQueue, GpuAssistedQueueBarrierCommandInfo> queue_barrier_command_infos;
    std::unordered_map<VkCommandBuffer, std::vector<GpuAssistedBufferInfo>> command_buffer_map;  // gpu_buffer_list;
    uint32_t output_buffer_size;
    VmaAllocator vmaAllocator;
    PFN_vkSetDeviceLoaderData vkSetDeviceLoaderData;
    std::map<VkDeviceAddress, VkDeviceSize> buffer_map;
    GpuAssistedAccelerationStructureBuildValidationState acceleration_structure_validation_state;
    GpuAssistedValidationState(bool aborted = false, uint32_t unique_shader_module_id = 0, VmaAllocator vmaAllocator = {})
        : aborted(aborted), unique_shader_module_id(unique_shader_module_id), vmaAllocator(vmaAllocator){};

    // Return the list of GpuAssistedBufferInfo recorded for a command buffer, creating an empty list on first use.
    std::vector<GpuAssistedBufferInfo>& GetGpuAssistedBufferInfo(const VkCommandBuffer command_buffer) {
        auto buffer_list = command_buffer_map.find(command_buffer);
        if (buffer_list == command_buffer_map.end()) {
            std::vector<GpuAssistedBufferInfo> new_list{};
            command_buffer_map[command_buffer] = new_list;
            return command_buffer_map[command_buffer];
        }
        return buffer_list->second;
    }
};

class GpuAssisted : public ValidationStateTracker {
  public:
    using StateTracker = ValidationStateTracker;
    std::unique_ptr<GpuAssistedValidationState> gpu_validation_state;
    void ReportSetupProblem(VkDebugReportObjectTypeEXT object_type, uint64_t object_handle, const char* const specific_message);
    void PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
                                   const VkAllocationCallbacks* pAllocator, VkDevice* pDevice,
                                   safe_VkDeviceCreateInfo* modified_create_info);
    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
                                    const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result);
    void PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo,
                                                 VkDeviceAddress address);
    void PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
    void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator);
    void PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
                                                         const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
                                                         VkResult result);
    void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
                                           const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout,
                                           void* cpl_state_data);
    void PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
                                            const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout,
                                            VkResult result);
    void ResetCommandBuffer(VkCommandBuffer commandBuffer);
    bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
                                      VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                                      uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
                                      uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
                                      uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
    void PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
                                   VkBuffer* pBuffer, void* cb_state_data);
    void CreateAccelerationStructureBuildValidationState(GpuAssisted* device_GpuAssisted);
    void DestroyAccelerationStructureBuildValidationState();
    void PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo,
                                                      VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update,
                                                      VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
                                                      VkBuffer scratch, VkDeviceSize scratchOffset);
    void ProcessAccelerationStructureBuildValidationBuffer(VkQueue queue, CMD_BUFFER_STATE* cb_node);
    void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                              const VkGraphicsPipelineCreateInfo* pCreateInfos,
                                              const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
                                              void* cgpl_state_data);
    void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                             const VkComputePipelineCreateInfo* pCreateInfos,
                                             const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
                                             void* ccpl_state_data);
    void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                  const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
                                                  const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
                                                  void* crtpl_state_data);
    template <typename CreateInfo, typename SafeCreateInfo>
    void PreCallRecordPipelineCreations(uint32_t count, const CreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator,
                                        VkPipeline* pPipelines, std::vector<std::unique_ptr<PIPELINE_STATE>>& pipe_state,
                                        std::vector<SafeCreateInfo>* new_pipeline_create_infos,
                                        const VkPipelineBindPoint bind_point);
    template <typename CreateInfo>
    void PostCallRecordPipelineCreations(const uint32_t count, const CreateInfo* pCreateInfos,
                                         const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
                                         const VkPipelineBindPoint bind_point);
    void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                               const VkGraphicsPipelineCreateInfo* pCreateInfos,
                                               const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
                                               void* cgpl_state_data);
    void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                              const VkComputePipelineCreateInfo* pCreateInfos,
                                              const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
                                              void* ccpl_state_data);
    void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
                                                   const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
                                                   const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
                                                   void* crtpl_state_data);
    void PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
    bool InstrumentShader(const VkShaderModuleCreateInfo* pCreateInfo, std::vector<unsigned int>& new_pgm,
                          uint32_t* unique_shader_id);
    void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
                                         const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule,
                                         void* csm_state_data);
    void AnalyzeAndReportError(CMD_BUFFER_STATE* cb_node, VkQueue queue, VkPipelineBindPoint pipeline_bind_point,
                               uint32_t operation_index, uint32_t* const debug_output_buffer);
    void ProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE* cb_node);
    void UpdateInstrumentationBuffer(CMD_BUFFER_STATE* cb_node);
    void SubmitBarrier(VkQueue queue);
    void PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
    void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence,
                                   VkResult result);
    void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                              uint32_t firstInstance);
    void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                     uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
    void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
                                      uint32_t stride);
    void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
                                             uint32_t stride);
    void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
    void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
    void PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
                                     VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
                                     VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
                                     VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
                                     VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
                                     VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
                                     uint32_t width, uint32_t height, uint32_t depth);
    void PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
                                      VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
                                      VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
                                      VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
                                      VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
                                      VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
                                      uint32_t width, uint32_t height, uint32_t depth);
    void AllocateValidationResources(const VkCommandBuffer cmd_buffer, const VkPipelineBindPoint bind_point);
    void PostCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
                                                   VkPhysicalDeviceProperties* pPhysicalDeviceProperties);
    void PostCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
                                                    VkPhysicalDeviceProperties2* pPhysicalDeviceProperties2);
    VkResult InitializeVma(VkPhysicalDevice physicalDevice, VkDevice device, VmaAllocator* pAllocator);
};
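
// Illustrative sketch (application-side; an assumption based on the standard VK_EXT_validation_features
// mechanism rather than anything declared in this header): GPU-assisted validation is typically enabled
// by chaining VkValidationFeaturesEXT into VkInstanceCreateInfo when the validation layer is loaded.
// The helper name below is hypothetical.
#if 0  // Example only -- not compiled.
static VkResult ExampleCreateInstanceWithGpuAssistedValidation(const VkApplicationInfo* app_info, VkInstance* instance) {
    const VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};

    VkValidationFeaturesEXT features = {};
    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    features.enabledValidationFeatureCount = 1;
    features.pEnabledValidationFeatures = enables;

    const char* layers[] = {"VK_LAYER_KHRONOS_validation"};

    VkInstanceCreateInfo create_info = {};
    create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    create_info.pNext = &features;
    create_info.pApplicationInfo = app_info;
    create_info.enabledLayerCount = 1;
    create_info.ppEnabledLayerNames = layers;

    return vkCreateInstance(&create_info, nullptr, instance);
}
#endif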