/* Copyright (c) 2018-2019 The Khronos Group Inc.
 * Copyright (c) 2018-2019 Valve Corporation
 * Copyright (c) 2018-2019 LunarG, Inc.
 * Copyright (C) 2018-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
| 19 | |
Tony-LunarG | 0e56472 | 2019-03-19 16:09:14 -0600 | [diff] [blame] | 20 | #include "vk_mem_alloc.h" |
| 21 | |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 22 | #ifndef VULKAN_GPU_VALIDATION_H |
| 23 | #define VULKAN_GPU_VALIDATION_H |
| 24 | |
// A device-memory-backed buffer used by GPU-assisted validation, allocated through VMA.
struct GpuDeviceMemoryBlock {
    VkBuffer buffer;
    VmaAllocation allocation;  // VMA allocation backing `buffer`
    // Bindings whose contents must be (re)written at queue-submit time, keyed by
    // binding number.  NOTE(review): Descriptor pointers are non-owning; their
    // lifetime is presumably managed by the descriptor-set state tracker — verify.
    std::unordered_map<uint32_t, const cvdescriptorset::Descriptor *> update_at_submit;
};
| 30 | |
| 31 | struct GpuBufferInfo { |
Tony-LunarG | 1b2e0c3 | 2019-02-07 17:13:27 -0700 | [diff] [blame] | 32 | GpuDeviceMemoryBlock output_mem_block; |
Tony-LunarG | 8eb5a00 | 2019-07-25 16:49:00 -0600 | [diff] [blame] | 33 | GpuDeviceMemoryBlock di_input_mem_block; // Descriptor Indexing input |
| 34 | GpuDeviceMemoryBlock bda_input_mem_block; // Buffer Device Address input |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame] | 35 | VkDescriptorSet desc_set; |
| 36 | VkDescriptorPool desc_pool; |
Jason Macnak | 67407e7 | 2019-07-11 11:05:09 -0700 | [diff] [blame] | 37 | VkPipelineBindPoint pipeline_bind_point; |
Tony-LunarG | 8eb5a00 | 2019-07-25 16:49:00 -0600 | [diff] [blame] | 38 | GpuBufferInfo(GpuDeviceMemoryBlock output_mem_block, GpuDeviceMemoryBlock di_input_mem_block, |
| 39 | GpuDeviceMemoryBlock bda_input_mem_block, VkDescriptorSet desc_set, VkDescriptorPool desc_pool, |
| 40 | VkPipelineBindPoint pipeline_bind_point) |
Jason Macnak | 67407e7 | 2019-07-11 11:05:09 -0700 | [diff] [blame] | 41 | : output_mem_block(output_mem_block), |
Tony-LunarG | 8eb5a00 | 2019-07-25 16:49:00 -0600 | [diff] [blame] | 42 | di_input_mem_block(di_input_mem_block), |
| 43 | bda_input_mem_block(bda_input_mem_block), |
Jason Macnak | 67407e7 | 2019-07-11 11:05:09 -0700 | [diff] [blame] | 44 | desc_set(desc_set), |
| 45 | desc_pool(desc_pool), |
| 46 | pipeline_bind_point(pipeline_bind_point){}; |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame] | 47 | }; |
| 48 | |
// Per-queue command pool/buffer used to submit a barrier after validated work
// (see GpuValidationState::queue_barrier_command_infos).
struct GpuQueueBarrierCommandInfo {
    VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
    VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
};
| 53 | |
// Class to encapsulate Descriptor Set allocation. This manager creates and destroys Descriptor Pools
// as needed to satisfy requests for descriptor sets.
class GpuDescriptorSetManager {
   public:
    // NOTE(review): single-argument constructor is implicit; consider `explicit`
    // if no caller relies on the conversion.
    GpuDescriptorSetManager(CoreChecks *dev_data);
    ~GpuDescriptorSetManager();

    // Convenience overload: obtains a single descriptor set (see the plural form below).
    VkResult GetDescriptorSet(VkDescriptorPool *desc_pool, VkDescriptorSet *desc_sets);
    // Allocates `count` descriptor sets, creating a new pool when existing ones are exhausted.
    // Returns the pool they came from through `pool`.
    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, std::vector<VkDescriptorSet> *desc_sets);
    // Returns a set to its pool; the pool may be destroyed once all of its sets are returned.
    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);

   private:
    // Number of descriptor sets each pool is created to hold.
    static const uint32_t kItemsPerChunk = 512;
    struct PoolTracker {
        uint32_t size;  // total sets the pool can provide
        uint32_t used;  // sets currently handed out
    };

    CoreChecks *dev_data_;  // non-owning back-pointer to the owning validation object
    std::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
};
| 75 | |
// Per-build resources used to validate one acceleration structure build command.
struct GpuAccelerationStructureBuildValidationBufferInfo {
    // The acceleration structure that is being built.
    VkAccelerationStructureNV acceleration_structure = VK_NULL_HANDLE;

    // The descriptor pool and descriptor set being used to validate a given build.
    VkDescriptorPool descriptor_pool = VK_NULL_HANDLE;
    VkDescriptorSet descriptor_set = VK_NULL_HANDLE;

    // The storage buffer used by the validating compute shader which contains info about
    // the valid handles and which is written to communicate found invalid handles.
    VkBuffer validation_buffer = VK_NULL_HANDLE;
    VmaAllocation validation_buffer_allocation = VK_NULL_HANDLE;
};
| 89 | |
// Device-wide state for GPU-assisted validation of acceleration structure builds.
struct GpuAccelerationStructureBuildValidationState {
    // True once the pipeline and replacement structure below have been created.
    bool initialized = false;

    // Compute pipeline (and its layout) used to run the build-validation shader.
    VkPipeline pipeline = VK_NULL_HANDLE;
    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;

    // A substitute acceleration structure, its backing allocation, and its handle.
    // NOTE(review): presumably swapped in for invalid handles found during
    // validation — confirm against the implementation.
    VkAccelerationStructureNV replacement_as = VK_NULL_HANDLE;
    VmaAllocation replacement_as_allocation = VK_NULL_HANDLE;
    uint64_t replacement_as_handle = 0;

    // Outstanding per-build validation resources, grouped by the command buffer
    // that recorded the build.
    std::unordered_map<VkCommandBuffer, std::vector<GpuAccelerationStructureBuildValidationBufferInfo>> validation_buffers;
};
| 102 | |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame] | 103 | struct GpuValidationState { |
| 104 | bool aborted; |
| 105 | bool reserve_binding_slot; |
Tony-LunarG | 8eb5a00 | 2019-07-25 16:49:00 -0600 | [diff] [blame] | 106 | VkBool32 shaderInt64; |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame] | 107 | VkDescriptorSetLayout debug_desc_layout; |
| 108 | VkDescriptorSetLayout dummy_desc_layout; |
| 109 | uint32_t adjusted_max_desc_sets; |
| 110 | uint32_t desc_set_bind_index; |
| 111 | uint32_t unique_shader_module_id; |
| 112 | std::unordered_map<uint32_t, ShaderTracker> shader_map; |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame] | 113 | std::unique_ptr<GpuDescriptorSetManager> desc_set_manager; |
Jason Macnak | 8eae572 | 2019-07-17 15:17:45 -0700 | [diff] [blame] | 114 | std::map<VkQueue, GpuQueueBarrierCommandInfo> queue_barrier_command_infos; |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame] | 115 | std::unordered_map<VkCommandBuffer, std::vector<GpuBufferInfo>> command_buffer_map; // gpu_buffer_list; |
Tony-LunarG | 0e56472 | 2019-03-19 16:09:14 -0600 | [diff] [blame] | 116 | uint32_t output_buffer_size; |
| 117 | VmaAllocator vmaAllocator; |
Tony-LunarG | 2ab9ede | 2019-05-10 14:34:31 -0600 | [diff] [blame] | 118 | PFN_vkSetDeviceLoaderData vkSetDeviceLoaderData; |
Tony-LunarG | 8eb5a00 | 2019-07-25 16:49:00 -0600 | [diff] [blame] | 119 | std::map<VkDeviceAddress, VkDeviceSize> buffer_map; |
Jason Macnak | 83cfd58 | 2019-07-31 10:14:24 -0700 | [diff] [blame^] | 120 | GpuAccelerationStructureBuildValidationState acceleration_struction_validation_state; |
Tony-LunarG | 76cdcac | 2019-05-22 16:13:12 -0600 | [diff] [blame] | 121 | GpuValidationState(bool aborted = false, bool reserve_binding_slot = false, uint32_t unique_shader_module_id = 0, |
Tony-LunarG | 76cdcac | 2019-05-22 16:13:12 -0600 | [diff] [blame] | 122 | VmaAllocator vmaAllocator = {}) |
Tony-LunarG | 29f48a7 | 2019-04-16 11:53:37 -0600 | [diff] [blame] | 123 | : aborted(aborted), |
| 124 | reserve_binding_slot(reserve_binding_slot), |
Tony-LunarG | 76cdcac | 2019-05-22 16:13:12 -0600 | [diff] [blame] | 125 | unique_shader_module_id(unique_shader_module_id), |
Tony-LunarG | 29f48a7 | 2019-04-16 11:53:37 -0600 | [diff] [blame] | 126 | vmaAllocator(vmaAllocator){}; |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame] | 127 | |
| 128 | std::vector<GpuBufferInfo> &GetGpuBufferInfo(const VkCommandBuffer command_buffer) { |
| 129 | auto buffer_list = command_buffer_map.find(command_buffer); |
| 130 | if (buffer_list == command_buffer_map.end()) { |
| 131 | std::vector<GpuBufferInfo> new_list{}; |
| 132 | command_buffer_map[command_buffer] = new_list; |
| 133 | return command_buffer_map[command_buffer]; |
| 134 | } |
| 135 | return buffer_list->second; |
| 136 | } |
| 137 | }; |
| 138 | |
// Locking aliases so the synchronization primitive can be swapped in one place.
using mutex_t = std::mutex;
using lock_guard_t = std::lock_guard<mutex_t>;
using unique_lock_t = std::unique_lock<mutex_t>;
| 142 | |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 143 | #endif // VULKAN_GPU_VALIDATION_H |