Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 1 | /* Copyright (c) 2018-2019 The Khronos Group Inc. |
| 2 | * Copyright (c) 2018-2019 Valve Corporation |
| 3 | * Copyright (c) 2018-2019 LunarG, Inc. |
| 4 | * Copyright (C) 2018-2019 Google Inc. |
| 5 | * |
| 6 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 7 | * you may not use this file except in compliance with the License. |
| 8 | * You may obtain a copy of the License at |
| 9 | * |
| 10 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 11 | * |
| 12 | * Unless required by applicable law or agreed to in writing, software |
| 13 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 15 | * See the License for the specific language governing permissions and |
| 16 | * limitations under the License. |
| 17 | * |
| 18 | */ |
| 19 | |
| 20 | #ifndef VULKAN_GPU_VALIDATION_H |
| 21 | #define VULKAN_GPU_VALIDATION_H |
| 22 | |
// One fixed-size slice of a larger device-memory chunk, handed out by
// GpuDeviceMemoryManager. Identified by the buffer bound to the chunk's
// memory plus this block's offset within it.
struct GpuDeviceMemoryBlock {
    VkBuffer buffer;        // Buffer bound to the owning chunk's device memory.
    VkDeviceMemory memory;  // Device-memory allocation this block lives in.
    uint32_t offset;        // Offset of this block within the allocation (presumably bytes — confirm in the manager's .cpp).
};
| 28 | |
| 29 | struct GpuBufferInfo { |
| 30 | GpuDeviceMemoryBlock mem_block; |
| 31 | VkDescriptorSet desc_set; |
| 32 | VkDescriptorPool desc_pool; |
| 33 | GpuBufferInfo(GpuDeviceMemoryBlock mem_block, VkDescriptorSet desc_set, VkDescriptorPool desc_pool) |
| 34 | : mem_block(mem_block), desc_set(desc_set), desc_pool(desc_pool){}; |
| 35 | }; |
| 36 | |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 37 | // Class to encapsulate Vulkan Device Memory allocations. |
| 38 | // It allocates device memory in large chunks for efficiency and to avoid |
| 39 | // hitting the device limit of the number of allocations. |
| 40 | // This manager handles only fixed-sized blocks of "data_size" bytes. |
| 41 | // The interface allows the caller to "get" and "put back" blocks. |
| 42 | // The manager allocates and frees chunks as needed. |
| 43 | |
| 44 | class GpuDeviceMemoryManager { |
| 45 | public: |
Mark Lobodzinski | 3bf82a5 | 2019-03-11 11:49:34 -0600 | [diff] [blame] | 46 | GpuDeviceMemoryManager(CoreChecks *dev_data, uint32_t data_size); |
Mark Lobodzinski | b56bbb9 | 2019-02-18 11:49:59 -0700 | [diff] [blame] | 47 | ~GpuDeviceMemoryManager(); |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 48 | |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 49 | uint32_t GetBlockSize() { return block_size_; } |
| 50 | |
| 51 | VkResult GetBlock(GpuDeviceMemoryBlock *block); |
| 52 | void PutBackBlock(VkBuffer buffer, VkDeviceMemory memory, uint32_t offset); |
| 53 | void PutBackBlock(GpuDeviceMemoryBlock &block); |
Tony-LunarG | d85808d | 2019-02-27 16:12:02 -0700 | [diff] [blame] | 54 | void FreeAllBlocks(); |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 55 | |
| 56 | private: |
| 57 | // Define allocation granularity of Vulkan resources. |
| 58 | // Things like device memory and descriptors are allocated in "chunks". |
| 59 | // This number should be chosen to try to avoid too many chunk allocations |
| 60 | // and chunk allocations that are too large. |
| 61 | static const uint32_t kItemsPerChunk = 512; |
| 62 | |
| 63 | struct MemoryChunk { |
| 64 | VkBuffer buffer; |
| 65 | VkDeviceMemory memory; |
| 66 | std::vector<uint32_t> available_offsets; |
| 67 | }; |
| 68 | |
Mark Lobodzinski | 3bf82a5 | 2019-03-11 11:49:34 -0600 | [diff] [blame] | 69 | CoreChecks *dev_data_; |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 70 | uint32_t record_size_; |
| 71 | uint32_t block_size_; |
| 72 | uint32_t blocks_per_chunk_; |
| 73 | uint32_t chunk_size_; |
| 74 | std::list<MemoryChunk> chunk_list_; |
| 75 | |
| 76 | bool MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex); |
| 77 | VkResult AllocMemoryChunk(MemoryChunk &chunk); |
| 78 | void FreeMemoryChunk(MemoryChunk &chunk); |
| 79 | }; |
| 80 | |
| 81 | // Class to encapsulate Descriptor Set allocation. This manager creates and destroys Descriptor Pools |
| 82 | // as needed to satisfy requests for descriptor sets. |
| 83 | class GpuDescriptorSetManager { |
| 84 | public: |
Mark Lobodzinski | 3bf82a5 | 2019-03-11 11:49:34 -0600 | [diff] [blame] | 85 | GpuDescriptorSetManager(CoreChecks *dev_data); |
Mark Lobodzinski | b56bbb9 | 2019-02-18 11:49:59 -0700 | [diff] [blame] | 86 | ~GpuDescriptorSetManager(); |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 87 | |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 88 | VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, std::vector<VkDescriptorSet> *desc_sets); |
| 89 | void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set); |
Tony-LunarG | d85808d | 2019-02-27 16:12:02 -0700 | [diff] [blame] | 90 | void DestroyDescriptorPools(); |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 91 | |
| 92 | private: |
| 93 | static const uint32_t kItemsPerChunk = 512; |
| 94 | struct PoolTracker { |
| 95 | uint32_t size; |
| 96 | uint32_t used; |
| 97 | }; |
| 98 | |
Mark Lobodzinski | 3bf82a5 | 2019-03-11 11:49:34 -0600 | [diff] [blame] | 99 | CoreChecks *dev_data_; |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 100 | std::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_; |
| 101 | }; |
| 102 | |
Mark Lobodzinski | 2a3ee4a | 2019-03-13 13:11:39 -0600 | [diff] [blame^] | 103 | struct GpuValidationState { |
| 104 | bool aborted; |
| 105 | bool reserve_binding_slot; |
| 106 | VkDescriptorSetLayout debug_desc_layout; |
| 107 | VkDescriptorSetLayout dummy_desc_layout; |
| 108 | uint32_t adjusted_max_desc_sets; |
| 109 | uint32_t desc_set_bind_index; |
| 110 | uint32_t unique_shader_module_id; |
| 111 | std::unordered_map<uint32_t, ShaderTracker> shader_map; |
| 112 | std::unique_ptr<GpuDeviceMemoryManager> memory_manager; |
| 113 | std::unique_ptr<GpuDescriptorSetManager> desc_set_manager; |
| 114 | VkCommandPool barrier_command_pool; |
| 115 | VkCommandBuffer barrier_command_buffer; |
| 116 | std::unordered_map<VkCommandBuffer, std::vector<GpuBufferInfo>> command_buffer_map; // gpu_buffer_list; |
| 117 | |
| 118 | std::vector<GpuBufferInfo> &GetGpuBufferInfo(const VkCommandBuffer command_buffer) { |
| 119 | auto buffer_list = command_buffer_map.find(command_buffer); |
| 120 | if (buffer_list == command_buffer_map.end()) { |
| 121 | std::vector<GpuBufferInfo> new_list{}; |
| 122 | command_buffer_map[command_buffer] = new_list; |
| 123 | return command_buffer_map[command_buffer]; |
| 124 | } |
| 125 | return buffer_list->second; |
| 126 | } |
| 127 | }; |
| 128 | |
// Shorthand aliases for the standard mutex and lock types.
using mutex_t = std::mutex;
using lock_guard_t = std::lock_guard<mutex_t>;
using unique_lock_t = std::unique_lock<mutex_t>;
| 132 | |
Karl Schultz | 7b024b4 | 2018-08-30 16:18:18 -0600 | [diff] [blame] | 133 | #endif // VULKAN_GPU_VALIDATION_H |