/* Copyright (c) 2018-2019 The Khronos Group Inc.
 * Copyright (c) 2018-2019 Valve Corporation
 * Copyright (c) 2018-2019 LunarG, Inc.
 * Copyright (C) 2018-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifndef VULKAN_GPU_VALIDATION_H
#define VULKAN_GPU_VALIDATION_H

#include <list>
#include <memory>
#include <mutex>
#include <unordered_map>
#include <vector>

#include "vulkan/vulkan.h"

// Note: CoreChecks and ShaderTracker are declared elsewhere in the validation
// layer sources; this header is expected to be included where they are visible.

struct GpuDeviceMemoryBlock {
    VkBuffer buffer;
    VkDeviceMemory memory;
    uint32_t offset;
};

struct GpuBufferInfo {
    GpuDeviceMemoryBlock mem_block;
    VkDescriptorSet desc_set;
    VkDescriptorPool desc_pool;
    GpuBufferInfo(GpuDeviceMemoryBlock mem_block, VkDescriptorSet desc_set, VkDescriptorPool desc_pool)
        : mem_block(mem_block), desc_set(desc_set), desc_pool(desc_pool) {}
};
// Class to encapsulate Vulkan device memory allocations.
// It allocates device memory in large chunks for efficiency and to avoid
// hitting the device limit on the number of allocations.
// This manager handles only fixed-size blocks of "data_size" bytes.
// The interface allows the caller to "get" and "put back" blocks,
// and the manager allocates and frees chunks as needed.
// A usage sketch appears after the class definition below.

class GpuDeviceMemoryManager {
   public:
    GpuDeviceMemoryManager(CoreChecks *dev_data, uint32_t data_size);
    ~GpuDeviceMemoryManager();

    uint32_t GetBlockSize() { return block_size_; }

    VkResult GetBlock(GpuDeviceMemoryBlock *block);
    void PutBackBlock(VkBuffer buffer, VkDeviceMemory memory, uint32_t offset);
    void PutBackBlock(GpuDeviceMemoryBlock &block);
    void FreeAllBlocks();

   private:
    // Define the allocation granularity of Vulkan resources.
    // Things like device memory and descriptors are allocated in "chunks".
    // This number should be chosen to balance making too many chunk
    // allocations against making chunks that are too large.
    static const uint32_t kItemsPerChunk = 512;
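    // For example (illustrative arithmetic, not taken from this header): if a
    // record is 240 bytes and is padded up to a 256-byte block, a chunk of 512
    // blocks occupies 512 * 256 = 128 KiB in a single VkDeviceMemory
    // allocation, replacing what would otherwise be 512 separate allocations.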

    struct MemoryChunk {
        VkBuffer buffer;
        VkDeviceMemory memory;
        std::vector<uint32_t> available_offsets;
    };

    CoreChecks *dev_data_;
    uint32_t record_size_;
    uint32_t block_size_;
    uint32_t blocks_per_chunk_;
    uint32_t chunk_size_;
    std::list<MemoryChunk> chunk_list_;

    bool MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex);
    VkResult AllocMemoryChunk(MemoryChunk &chunk);
    void FreeMemoryChunk(MemoryChunk &chunk);
};
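
// A minimal usage sketch for GpuDeviceMemoryManager (illustrative only;
// "dev_data" and "OutputRecord" are hypothetical names, not declared here):
//
//     GpuDeviceMemoryManager mem_mgr(dev_data, sizeof(OutputRecord));
//     GpuDeviceMemoryBlock block = {};
//     if (mem_mgr.GetBlock(&block) == VK_SUCCESS) {
//         // block.buffer + block.offset locate GetBlockSize() bytes of
//         // device memory for one record; return the block when done.
//         mem_mgr.PutBackBlock(block);
//     }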
80
// Class to encapsulate descriptor set allocation. This manager creates and
// destroys descriptor pools as needed to satisfy requests for descriptor sets.
class GpuDescriptorSetManager {
   public:
    GpuDescriptorSetManager(CoreChecks *dev_data);
    ~GpuDescriptorSetManager();

    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, std::vector<VkDescriptorSet> *desc_sets);
    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);
    void DestroyDescriptorPools();

   private:
    static const uint32_t kItemsPerChunk = 512;
    struct PoolTracker {
        uint32_t size;
        uint32_t used;
    };

    CoreChecks *dev_data_;
    std::unordered_map<VkDescriptorPool, PoolTracker> desc_pool_map_;
};
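
// A minimal usage sketch for GpuDescriptorSetManager (illustrative only;
// "dev_data" is a hypothetical name):
//
//     GpuDescriptorSetManager desc_mgr(dev_data);
//     VkDescriptorPool pool = VK_NULL_HANDLE;
//     std::vector<VkDescriptorSet> desc_sets;
//     if (desc_mgr.GetDescriptorSets(1, &pool, &desc_sets) == VK_SUCCESS) {
//         // ... use desc_sets[0], then return it to its pool ...
//         desc_mgr.PutBackDescriptorSet(pool, desc_sets[0]);
//     }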

struct GpuValidationState {
    bool aborted;
    bool reserve_binding_slot;
    VkDescriptorSetLayout debug_desc_layout;
    VkDescriptorSetLayout dummy_desc_layout;
    uint32_t adjusted_max_desc_sets;
    uint32_t desc_set_bind_index;
    uint32_t unique_shader_module_id;
    std::unordered_map<uint32_t, ShaderTracker> shader_map;
    std::unique_ptr<GpuDeviceMemoryManager> memory_manager;
    std::unique_ptr<GpuDescriptorSetManager> desc_set_manager;
    VkCommandPool barrier_command_pool;
    VkCommandBuffer barrier_command_buffer;
    std::unordered_map<VkCommandBuffer, std::vector<GpuBufferInfo>> command_buffer_map;

    // Return the list of GpuBufferInfo records for a command buffer, creating
    // an empty list the first time a given command buffer is seen.
    std::vector<GpuBufferInfo> &GetGpuBufferInfo(const VkCommandBuffer command_buffer) {
        auto buffer_list = command_buffer_map.find(command_buffer);
        if (buffer_list == command_buffer_map.end()) {
            command_buffer_map[command_buffer] = std::vector<GpuBufferInfo>();
            return command_buffer_map[command_buffer];
        }
        return buffer_list->second;
    }
};
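
// Sketch of how the per-command-buffer bookkeeping above ties together
// (illustrative only; "gpu_state", "cmd_buffer", "block", "desc_set", and
// "desc_pool" are hypothetical names):
//
//     gpu_state->GetGpuBufferInfo(cmd_buffer)
//         .emplace_back(block, desc_set, desc_pool);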

using mutex_t = std::mutex;
using lock_guard_t = std::lock_guard<mutex_t>;
using unique_lock_t = std::unique_lock<mutex_t>;

#endif  // VULKAN_GPU_VALIDATION_H