blob: 2c1a03902e3860aa41001798c6d50fd2f0525629 [file] [log] [blame]
Karl Schultz7b024b42018-08-30 16:18:18 -06001/* Copyright (c) 2018-2019 The Khronos Group Inc.
2 * Copyright (c) 2018-2019 Valve Corporation
3 * Copyright (c) 2018-2019 LunarG, Inc.
4 * Copyright (C) 2018-2019 Google Inc.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 */
19
Tony-LunarG0e564722019-03-19 16:09:14 -060020#include "vk_mem_alloc.h"
21
Karl Schultz7b024b42018-08-30 16:18:18 -060022#ifndef VULKAN_GPU_VALIDATION_H
23#define VULKAN_GPU_VALIDATION_H
24
// A device-memory-backed buffer used by GPU-assisted validation, allocated
// through VMA (Vulkan Memory Allocator).
struct GpuDeviceMemoryBlock {
    VkBuffer buffer;          // Buffer handle backed by 'allocation'
    VmaAllocation allocation; // VMA allocation that owns the buffer's memory
    // Descriptors whose state must be re-examined at queue-submit time,
    // keyed by uint32_t — presumably a binding/descriptor index; confirm against the writer.
    std::unordered_map<uint32_t, const cvdescriptorset::Descriptor *> update_at_submit;
};
30
31struct GpuBufferInfo {
Tony-LunarG1b2e0c32019-02-07 17:13:27 -070032 GpuDeviceMemoryBlock output_mem_block;
Tony-LunarG8eb5a002019-07-25 16:49:00 -060033 GpuDeviceMemoryBlock di_input_mem_block; // Descriptor Indexing input
34 GpuDeviceMemoryBlock bda_input_mem_block; // Buffer Device Address input
Mark Lobodzinski2a3ee4a2019-03-13 13:11:39 -060035 VkDescriptorSet desc_set;
36 VkDescriptorPool desc_pool;
Jason Macnak67407e72019-07-11 11:05:09 -070037 VkPipelineBindPoint pipeline_bind_point;
Tony-LunarG8eb5a002019-07-25 16:49:00 -060038 GpuBufferInfo(GpuDeviceMemoryBlock output_mem_block, GpuDeviceMemoryBlock di_input_mem_block,
39 GpuDeviceMemoryBlock bda_input_mem_block, VkDescriptorSet desc_set, VkDescriptorPool desc_pool,
40 VkPipelineBindPoint pipeline_bind_point)
Jason Macnak67407e72019-07-11 11:05:09 -070041 : output_mem_block(output_mem_block),
Tony-LunarG8eb5a002019-07-25 16:49:00 -060042 di_input_mem_block(di_input_mem_block),
43 bda_input_mem_block(bda_input_mem_block),
Jason Macnak67407e72019-07-11 11:05:09 -070044 desc_set(desc_set),
45 desc_pool(desc_pool),
46 pipeline_bind_point(pipeline_bind_point){};
Mark Lobodzinski2a3ee4a2019-03-13 13:11:39 -060047};
48
// Per-queue command pool/buffer pair used for barrier commands; both handles
// stay VK_NULL_HANDLE until lazily created for the queue.
struct GpuQueueBarrierCommandInfo {
    VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
    VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
};
53
// Class to encapsulate Descriptor Set allocation. This manager creates and destroys Descriptor Pools
// as needed to satisfy requests for descriptor sets.
class GpuDescriptorSetManager {
  public:
    GpuDescriptorSetManager(CoreChecks *dev_data);
    ~GpuDescriptorSetManager();

    // Allocates a single descriptor set; outputs the pool it came from so the
    // caller can return it with PutBackDescriptorSet().
    VkResult GetDescriptorSet(VkDescriptorPool *desc_pool, VkDescriptorSet *desc_sets);
    // Allocates 'count' descriptor sets from a single pool.
    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool *pool, std::vector<VkDescriptorSet> *desc_sets);
    // Returns a set to its pool; the pool may be destroyed once unused.
    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);

  private:
    // Number of descriptor sets each newly created pool is sized for.
    static const uint32_t kItemsPerChunk = 512;
    // Tracks, per pool, how many sets it can hold and how many are handed out.
    struct PoolTracker {
        uint32_t size;
        uint32_t used;
    };

    CoreChecks *dev_data_;  // Non-owning back-pointer to the owning validation object
    std::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
};
75
struct GpuAccelerationStructureBuildValidationBufferInfo {
    // The acceleration structure that is being built.
    VkAccelerationStructureNV acceleration_structure = VK_NULL_HANDLE;

    // The descriptor pool and descriptor set being used to validate a given build.
    VkDescriptorPool descriptor_pool = VK_NULL_HANDLE;
    VkDescriptorSet descriptor_set = VK_NULL_HANDLE;

    // The storage buffer used by the validating compute shader which contains info about
    // the valid handles and which is written to communicate found invalid handles.
    VkBuffer validation_buffer = VK_NULL_HANDLE;
    VmaAllocation validation_buffer_allocation = VK_NULL_HANDLE;
};
89
// State for validating VkAccelerationStructureNV builds with a compute shader.
struct GpuAccelerationStructureBuildValidationState {
    bool initialized = false;  // Set once the pipeline/replacement AS below are created

    // Compute pipeline that performs the build validation.
    VkPipeline pipeline = VK_NULL_HANDLE;
    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;

    // A stand-in acceleration structure (and its VMA allocation/handle) —
    // presumably substituted for invalid handles during validation; confirm against the .cpp.
    VkAccelerationStructureNV replacement_as = VK_NULL_HANDLE;
    VmaAllocation replacement_as_allocation = VK_NULL_HANDLE;
    uint64_t replacement_as_handle = 0;

    // Per-command-buffer list of outstanding build-validation buffers.
    std::unordered_map<VkCommandBuffer, std::vector<GpuAccelerationStructureBuildValidationBufferInfo>> validation_buffers;
};
102
Mark Lobodzinski2a3ee4a2019-03-13 13:11:39 -0600103struct GpuValidationState {
104 bool aborted;
105 bool reserve_binding_slot;
Tony-LunarG8eb5a002019-07-25 16:49:00 -0600106 VkBool32 shaderInt64;
Mark Lobodzinski2a3ee4a2019-03-13 13:11:39 -0600107 VkDescriptorSetLayout debug_desc_layout;
108 VkDescriptorSetLayout dummy_desc_layout;
109 uint32_t adjusted_max_desc_sets;
110 uint32_t desc_set_bind_index;
111 uint32_t unique_shader_module_id;
112 std::unordered_map<uint32_t, ShaderTracker> shader_map;
Mark Lobodzinski2a3ee4a2019-03-13 13:11:39 -0600113 std::unique_ptr<GpuDescriptorSetManager> desc_set_manager;
Jason Macnak8eae5722019-07-17 15:17:45 -0700114 std::map<VkQueue, GpuQueueBarrierCommandInfo> queue_barrier_command_infos;
Mark Lobodzinski2a3ee4a2019-03-13 13:11:39 -0600115 std::unordered_map<VkCommandBuffer, std::vector<GpuBufferInfo>> command_buffer_map; // gpu_buffer_list;
Tony-LunarG0e564722019-03-19 16:09:14 -0600116 uint32_t output_buffer_size;
117 VmaAllocator vmaAllocator;
Tony-LunarG2ab9ede2019-05-10 14:34:31 -0600118 PFN_vkSetDeviceLoaderData vkSetDeviceLoaderData;
Tony-LunarG8eb5a002019-07-25 16:49:00 -0600119 std::map<VkDeviceAddress, VkDeviceSize> buffer_map;
Jason Macnak83cfd582019-07-31 10:14:24 -0700120 GpuAccelerationStructureBuildValidationState acceleration_struction_validation_state;
Tony-LunarG76cdcac2019-05-22 16:13:12 -0600121 GpuValidationState(bool aborted = false, bool reserve_binding_slot = false, uint32_t unique_shader_module_id = 0,
Tony-LunarG76cdcac2019-05-22 16:13:12 -0600122 VmaAllocator vmaAllocator = {})
Tony-LunarG29f48a72019-04-16 11:53:37 -0600123 : aborted(aborted),
124 reserve_binding_slot(reserve_binding_slot),
Tony-LunarG76cdcac2019-05-22 16:13:12 -0600125 unique_shader_module_id(unique_shader_module_id),
Tony-LunarG29f48a72019-04-16 11:53:37 -0600126 vmaAllocator(vmaAllocator){};
Mark Lobodzinski2a3ee4a2019-03-13 13:11:39 -0600127
128 std::vector<GpuBufferInfo> &GetGpuBufferInfo(const VkCommandBuffer command_buffer) {
129 auto buffer_list = command_buffer_map.find(command_buffer);
130 if (buffer_list == command_buffer_map.end()) {
131 std::vector<GpuBufferInfo> new_list{};
132 command_buffer_map[command_buffer] = new_list;
133 return command_buffer_map[command_buffer];
134 }
135 return buffer_list->second;
136 }
137};
138
// Convenience aliases for the locking primitives used throughout GPU validation.
using mutex_t = std::mutex;
using lock_guard_t = std::lock_guard<mutex_t>;
using unique_lock_t = std::unique_lock<mutex_t>;
142
Karl Schultz7b024b42018-08-30 16:18:18 -0600143#endif // VULKAN_GPU_VALIDATION_H