/* Copyright (c) 2018-2021 The Khronos Group Inc.
 * Copyright (c) 2018-2021 Valve Corporation
 * Copyright (c) 2018-2021 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Karl Schultz <karl@lunarg.com>
 * Author: Tony Barbour <tony@lunarg.com>
 */
20
Tony-LunarGc28e28a2020-08-14 10:37:48 -060021#include <climits>
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -070022#include "gpu_validation.h"
Karl Schultz7b024b42018-08-30 16:18:18 -060023#include "spirv-tools/optimizer.hpp"
24#include "spirv-tools/instrument.hpp"
Tony-LunarG2ba1cb32019-09-25 15:16:11 -060025#include "layer_chassis_dispatch.h"
Tony-LunarG7de10e82020-11-24 11:31:55 -070026#include "gpu_vuids.h"
Karl Schultz7b024b42018-08-30 16:18:18 -060027
Jason Macnak67407e72019-07-11 11:05:09 -070028static const VkShaderStageFlags kShaderStageAllRayTracing =
29 VK_SHADER_STAGE_ANY_HIT_BIT_NV | VK_SHADER_STAGE_CALLABLE_BIT_NV | VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV |
30 VK_SHADER_STAGE_INTERSECTION_BIT_NV | VK_SHADER_STAGE_MISS_BIT_NV | VK_SHADER_STAGE_RAYGEN_BIT_NV;
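// Each *_BIT_NV ray tracing stage flag is a numeric alias of the corresponding *_BIT_KHR flag, so this
// mask also covers stages from pipelines created through VK_KHR_ray_tracing_pipeline.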
31
Jason Macnak83cfd582019-07-31 10:14:24 -070032// Keep in sync with the GLSL shader below.
33struct GpuAccelerationStructureBuildValidationBuffer {
34 uint32_t instances_to_validate;
35 uint32_t replacement_handle_bits_0;
36 uint32_t replacement_handle_bits_1;
37 uint32_t invalid_handle_found;
38 uint32_t invalid_handle_bits_0;
39 uint32_t invalid_handle_bits_1;
40 uint32_t valid_handles_count;
41};
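// In the GPU buffer this struct is immediately followed by a runtime-sized array of two uint32_t words
// per valid handle; see the valid_handles[] member of the ValidationBuffer block in the shader below and
// the write through &mapped_validation_buffer[1] in PreCallRecordCmdBuildAccelerationStructureNV.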
42
// This is the GLSL source for the compute shader that is used during ray tracing acceleration structure
// build validation. It inspects the instance buffer of a top level acceleration structure build, reports
// any invalid bottom level acceleration structure handles it finds, and replaces them with a known-good
// handle so that the application can continue without undefined behavior long enough for the errors to
// be reported.
48//
49// #version 450
50// layout(local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
51// struct VkGeometryInstanceNV {
52// uint unused[14];
53// uint handle_bits_0;
54// uint handle_bits_1;
55// };
56// layout(set=0, binding=0, std430) buffer InstanceBuffer {
57// VkGeometryInstanceNV instances[];
58// };
59// layout(set=0, binding=1, std430) buffer ValidationBuffer {
60// uint instances_to_validate;
61// uint replacement_handle_bits_0;
62// uint replacement_handle_bits_1;
63// uint invalid_handle_found;
64// uint invalid_handle_bits_0;
65// uint invalid_handle_bits_1;
66// uint valid_handles_count;
67// uint valid_handles[];
68// };
69// void main() {
70// for (uint instance_index = 0; instance_index < instances_to_validate; instance_index++) {
71// uint instance_handle_bits_0 = instances[instance_index].handle_bits_0;
72// uint instance_handle_bits_1 = instances[instance_index].handle_bits_1;
73// bool valid = false;
74// for (uint valid_handle_index = 0; valid_handle_index < valid_handles_count; valid_handle_index++) {
75// if (instance_handle_bits_0 == valid_handles[2*valid_handle_index+0] &&
76// instance_handle_bits_1 == valid_handles[2*valid_handle_index+1]) {
77// valid = true;
78// break;
79// }
80// }
81// if (!valid) {
82// invalid_handle_found += 1;
83// invalid_handle_bits_0 = instance_handle_bits_0;
84// invalid_handle_bits_1 = instance_handle_bits_1;
85// instances[instance_index].handle_bits_0 = replacement_handle_bits_0;
86// instances[instance_index].handle_bits_1 = replacement_handle_bits_1;
87// }
88// }
89// }
90//
// To regenerate the SPIR-V below:
// 1. Save the above GLSL source to a file called validation_shader.comp.
// 2. Run in a terminal:
//
//    glslangValidator.exe -x -V validation_shader.comp -o validation_shader.comp.spv
//
// 3. Copy-paste the contents of validation_shader.comp.spv here (clang-format will fix up the alignment).
98static const uint32_t kComputeShaderSpirv[] = {
99 0x07230203, 0x00010000, 0x00080007, 0x0000006d, 0x00000000, 0x00020011, 0x00000001, 0x0006000b, 0x00000001, 0x4c534c47,
100 0x6474732e, 0x3035342e, 0x00000000, 0x0003000e, 0x00000000, 0x00000001, 0x0005000f, 0x00000005, 0x00000004, 0x6e69616d,
101 0x00000000, 0x00060010, 0x00000004, 0x00000011, 0x00000001, 0x00000001, 0x00000001, 0x00030003, 0x00000002, 0x000001c2,
102 0x00040005, 0x00000004, 0x6e69616d, 0x00000000, 0x00060005, 0x00000008, 0x74736e69, 0x65636e61, 0x646e695f, 0x00007865,
103 0x00070005, 0x00000011, 0x696c6156, 0x69746164, 0x75426e6f, 0x72656666, 0x00000000, 0x00090006, 0x00000011, 0x00000000,
104 0x74736e69, 0x65636e61, 0x6f745f73, 0x6c61765f, 0x74616469, 0x00000065, 0x000a0006, 0x00000011, 0x00000001, 0x6c706572,
105 0x6d656361, 0x5f746e65, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000030, 0x000a0006, 0x00000011, 0x00000002, 0x6c706572,
106 0x6d656361, 0x5f746e65, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000031, 0x00090006, 0x00000011, 0x00000003, 0x61766e69,
107 0x5f64696c, 0x646e6168, 0x665f656c, 0x646e756f, 0x00000000, 0x00090006, 0x00000011, 0x00000004, 0x61766e69, 0x5f64696c,
108 0x646e6168, 0x625f656c, 0x5f737469, 0x00000030, 0x00090006, 0x00000011, 0x00000005, 0x61766e69, 0x5f64696c, 0x646e6168,
109 0x625f656c, 0x5f737469, 0x00000031, 0x00080006, 0x00000011, 0x00000006, 0x696c6176, 0x61685f64, 0x656c646e, 0x6f635f73,
110 0x00746e75, 0x00070006, 0x00000011, 0x00000007, 0x696c6176, 0x61685f64, 0x656c646e, 0x00000073, 0x00030005, 0x00000013,
111 0x00000000, 0x00080005, 0x0000001b, 0x74736e69, 0x65636e61, 0x6e61685f, 0x5f656c64, 0x73746962, 0x0000305f, 0x00080005,
112 0x0000001e, 0x65476b56, 0x74656d6f, 0x6e497972, 0x6e617473, 0x564e6563, 0x00000000, 0x00050006, 0x0000001e, 0x00000000,
113 0x73756e75, 0x00006465, 0x00070006, 0x0000001e, 0x00000001, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000030, 0x00070006,
114 0x0000001e, 0x00000002, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000031, 0x00060005, 0x00000020, 0x74736e49, 0x65636e61,
115 0x66667542, 0x00007265, 0x00060006, 0x00000020, 0x00000000, 0x74736e69, 0x65636e61, 0x00000073, 0x00030005, 0x00000022,
116 0x00000000, 0x00080005, 0x00000027, 0x74736e69, 0x65636e61, 0x6e61685f, 0x5f656c64, 0x73746962, 0x0000315f, 0x00040005,
117 0x0000002d, 0x696c6176, 0x00000064, 0x00070005, 0x0000002f, 0x696c6176, 0x61685f64, 0x656c646e, 0x646e695f, 0x00007865,
118 0x00040047, 0x00000010, 0x00000006, 0x00000004, 0x00050048, 0x00000011, 0x00000000, 0x00000023, 0x00000000, 0x00050048,
119 0x00000011, 0x00000001, 0x00000023, 0x00000004, 0x00050048, 0x00000011, 0x00000002, 0x00000023, 0x00000008, 0x00050048,
120 0x00000011, 0x00000003, 0x00000023, 0x0000000c, 0x00050048, 0x00000011, 0x00000004, 0x00000023, 0x00000010, 0x00050048,
121 0x00000011, 0x00000005, 0x00000023, 0x00000014, 0x00050048, 0x00000011, 0x00000006, 0x00000023, 0x00000018, 0x00050048,
122 0x00000011, 0x00000007, 0x00000023, 0x0000001c, 0x00030047, 0x00000011, 0x00000003, 0x00040047, 0x00000013, 0x00000022,
123 0x00000000, 0x00040047, 0x00000013, 0x00000021, 0x00000001, 0x00040047, 0x0000001d, 0x00000006, 0x00000004, 0x00050048,
124 0x0000001e, 0x00000000, 0x00000023, 0x00000000, 0x00050048, 0x0000001e, 0x00000001, 0x00000023, 0x00000038, 0x00050048,
125 0x0000001e, 0x00000002, 0x00000023, 0x0000003c, 0x00040047, 0x0000001f, 0x00000006, 0x00000040, 0x00050048, 0x00000020,
126 0x00000000, 0x00000023, 0x00000000, 0x00030047, 0x00000020, 0x00000003, 0x00040047, 0x00000022, 0x00000022, 0x00000000,
127 0x00040047, 0x00000022, 0x00000021, 0x00000000, 0x00020013, 0x00000002, 0x00030021, 0x00000003, 0x00000002, 0x00040015,
128 0x00000006, 0x00000020, 0x00000000, 0x00040020, 0x00000007, 0x00000007, 0x00000006, 0x0004002b, 0x00000006, 0x00000009,
129 0x00000000, 0x0003001d, 0x00000010, 0x00000006, 0x000a001e, 0x00000011, 0x00000006, 0x00000006, 0x00000006, 0x00000006,
130 0x00000006, 0x00000006, 0x00000006, 0x00000010, 0x00040020, 0x00000012, 0x00000002, 0x00000011, 0x0004003b, 0x00000012,
131 0x00000013, 0x00000002, 0x00040015, 0x00000014, 0x00000020, 0x00000001, 0x0004002b, 0x00000014, 0x00000015, 0x00000000,
132 0x00040020, 0x00000016, 0x00000002, 0x00000006, 0x00020014, 0x00000019, 0x0004002b, 0x00000006, 0x0000001c, 0x0000000e,
133 0x0004001c, 0x0000001d, 0x00000006, 0x0000001c, 0x0005001e, 0x0000001e, 0x0000001d, 0x00000006, 0x00000006, 0x0003001d,
134 0x0000001f, 0x0000001e, 0x0003001e, 0x00000020, 0x0000001f, 0x00040020, 0x00000021, 0x00000002, 0x00000020, 0x0004003b,
135 0x00000021, 0x00000022, 0x00000002, 0x0004002b, 0x00000014, 0x00000024, 0x00000001, 0x0004002b, 0x00000014, 0x00000029,
136 0x00000002, 0x00040020, 0x0000002c, 0x00000007, 0x00000019, 0x0003002a, 0x00000019, 0x0000002e, 0x0004002b, 0x00000014,
137 0x00000036, 0x00000006, 0x0004002b, 0x00000014, 0x0000003b, 0x00000007, 0x0004002b, 0x00000006, 0x0000003c, 0x00000002,
138 0x0004002b, 0x00000006, 0x00000048, 0x00000001, 0x00030029, 0x00000019, 0x00000050, 0x0004002b, 0x00000014, 0x00000058,
139 0x00000003, 0x0004002b, 0x00000014, 0x0000005d, 0x00000004, 0x0004002b, 0x00000014, 0x00000060, 0x00000005, 0x00050036,
140 0x00000002, 0x00000004, 0x00000000, 0x00000003, 0x000200f8, 0x00000005, 0x0004003b, 0x00000007, 0x00000008, 0x00000007,
141 0x0004003b, 0x00000007, 0x0000001b, 0x00000007, 0x0004003b, 0x00000007, 0x00000027, 0x00000007, 0x0004003b, 0x0000002c,
142 0x0000002d, 0x00000007, 0x0004003b, 0x00000007, 0x0000002f, 0x00000007, 0x0003003e, 0x00000008, 0x00000009, 0x000200f9,
143 0x0000000a, 0x000200f8, 0x0000000a, 0x000400f6, 0x0000000c, 0x0000000d, 0x00000000, 0x000200f9, 0x0000000e, 0x000200f8,
144 0x0000000e, 0x0004003d, 0x00000006, 0x0000000f, 0x00000008, 0x00050041, 0x00000016, 0x00000017, 0x00000013, 0x00000015,
145 0x0004003d, 0x00000006, 0x00000018, 0x00000017, 0x000500b0, 0x00000019, 0x0000001a, 0x0000000f, 0x00000018, 0x000400fa,
146 0x0000001a, 0x0000000b, 0x0000000c, 0x000200f8, 0x0000000b, 0x0004003d, 0x00000006, 0x00000023, 0x00000008, 0x00070041,
147 0x00000016, 0x00000025, 0x00000022, 0x00000015, 0x00000023, 0x00000024, 0x0004003d, 0x00000006, 0x00000026, 0x00000025,
148 0x0003003e, 0x0000001b, 0x00000026, 0x0004003d, 0x00000006, 0x00000028, 0x00000008, 0x00070041, 0x00000016, 0x0000002a,
149 0x00000022, 0x00000015, 0x00000028, 0x00000029, 0x0004003d, 0x00000006, 0x0000002b, 0x0000002a, 0x0003003e, 0x00000027,
150 0x0000002b, 0x0003003e, 0x0000002d, 0x0000002e, 0x0003003e, 0x0000002f, 0x00000009, 0x000200f9, 0x00000030, 0x000200f8,
151 0x00000030, 0x000400f6, 0x00000032, 0x00000033, 0x00000000, 0x000200f9, 0x00000034, 0x000200f8, 0x00000034, 0x0004003d,
152 0x00000006, 0x00000035, 0x0000002f, 0x00050041, 0x00000016, 0x00000037, 0x00000013, 0x00000036, 0x0004003d, 0x00000006,
153 0x00000038, 0x00000037, 0x000500b0, 0x00000019, 0x00000039, 0x00000035, 0x00000038, 0x000400fa, 0x00000039, 0x00000031,
154 0x00000032, 0x000200f8, 0x00000031, 0x0004003d, 0x00000006, 0x0000003a, 0x0000001b, 0x0004003d, 0x00000006, 0x0000003d,
155 0x0000002f, 0x00050084, 0x00000006, 0x0000003e, 0x0000003c, 0x0000003d, 0x00050080, 0x00000006, 0x0000003f, 0x0000003e,
156 0x00000009, 0x00060041, 0x00000016, 0x00000040, 0x00000013, 0x0000003b, 0x0000003f, 0x0004003d, 0x00000006, 0x00000041,
157 0x00000040, 0x000500aa, 0x00000019, 0x00000042, 0x0000003a, 0x00000041, 0x000300f7, 0x00000044, 0x00000000, 0x000400fa,
158 0x00000042, 0x00000043, 0x00000044, 0x000200f8, 0x00000043, 0x0004003d, 0x00000006, 0x00000045, 0x00000027, 0x0004003d,
159 0x00000006, 0x00000046, 0x0000002f, 0x00050084, 0x00000006, 0x00000047, 0x0000003c, 0x00000046, 0x00050080, 0x00000006,
160 0x00000049, 0x00000047, 0x00000048, 0x00060041, 0x00000016, 0x0000004a, 0x00000013, 0x0000003b, 0x00000049, 0x0004003d,
161 0x00000006, 0x0000004b, 0x0000004a, 0x000500aa, 0x00000019, 0x0000004c, 0x00000045, 0x0000004b, 0x000200f9, 0x00000044,
162 0x000200f8, 0x00000044, 0x000700f5, 0x00000019, 0x0000004d, 0x00000042, 0x00000031, 0x0000004c, 0x00000043, 0x000300f7,
163 0x0000004f, 0x00000000, 0x000400fa, 0x0000004d, 0x0000004e, 0x0000004f, 0x000200f8, 0x0000004e, 0x0003003e, 0x0000002d,
164 0x00000050, 0x000200f9, 0x00000032, 0x000200f8, 0x0000004f, 0x000200f9, 0x00000033, 0x000200f8, 0x00000033, 0x0004003d,
165 0x00000006, 0x00000052, 0x0000002f, 0x00050080, 0x00000006, 0x00000053, 0x00000052, 0x00000024, 0x0003003e, 0x0000002f,
166 0x00000053, 0x000200f9, 0x00000030, 0x000200f8, 0x00000032, 0x0004003d, 0x00000019, 0x00000054, 0x0000002d, 0x000400a8,
167 0x00000019, 0x00000055, 0x00000054, 0x000300f7, 0x00000057, 0x00000000, 0x000400fa, 0x00000055, 0x00000056, 0x00000057,
168 0x000200f8, 0x00000056, 0x00050041, 0x00000016, 0x00000059, 0x00000013, 0x00000058, 0x0004003d, 0x00000006, 0x0000005a,
169 0x00000059, 0x00050080, 0x00000006, 0x0000005b, 0x0000005a, 0x00000048, 0x00050041, 0x00000016, 0x0000005c, 0x00000013,
170 0x00000058, 0x0003003e, 0x0000005c, 0x0000005b, 0x0004003d, 0x00000006, 0x0000005e, 0x0000001b, 0x00050041, 0x00000016,
171 0x0000005f, 0x00000013, 0x0000005d, 0x0003003e, 0x0000005f, 0x0000005e, 0x0004003d, 0x00000006, 0x00000061, 0x00000027,
172 0x00050041, 0x00000016, 0x00000062, 0x00000013, 0x00000060, 0x0003003e, 0x00000062, 0x00000061, 0x0004003d, 0x00000006,
173 0x00000063, 0x00000008, 0x00050041, 0x00000016, 0x00000064, 0x00000013, 0x00000024, 0x0004003d, 0x00000006, 0x00000065,
174 0x00000064, 0x00070041, 0x00000016, 0x00000066, 0x00000022, 0x00000015, 0x00000063, 0x00000024, 0x0003003e, 0x00000066,
175 0x00000065, 0x0004003d, 0x00000006, 0x00000067, 0x00000008, 0x00050041, 0x00000016, 0x00000068, 0x00000013, 0x00000029,
176 0x0004003d, 0x00000006, 0x00000069, 0x00000068, 0x00070041, 0x00000016, 0x0000006a, 0x00000022, 0x00000015, 0x00000067,
177 0x00000029, 0x0003003e, 0x0000006a, 0x00000069, 0x000200f9, 0x00000057, 0x000200f8, 0x00000057, 0x000200f9, 0x0000000d,
178 0x000200f8, 0x0000000d, 0x0004003d, 0x00000006, 0x0000006b, 0x00000008, 0x00050080, 0x00000006, 0x0000006c, 0x0000006b,
179 0x00000024, 0x0003003e, 0x00000008, 0x0000006c, 0x000200f9, 0x0000000a, 0x000200f8, 0x0000000c, 0x000100fd, 0x00010038};
180
Karl Schultz7b024b42018-08-30 16:18:18 -0600181// Convenience function for reporting problems with setting up GPU Validation.
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700182template <typename T>
183void GpuAssisted::ReportSetupProblem(T object, const char *const specific_message) const {
184 LogError(object, "UNASSIGNED-GPU-Assisted Validation Error. ", "Detail: (%s)", specific_message);
Karl Schultz7b024b42018-08-30 16:18:18 -0600185}
186
Tony-LunarG5c38b182020-06-10 16:15:32 -0600187bool GpuAssisted::CheckForDescriptorIndexing(DeviceFeatures enabled_features) const {
188 bool result =
189 (IsExtEnabled(device_extensions.vk_ext_descriptor_indexing) &&
190 (enabled_features.core12.descriptorIndexing || enabled_features.core12.shaderInputAttachmentArrayDynamicIndexing ||
191 enabled_features.core12.shaderUniformTexelBufferArrayDynamicIndexing ||
192 enabled_features.core12.shaderStorageTexelBufferArrayDynamicIndexing ||
193 enabled_features.core12.shaderUniformBufferArrayNonUniformIndexing ||
194 enabled_features.core12.shaderSampledImageArrayNonUniformIndexing ||
195 enabled_features.core12.shaderStorageBufferArrayNonUniformIndexing ||
196 enabled_features.core12.shaderStorageImageArrayNonUniformIndexing ||
197 enabled_features.core12.shaderInputAttachmentArrayNonUniformIndexing ||
198 enabled_features.core12.shaderUniformTexelBufferArrayNonUniformIndexing ||
199 enabled_features.core12.shaderStorageTexelBufferArrayNonUniformIndexing ||
200 enabled_features.core12.descriptorBindingUniformBufferUpdateAfterBind ||
201 enabled_features.core12.descriptorBindingSampledImageUpdateAfterBind ||
202 enabled_features.core12.descriptorBindingStorageImageUpdateAfterBind ||
203 enabled_features.core12.descriptorBindingStorageBufferUpdateAfterBind ||
204 enabled_features.core12.descriptorBindingUniformTexelBufferUpdateAfterBind ||
205 enabled_features.core12.descriptorBindingStorageTexelBufferUpdateAfterBind ||
206 enabled_features.core12.descriptorBindingUpdateUnusedWhilePending ||
207 enabled_features.core12.descriptorBindingPartiallyBound ||
208 enabled_features.core12.descriptorBindingVariableDescriptorCount || enabled_features.core12.runtimeDescriptorArray));
209 return result;
210}
211
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600212void GpuAssisted::PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
213 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer, void *cb_state_data) {
    // Ray tracing acceleration structure instance buffers also need the storage buffer usage, because
    // acceleration structure build validation finds and replaces invalid acceleration structure handles
    // inside a compute shader.
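    // For example (illustrative usage values, not taken from a real capture): an application request of
    //     usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV | VK_BUFFER_USAGE_TRANSFER_DST_BIT
    // is forwarded to the driver as
    //     usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
    // so the validation compute shader can bind the instance data as a storage buffer.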
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600217 create_buffer_api_state *cb_state = reinterpret_cast<create_buffer_api_state *>(cb_state_data);
218 if (cb_state && cb_state->modified_create_info.usage & VK_BUFFER_USAGE_RAY_TRACING_BIT_NV) {
219 cb_state->modified_create_info.usage |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
Jason Macnak83cfd582019-07-31 10:14:24 -0700220 }
221}
222
Karl Schultz7b024b42018-08-30 16:18:18 -0600223// Turn on necessary device features.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600224void GpuAssisted::PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *create_info,
225 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
Mark Lobodzinskib588cd42020-09-30 11:03:34 -0600226 void *modified_create_info) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600227 DispatchGetPhysicalDeviceFeatures(gpu, &supported_features);
Tony-LunarG1dce2392019-10-23 16:49:29 -0600228 VkPhysicalDeviceFeatures features = {};
229 features.vertexPipelineStoresAndAtomics = true;
230 features.fragmentStoresAndAtomics = true;
231 features.shaderInt64 = true;
Mark Lobodzinskib588cd42020-09-30 11:03:34 -0600232 UtilPreCallRecordCreateDevice(gpu, reinterpret_cast<safe_VkDeviceCreateInfo *>(modified_create_info), supported_features,
233 features);
Karl Schultz7b024b42018-08-30 16:18:18 -0600234}
Karl Schultz7b024b42018-08-30 16:18:18 -0600235// Perform initializations that can be done at Create Device time.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600236void GpuAssisted::PostCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
237 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
238 // The state tracker sets up the device state
Tony-LunarG99b880b2019-09-26 11:19:52 -0600239 ValidationStateTracker::PostCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice, result);
Mark Lobodzinski5dc3dcd2019-04-23 14:26:28 -0600240
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600241 ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
242 ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
243 GpuAssisted *device_gpu_assisted = static_cast<GpuAssisted *>(validation_data);
Tony-LunarG65f9c492019-01-17 14:24:42 -0700244
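    // With robust buffer access enabled, out-of-bounds buffer accesses already have defined behavior, so
    // the buffer out-of-bounds (OOB) checking is redundant and is turned off. Otherwise the
    // khronos_validation.gpuav_buffer_oob layer setting decides, defaulting to enabled.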
Tony-LunarGc28e28a2020-08-14 10:37:48 -0600245 const char *bufferoob_string = getLayerOption("khronos_validation.gpuav_buffer_oob");
246 if (device_gpu_assisted->enabled_features.core.robustBufferAccess ||
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700247 device_gpu_assisted->enabled_features.robustness2_features.robustBufferAccess2) {
Tony-LunarGc28e28a2020-08-14 10:37:48 -0600248 device_gpu_assisted->buffer_oob_enabled = false;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700249 } else {
Tony-LunarGc28e28a2020-08-14 10:37:48 -0600250 device_gpu_assisted->buffer_oob_enabled = *bufferoob_string ? !strcmp(bufferoob_string, "true") : true;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700251 }
Tony-LunarGc28e28a2020-08-14 10:37:48 -0600252
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600253 if (device_gpu_assisted->phys_dev_props.apiVersion < VK_API_VERSION_1_1) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700254 ReportSetupProblem(device, "GPU-Assisted validation requires Vulkan 1.1 or later. GPU-Assisted Validation disabled.");
Tony-LunarG99b880b2019-09-26 11:19:52 -0600255 device_gpu_assisted->aborted = true;
Karl Schultz7b024b42018-08-30 16:18:18 -0600256 return;
257 }
Tony-LunarG2ab9ede2019-05-10 14:34:31 -0600258
Tony-LunarG04dc83c2020-07-07 13:53:02 -0600259 if (!supported_features.fragmentStoresAndAtomics || !supported_features.vertexPipelineStoresAndAtomics) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700260 ReportSetupProblem(device,
Tony-LunarG7c668ab2019-08-28 16:13:01 -0600261 "GPU-Assisted validation requires fragmentStoresAndAtomics and vertexPipelineStoresAndAtomics. "
262 "GPU-Assisted Validation disabled.");
Tony-LunarG99b880b2019-09-26 11:19:52 -0600263 device_gpu_assisted->aborted = true;
Tony-LunarG7c668ab2019-08-28 16:13:01 -0600264 return;
265 }
266
Tony-LunarG7e0842f2019-12-10 09:26:34 -0700267 if ((device_extensions.vk_ext_buffer_device_address || device_extensions.vk_khr_buffer_device_address) &&
Tony-LunarG04dc83c2020-07-07 13:53:02 -0600268 !supported_features.shaderInt64) {
Mark Lobodzinskiafa7cb82020-01-29 16:49:36 -0700269 LogWarning(device, "UNASSIGNED-GPU-Assisted Validation Warning",
270 "shaderInt64 feature is not available. No buffer device address checking will be attempted");
Tony-LunarG8eb5a002019-07-25 16:49:00 -0600271 }
Tony-LunarG04dc83c2020-07-07 13:53:02 -0600272 device_gpu_assisted->shaderInt64 = supported_features.shaderInt64;
Tony-LunarG1dce2392019-10-23 16:49:29 -0600273 device_gpu_assisted->physicalDevice = physicalDevice;
274 device_gpu_assisted->device = *pDevice;
Tony-LunarGbb145f32020-04-27 13:41:29 -0600275 device_gpu_assisted->output_buffer_size = sizeof(uint32_t) * (spvtools::kInstMaxOutCnt + 1);
Tony-LunarG5c38b182020-06-10 16:15:32 -0600276 device_gpu_assisted->descriptor_indexing = CheckForDescriptorIndexing(device_gpu_assisted->enabled_features);
Tony-LunarG1dce2392019-10-23 16:49:29 -0600277 std::vector<VkDescriptorSetLayoutBinding> bindings;
278 VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1,
Tony-LunarGc7ed2082020-06-11 14:00:04 -0600279 VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT |
280 VK_SHADER_STAGE_MESH_BIT_NV | VK_SHADER_STAGE_TASK_BIT_NV |
281 kShaderStageAllRayTracing,
Tony-LunarG1dce2392019-10-23 16:49:29 -0600282 NULL};
283 bindings.push_back(binding);
284 for (auto i = 1; i < 3; i++) {
285 binding.binding = i;
286 bindings.push_back(binding);
Karl Schultz7b024b42018-08-30 16:18:18 -0600287 }
Tony-LunarGb5fae462020-03-05 12:43:25 -0700288 UtilPostCallRecordCreateDevice(pCreateInfo, bindings, device_gpu_assisted, device_gpu_assisted->phys_dev_props);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600289 CreateAccelerationStructureBuildValidationState(device_gpu_assisted);
Karl Schultz7b024b42018-08-30 16:18:18 -0600290}
291
Mike Schuchardt2df08912020-12-15 16:28:09 -0800292void GpuAssisted::PostCallRecordGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
Tony-LunarG588c7052020-04-23 10:47:21 -0600293 VkDeviceAddress address) {
Tony-LunarG8eb5a002019-07-25 16:49:00 -0600294 BUFFER_STATE *buffer_state = GetBufferState(pInfo->buffer);
295 // Validate against the size requested when the buffer was created
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600296 if (buffer_state) {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600297 buffer_map[address] = buffer_state->createInfo.size;
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600298 buffer_state->deviceAddress = address;
299 }
Tony-LunarG8eb5a002019-07-25 16:49:00 -0600300}
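
// Conceptual sketch (the recorded pairs are consumed elsewhere in GPU-AV, not here): a device address
// 'addr' referenced by an instrumented shader is considered in bounds when some buffer recorded above,
// with base address 'base', satisfies
//     base <= addr && addr < base + buffer_map[base]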
301
Mike Schuchardt2df08912020-12-15 16:28:09 -0800302void GpuAssisted::PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
Tony-LunarG588c7052020-04-23 10:47:21 -0600303 VkDeviceAddress address) {
304 PostCallRecordGetBufferDeviceAddress(device, pInfo, address);
305}
306
Mike Schuchardt2df08912020-12-15 16:28:09 -0800307void GpuAssisted::PostCallRecordGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo *pInfo,
Tony-LunarG7e0842f2019-12-10 09:26:34 -0700308 VkDeviceAddress address) {
Tony-LunarG588c7052020-04-23 10:47:21 -0600309 PostCallRecordGetBufferDeviceAddress(device, pInfo, address);
Tony-LunarG7e0842f2019-12-10 09:26:34 -0700310}
311
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600312void GpuAssisted::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
Tony-LunarG8eb5a002019-07-25 16:49:00 -0600313 BUFFER_STATE *buffer_state = GetBufferState(buffer);
Tony-LunarG99b880b2019-09-26 11:19:52 -0600314 if (buffer_state) buffer_map.erase(buffer_state->deviceAddress);
Tony-LunarG2966c732020-05-21 10:33:53 -0600315 ValidationStateTracker::PreCallRecordDestroyBuffer(device, buffer, pAllocator);
Tony-LunarG8eb5a002019-07-25 16:49:00 -0600316}
Tony-LunarG1dce2392019-10-23 16:49:29 -0600317
Karl Schultz7b024b42018-08-30 16:18:18 -0600318// Clean up device-related resources
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600319void GpuAssisted::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600320 DestroyAccelerationStructureBuildValidationState();
Tony-LunarGb5fae462020-03-05 12:43:25 -0700321 UtilPreCallRecordDestroyDevice(this);
Tony-LunarG2966c732020-05-21 10:33:53 -0600322 ValidationStateTracker::PreCallRecordDestroyDevice(device, pAllocator);
Tony-LunarG0a863bc2020-09-16 09:50:04 -0600323 // State Tracker can end up making vma calls through callbacks - don't destroy allocator until ST is done
324 if (vmaAllocator) {
325 vmaDestroyAllocator(vmaAllocator);
326 }
327 desc_set_manager.reset();
Karl Schultz7b024b42018-08-30 16:18:18 -0600328}
Tony-LunarG1dce2392019-10-23 16:49:29 -0600329
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600330void GpuAssisted::CreateAccelerationStructureBuildValidationState(GpuAssisted *device_gpuav) {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600331 if (device_gpuav->aborted) {
Jason Macnak83cfd582019-07-31 10:14:24 -0700332 return;
333 }
334
Tony-LunarG99b880b2019-09-26 11:19:52 -0600335 auto &as_validation_state = device_gpuav->acceleration_structure_validation_state;
Jason Macnak83cfd582019-07-31 10:14:24 -0700336 if (as_validation_state.initialized) {
337 return;
338 }
339
340 if (!device_extensions.vk_nv_ray_tracing) {
341 return;
342 }
343
344 // Outline:
345 // - Create valid bottom level acceleration structure which acts as replacement
346 // - Create and load vertex buffer
347 // - Create and load index buffer
348 // - Create, allocate memory for, and bind memory for acceleration structure
349 // - Query acceleration structure handle
350 // - Create command pool and command buffer
351 // - Record build acceleration structure command
352 // - Submit command buffer and wait for completion
353 // - Cleanup
354 // - Create compute pipeline for validating instance buffers
355 // - Create descriptor set layout
356 // - Create pipeline layout
357 // - Create pipeline
358 // - Cleanup
359
360 VkResult result = VK_SUCCESS;
361
362 VkBuffer vbo = VK_NULL_HANDLE;
363 VmaAllocation vbo_allocation = VK_NULL_HANDLE;
364 if (result == VK_SUCCESS) {
365 VkBufferCreateInfo vbo_ci = {};
366 vbo_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
367 vbo_ci.size = sizeof(float) * 9;
368 vbo_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
369
370 VmaAllocationCreateInfo vbo_ai = {};
371 vbo_ai.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
372 vbo_ai.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
373
Tony-LunarG99b880b2019-09-26 11:19:52 -0600374 result = vmaCreateBuffer(device_gpuav->vmaAllocator, &vbo_ci, &vbo_ai, &vbo, &vbo_allocation, nullptr);
Jason Macnak83cfd582019-07-31 10:14:24 -0700375 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700376 ReportSetupProblem(device, "Failed to create vertex buffer for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700377 }
378 }
379
380 if (result == VK_SUCCESS) {
381 uint8_t *mapped_vbo_buffer = nullptr;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700382 result = vmaMapMemory(device_gpuav->vmaAllocator, vbo_allocation, reinterpret_cast<void **>(&mapped_vbo_buffer));
Jason Macnak83cfd582019-07-31 10:14:24 -0700383 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700384 ReportSetupProblem(device, "Failed to map vertex buffer for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700385 } else {
386 const std::vector<float> vertices = {1.0f, 0.0f, 0.0f, 0.5f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f};
387 std::memcpy(mapped_vbo_buffer, (uint8_t *)vertices.data(), sizeof(float) * vertices.size());
Tony-LunarG99b880b2019-09-26 11:19:52 -0600388 vmaUnmapMemory(device_gpuav->vmaAllocator, vbo_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700389 }
390 }
391
392 VkBuffer ibo = VK_NULL_HANDLE;
393 VmaAllocation ibo_allocation = VK_NULL_HANDLE;
394 if (result == VK_SUCCESS) {
395 VkBufferCreateInfo ibo_ci = {};
396 ibo_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
397 ibo_ci.size = sizeof(uint32_t) * 3;
398 ibo_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
399
400 VmaAllocationCreateInfo ibo_ai = {};
401 ibo_ai.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
402 ibo_ai.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
403
Tony-LunarG99b880b2019-09-26 11:19:52 -0600404 result = vmaCreateBuffer(device_gpuav->vmaAllocator, &ibo_ci, &ibo_ai, &ibo, &ibo_allocation, nullptr);
Jason Macnak83cfd582019-07-31 10:14:24 -0700405 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700406 ReportSetupProblem(device, "Failed to create index buffer for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700407 }
408 }
409
410 if (result == VK_SUCCESS) {
411 uint8_t *mapped_ibo_buffer = nullptr;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700412 result = vmaMapMemory(device_gpuav->vmaAllocator, ibo_allocation, reinterpret_cast<void **>(&mapped_ibo_buffer));
Jason Macnak83cfd582019-07-31 10:14:24 -0700413 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700414 ReportSetupProblem(device, "Failed to map index buffer for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700415 } else {
            const std::vector<uint32_t> indices = {0, 1, 2};
            std::memcpy(mapped_ibo_buffer, (uint8_t *)indices.data(), sizeof(uint32_t) * indices.size());
Tony-LunarG99b880b2019-09-26 11:19:52 -0600418 vmaUnmapMemory(device_gpuav->vmaAllocator, ibo_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700419 }
420 }
421
422 VkGeometryNV geometry = {};
423 geometry.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
424 geometry.geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
425 geometry.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
426 geometry.geometry.triangles.vertexData = vbo;
427 geometry.geometry.triangles.vertexOffset = 0;
428 geometry.geometry.triangles.vertexCount = 3;
429 geometry.geometry.triangles.vertexStride = 12;
430 geometry.geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
431 geometry.geometry.triangles.indexData = ibo;
432 geometry.geometry.triangles.indexOffset = 0;
433 geometry.geometry.triangles.indexCount = 3;
434 geometry.geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
435 geometry.geometry.triangles.transformData = VK_NULL_HANDLE;
436 geometry.geometry.triangles.transformOffset = 0;
437 geometry.geometry.aabbs = {};
438 geometry.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
439
440 VkAccelerationStructureCreateInfoNV as_ci = {};
441 as_ci.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
442 as_ci.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
443 as_ci.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
444 as_ci.info.instanceCount = 0;
445 as_ci.info.geometryCount = 1;
446 as_ci.info.pGeometries = &geometry;
447 if (result == VK_SUCCESS) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600448 result = DispatchCreateAccelerationStructureNV(device_gpuav->device, &as_ci, nullptr, &as_validation_state.replacement_as);
Jason Macnak83cfd582019-07-31 10:14:24 -0700449 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700450 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700451 "Failed to create acceleration structure for acceleration structure build validation.");
452 }
453 }
454
455 VkMemoryRequirements2 as_mem_requirements = {};
456 if (result == VK_SUCCESS) {
457 VkAccelerationStructureMemoryRequirementsInfoNV as_mem_requirements_info = {};
458 as_mem_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
459 as_mem_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
460 as_mem_requirements_info.accelerationStructure = as_validation_state.replacement_as;
461
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600462 DispatchGetAccelerationStructureMemoryRequirementsNV(device_gpuav->device, &as_mem_requirements_info, &as_mem_requirements);
Jason Macnak83cfd582019-07-31 10:14:24 -0700463 }
464
465 VmaAllocationInfo as_memory_ai = {};
466 if (result == VK_SUCCESS) {
467 VmaAllocationCreateInfo as_memory_aci = {};
468 as_memory_aci.usage = VMA_MEMORY_USAGE_GPU_ONLY;
469
Tony-LunarG99b880b2019-09-26 11:19:52 -0600470 result = vmaAllocateMemory(device_gpuav->vmaAllocator, &as_mem_requirements.memoryRequirements, &as_memory_aci,
471 &as_validation_state.replacement_as_allocation, &as_memory_ai);
Jason Macnak83cfd582019-07-31 10:14:24 -0700472 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700473 ReportSetupProblem(device_gpuav->device,
                               "Failed to allocate acceleration structure memory for acceleration structure build validation.");
475 }
476 }
477
478 if (result == VK_SUCCESS) {
479 VkBindAccelerationStructureMemoryInfoNV as_bind_info = {};
480 as_bind_info.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
481 as_bind_info.accelerationStructure = as_validation_state.replacement_as;
482 as_bind_info.memory = as_memory_ai.deviceMemory;
483 as_bind_info.memoryOffset = as_memory_ai.offset;
484
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600485 result = DispatchBindAccelerationStructureMemoryNV(device_gpuav->device, 1, &as_bind_info);
Jason Macnak83cfd582019-07-31 10:14:24 -0700486 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700487 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700488 "Failed to bind acceleration structure memory for acceleration structure build validation.");
489 }
490 }
491
492 if (result == VK_SUCCESS) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600493 result = DispatchGetAccelerationStructureHandleNV(device_gpuav->device, as_validation_state.replacement_as,
494 sizeof(uint64_t), &as_validation_state.replacement_as_handle);
Jason Macnak83cfd582019-07-31 10:14:24 -0700495 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700496 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700497 "Failed to get acceleration structure handle for acceleration structure build validation.");
498 }
499 }
500
501 VkMemoryRequirements2 scratch_mem_requirements = {};
502 if (result == VK_SUCCESS) {
503 VkAccelerationStructureMemoryRequirementsInfoNV scratch_mem_requirements_info = {};
504 scratch_mem_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
505 scratch_mem_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
506 scratch_mem_requirements_info.accelerationStructure = as_validation_state.replacement_as;
507
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600508 DispatchGetAccelerationStructureMemoryRequirementsNV(device_gpuav->device, &scratch_mem_requirements_info,
509 &scratch_mem_requirements);
Jason Macnak83cfd582019-07-31 10:14:24 -0700510 }
511
512 VkBuffer scratch = VK_NULL_HANDLE;
Tony-LunarG18900282020-05-20 12:34:33 -0600513 VmaAllocation scratch_allocation = {};
Jason Macnak83cfd582019-07-31 10:14:24 -0700514 if (result == VK_SUCCESS) {
515 VkBufferCreateInfo scratch_ci = {};
516 scratch_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
517 scratch_ci.size = scratch_mem_requirements.memoryRequirements.size;
518 scratch_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
Jason Macnak83cfd582019-07-31 10:14:24 -0700519 VmaAllocationCreateInfo scratch_aci = {};
520 scratch_aci.usage = VMA_MEMORY_USAGE_GPU_ONLY;
521
Tony-LunarG18900282020-05-20 12:34:33 -0600522 result = vmaCreateBuffer(device_gpuav->vmaAllocator, &scratch_ci, &scratch_aci, &scratch, &scratch_allocation, nullptr);
Jason Macnak83cfd582019-07-31 10:14:24 -0700523 if (result != VK_SUCCESS) {
Tony-LunarG18900282020-05-20 12:34:33 -0600524 ReportSetupProblem(device_gpuav->device,
525 "Failed to create scratch buffer for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700526 }
527 }
528
529 VkCommandPool command_pool = VK_NULL_HANDLE;
530 if (result == VK_SUCCESS) {
531 VkCommandPoolCreateInfo command_pool_ci = {};
532 command_pool_ci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
533 command_pool_ci.queueFamilyIndex = 0;
534
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600535 result = DispatchCreateCommandPool(device_gpuav->device, &command_pool_ci, nullptr, &command_pool);
Jason Macnak83cfd582019-07-31 10:14:24 -0700536 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700537 ReportSetupProblem(device_gpuav->device, "Failed to create command pool for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700538 }
539 }
540
541 VkCommandBuffer command_buffer = VK_NULL_HANDLE;
542
543 if (result == VK_SUCCESS) {
544 VkCommandBufferAllocateInfo command_buffer_ai = {};
545 command_buffer_ai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
546 command_buffer_ai.commandPool = command_pool;
547 command_buffer_ai.commandBufferCount = 1;
548 command_buffer_ai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
549
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600550 result = DispatchAllocateCommandBuffers(device_gpuav->device, &command_buffer_ai, &command_buffer);
Jason Macnak83cfd582019-07-31 10:14:24 -0700551 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700552 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700553 "Failed to create command buffer for acceleration structure build validation.");
554 }
555
556 // Hook up command buffer dispatch
Tony-LunarG99b880b2019-09-26 11:19:52 -0600557 device_gpuav->vkSetDeviceLoaderData(device_gpuav->device, command_buffer);
Jason Macnak83cfd582019-07-31 10:14:24 -0700558 }
559
560 if (result == VK_SUCCESS) {
561 VkCommandBufferBeginInfo command_buffer_bi = {};
562 command_buffer_bi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
563
564 result = DispatchBeginCommandBuffer(command_buffer, &command_buffer_bi);
565 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700566 ReportSetupProblem(device_gpuav->device, "Failed to begin command buffer for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700567 }
568 }
569
570 if (result == VK_SUCCESS) {
571 DispatchCmdBuildAccelerationStructureNV(command_buffer, &as_ci.info, VK_NULL_HANDLE, 0, VK_FALSE,
572 as_validation_state.replacement_as, VK_NULL_HANDLE, scratch, 0);
573 DispatchEndCommandBuffer(command_buffer);
574 }
575
576 VkQueue queue = VK_NULL_HANDLE;
577 if (result == VK_SUCCESS) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600578 DispatchGetDeviceQueue(device_gpuav->device, 0, 0, &queue);
Jason Macnak83cfd582019-07-31 10:14:24 -0700579
580 // Hook up queue dispatch
Tony-LunarG99b880b2019-09-26 11:19:52 -0600581 device_gpuav->vkSetDeviceLoaderData(device_gpuav->device, queue);
Jason Macnak83cfd582019-07-31 10:14:24 -0700582
583 VkSubmitInfo submit_info = {};
584 submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
585 submit_info.commandBufferCount = 1;
586 submit_info.pCommandBuffers = &command_buffer;
587 result = DispatchQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
588 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700589 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700590 "Failed to submit command buffer for acceleration structure build validation.");
591 }
592 }
593
594 if (result == VK_SUCCESS) {
595 result = DispatchQueueWaitIdle(queue);
596 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700597 ReportSetupProblem(device_gpuav->device, "Failed to wait for queue idle for acceleration structure build validation.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700598 }
599 }
600
601 if (vbo != VK_NULL_HANDLE) {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600602 vmaDestroyBuffer(device_gpuav->vmaAllocator, vbo, vbo_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700603 }
604 if (ibo != VK_NULL_HANDLE) {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600605 vmaDestroyBuffer(device_gpuav->vmaAllocator, ibo, ibo_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700606 }
607 if (scratch != VK_NULL_HANDLE) {
Tony-LunarG18900282020-05-20 12:34:33 -0600608 vmaDestroyBuffer(device_gpuav->vmaAllocator, scratch, scratch_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700609 }
610 if (command_pool != VK_NULL_HANDLE) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600611 DispatchDestroyCommandPool(device_gpuav->device, command_pool, nullptr);
Jason Macnak83cfd582019-07-31 10:14:24 -0700612 }
613
Tony-LunarG99b880b2019-09-26 11:19:52 -0600614 if (device_gpuav->debug_desc_layout == VK_NULL_HANDLE) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700615 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700616 "Failed to find descriptor set layout for acceleration structure build validation.");
617 result = VK_INCOMPLETE;
618 }
619
620 if (result == VK_SUCCESS) {
621 VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
622 pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
623 pipeline_layout_ci.setLayoutCount = 1;
Tony-LunarG99b880b2019-09-26 11:19:52 -0600624 pipeline_layout_ci.pSetLayouts = &device_gpuav->debug_desc_layout;
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600625 result = DispatchCreatePipelineLayout(device_gpuav->device, &pipeline_layout_ci, 0, &as_validation_state.pipeline_layout);
Jason Macnak83cfd582019-07-31 10:14:24 -0700626 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700627 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700628 "Failed to create pipeline layout for acceleration structure build validation.");
629 }
630 }
631
632 VkShaderModule shader_module = VK_NULL_HANDLE;
633 if (result == VK_SUCCESS) {
634 VkShaderModuleCreateInfo shader_module_ci = {};
635 shader_module_ci.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
636 shader_module_ci.codeSize = sizeof(kComputeShaderSpirv);
637 shader_module_ci.pCode = (uint32_t *)kComputeShaderSpirv;
638
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600639 result = DispatchCreateShaderModule(device_gpuav->device, &shader_module_ci, nullptr, &shader_module);
Jason Macnak83cfd582019-07-31 10:14:24 -0700640 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700641 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700642 "Failed to create compute shader module for acceleration structure build validation.");
643 }
644 }
645
646 if (result == VK_SUCCESS) {
647 VkPipelineShaderStageCreateInfo pipeline_stage_ci = {};
648 pipeline_stage_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
649 pipeline_stage_ci.stage = VK_SHADER_STAGE_COMPUTE_BIT;
650 pipeline_stage_ci.module = shader_module;
651 pipeline_stage_ci.pName = "main";
652
653 VkComputePipelineCreateInfo pipeline_ci = {};
654 pipeline_ci.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
655 pipeline_ci.stage = pipeline_stage_ci;
656 pipeline_ci.layout = as_validation_state.pipeline_layout;
657
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600658 result = DispatchCreateComputePipelines(device_gpuav->device, VK_NULL_HANDLE, 1, &pipeline_ci, nullptr,
659 &as_validation_state.pipeline);
Jason Macnak83cfd582019-07-31 10:14:24 -0700660 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700661 ReportSetupProblem(device_gpuav->device,
Jason Macnak83cfd582019-07-31 10:14:24 -0700662 "Failed to create compute pipeline for acceleration structure build validation.");
663 }
664 }
665
666 if (shader_module != VK_NULL_HANDLE) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600667 DispatchDestroyShaderModule(device_gpuav->device, shader_module, nullptr);
Jason Macnak83cfd582019-07-31 10:14:24 -0700668 }
669
670 if (result == VK_SUCCESS) {
671 as_validation_state.initialized = true;
Mark Lobodzinskiafa7cb82020-01-29 16:49:36 -0700672 LogInfo(device_gpuav->device, "UNASSIGNED-GPU-Assisted Validation.",
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600673 "Acceleration Structure Building GPU Validation Enabled.");
Jason Macnak83cfd582019-07-31 10:14:24 -0700674 } else {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600675 device_gpuav->aborted = true;
Jason Macnak83cfd582019-07-31 10:14:24 -0700676 }
677}
678
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600679void GpuAssisted::DestroyAccelerationStructureBuildValidationState() {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600680 auto &as_validation_state = acceleration_structure_validation_state;
Jason Macnak83cfd582019-07-31 10:14:24 -0700681 if (as_validation_state.pipeline != VK_NULL_HANDLE) {
682 DispatchDestroyPipeline(device, as_validation_state.pipeline, nullptr);
683 }
684 if (as_validation_state.pipeline_layout != VK_NULL_HANDLE) {
685 DispatchDestroyPipelineLayout(device, as_validation_state.pipeline_layout, nullptr);
686 }
687 if (as_validation_state.replacement_as != VK_NULL_HANDLE) {
688 DispatchDestroyAccelerationStructureNV(device, as_validation_state.replacement_as, nullptr);
689 }
690 if (as_validation_state.replacement_as_allocation != VK_NULL_HANDLE) {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600691 vmaFreeMemory(vmaAllocator, as_validation_state.replacement_as_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700692 }
693}
694
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600695struct GPUAV_RESTORABLE_PIPELINE_STATE {
Jason Macnak83cfd582019-07-31 10:14:24 -0700696 VkPipelineBindPoint pipeline_bind_point = VK_PIPELINE_BIND_POINT_MAX_ENUM;
697 VkPipeline pipeline = VK_NULL_HANDLE;
698 VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
699 std::vector<VkDescriptorSet> descriptor_sets;
700 std::vector<std::vector<uint32_t>> dynamic_offsets;
701 uint32_t push_descriptor_set_index = 0;
702 std::vector<safe_VkWriteDescriptorSet> push_descriptor_set_writes;
703 std::vector<uint8_t> push_constants_data;
704 PushConstantRangesId push_constants_ranges;
705
706 void Create(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
707 pipeline_bind_point = bind_point;
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600708 const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
Jason Macnak83cfd582019-07-31 10:14:24 -0700709
locke-lunargb8d7a7a2020-10-25 16:01:52 -0600710 LAST_BOUND_STATE &last_bound = cb_state->lastBound[lv_bind_point];
Jason Macnak83cfd582019-07-31 10:14:24 -0700711 if (last_bound.pipeline_state) {
712 pipeline = last_bound.pipeline_state->pipeline;
713 pipeline_layout = last_bound.pipeline_layout;
714 descriptor_sets.reserve(last_bound.per_set.size());
715 for (std::size_t i = 0; i < last_bound.per_set.size(); i++) {
716 const auto *bound_descriptor_set = last_bound.per_set[i].bound_descriptor_set;
717
718 descriptor_sets.push_back(bound_descriptor_set->GetSet());
719 if (bound_descriptor_set->IsPushDescriptor()) {
720 push_descriptor_set_index = static_cast<uint32_t>(i);
721 }
722 dynamic_offsets.push_back(last_bound.per_set[i].dynamicOffsets);
723 }
724
725 if (last_bound.push_descriptor_set) {
726 push_descriptor_set_writes = last_bound.push_descriptor_set->GetWrites();
727 }
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500728 if (last_bound.pipeline_state->pipeline_layout->push_constant_ranges == cb_state->push_constant_data_ranges) {
Jason Macnak83cfd582019-07-31 10:14:24 -0700729 push_constants_data = cb_state->push_constant_data;
Jeff Bolze7fc67b2019-10-04 12:29:31 -0500730 push_constants_ranges = last_bound.pipeline_state->pipeline_layout->push_constant_ranges;
Jason Macnak83cfd582019-07-31 10:14:24 -0700731 }
732 }
733 }
734
735 void Restore(VkCommandBuffer command_buffer) const {
736 if (pipeline != VK_NULL_HANDLE) {
737 DispatchCmdBindPipeline(command_buffer, pipeline_bind_point, pipeline);
738 if (!descriptor_sets.empty()) {
739 for (std::size_t i = 0; i < descriptor_sets.size(); i++) {
740 VkDescriptorSet descriptor_set = descriptor_sets[i];
741 if (descriptor_set != VK_NULL_HANDLE) {
742 DispatchCmdBindDescriptorSets(command_buffer, pipeline_bind_point, pipeline_layout,
743 static_cast<uint32_t>(i), 1, &descriptor_set,
744 static_cast<uint32_t>(dynamic_offsets[i].size()), dynamic_offsets[i].data());
745 }
746 }
747 }
748 if (!push_descriptor_set_writes.empty()) {
749 DispatchCmdPushDescriptorSetKHR(command_buffer, pipeline_bind_point, pipeline_layout, push_descriptor_set_index,
750 static_cast<uint32_t>(push_descriptor_set_writes.size()),
751 reinterpret_cast<const VkWriteDescriptorSet *>(push_descriptor_set_writes.data()));
752 }
753 for (const auto &push_constant_range : *push_constants_ranges) {
754 if (push_constant_range.size == 0) continue;
755 DispatchCmdPushConstants(command_buffer, pipeline_layout, push_constant_range.stageFlags,
756 push_constant_range.offset, push_constant_range.size, push_constants_data.data());
757 }
758 }
759 }
760};
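
// Typical usage, as in PreCallRecordCmdBuildAccelerationStructureNV below: capture whatever compute
// state the application has bound, run the validation dispatch, then put the application's state back.
//
//     GPUAV_RESTORABLE_PIPELINE_STATE restorable_state;
//     restorable_state.Create(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
//     ...bind and dispatch the acceleration structure validation pipeline...
//     restorable_state.Restore(commandBuffer);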
761
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600762void GpuAssisted::PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
763 const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
764 VkDeviceSize instanceOffset, VkBool32 update,
765 VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
766 VkBuffer scratch, VkDeviceSize scratchOffset) {
Jason Macnak83cfd582019-07-31 10:14:24 -0700767 if (pInfo == nullptr || pInfo->type != VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV) {
768 return;
769 }
770
Tony-LunarG99b880b2019-09-26 11:19:52 -0600771 auto &as_validation_state = acceleration_structure_validation_state;
Jason Macnak83cfd582019-07-31 10:14:24 -0700772 if (!as_validation_state.initialized) {
773 return;
774 }
775
776 // Empty acceleration structure is valid according to the spec.
777 if (pInfo->instanceCount == 0 || instanceData == VK_NULL_HANDLE) {
778 return;
779 }
780
781 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
782 assert(cb_state != nullptr);
783
784 std::vector<uint64_t> current_valid_handles;
785 for (const auto &as_state_kv : accelerationStructureMap) {
786 const ACCELERATION_STRUCTURE_STATE &as_state = *as_state_kv.second;
Jeff Bolz95176d02020-04-01 00:36:16 -0500787 if (as_state.built && as_state.create_infoNV.info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
Jason Macnak83cfd582019-07-31 10:14:24 -0700788 current_valid_handles.push_back(as_state.opaque_handle);
789 }
790 }
791
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600792 GpuAssistedAccelerationStructureBuildValidationBufferInfo as_validation_buffer_info = {};
Jason Macnak83cfd582019-07-31 10:14:24 -0700793 as_validation_buffer_info.acceleration_structure = dst;
794
795 const VkDeviceSize validation_buffer_size =
796 // One uint for number of instances to validate
797 4 +
798 // Two uint for the replacement acceleration structure handle
799 8 +
800 // One uint for number of invalid handles found
801 4 +
802 // Two uint for the first invalid handle found
803 8 +
804 // One uint for the number of current valid handles
805 4 +
806 // Two uint for each current valid handle
807 (8 * current_valid_handles.size());
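    // The fixed-size portion above (4 + 8 + 4 + 8 + 4 = 28 bytes) matches
    // sizeof(GpuAccelerationStructureBuildValidationBuffer); the valid handle words are appended
    // immediately after it.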
808
809 VkBufferCreateInfo validation_buffer_create_info = {};
810 validation_buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
811 validation_buffer_create_info.size = validation_buffer_size;
812 validation_buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
813
814 VmaAllocationCreateInfo validation_buffer_alloc_info = {};
815 validation_buffer_alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
816
Tony-LunarG99b880b2019-09-26 11:19:52 -0600817 VkResult result = vmaCreateBuffer(vmaAllocator, &validation_buffer_create_info, &validation_buffer_alloc_info,
818 &as_validation_buffer_info.validation_buffer,
Jason Macnak83cfd582019-07-31 10:14:24 -0700819 &as_validation_buffer_info.validation_buffer_allocation, nullptr);
820 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700821 ReportSetupProblem(device, "Unable to allocate device memory. Device could become unstable.");
Tony-LunarG99b880b2019-09-26 11:19:52 -0600822 aborted = true;
Jason Macnak83cfd582019-07-31 10:14:24 -0700823 return;
824 }
825
826 GpuAccelerationStructureBuildValidationBuffer *mapped_validation_buffer = nullptr;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700827 result = vmaMapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation,
828 reinterpret_cast<void **>(&mapped_validation_buffer));
Jason Macnak83cfd582019-07-31 10:14:24 -0700829 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700830 ReportSetupProblem(device, "Unable to allocate device memory for acceleration structure build val buffer.");
Tony-LunarG99b880b2019-09-26 11:19:52 -0600831 aborted = true;
Jason Macnak83cfd582019-07-31 10:14:24 -0700832 return;
833 }
834
835 mapped_validation_buffer->instances_to_validate = pInfo->instanceCount;
836 mapped_validation_buffer->replacement_handle_bits_0 =
837 reinterpret_cast<const uint32_t *>(&as_validation_state.replacement_as_handle)[0];
838 mapped_validation_buffer->replacement_handle_bits_1 =
839 reinterpret_cast<const uint32_t *>(&as_validation_state.replacement_as_handle)[1];
840 mapped_validation_buffer->invalid_handle_found = 0;
841 mapped_validation_buffer->invalid_handle_bits_0 = 0;
842 mapped_validation_buffer->invalid_handle_bits_1 = 0;
843 mapped_validation_buffer->valid_handles_count = static_cast<uint32_t>(current_valid_handles.size());
844
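    // Append the current set of valid handles directly after the fixed-size struct. Each 64-bit handle
    // is written as two 32-bit words so it can be compared against valid_handles[2*i+0] and
    // valid_handles[2*i+1] in the validation shader.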
845 uint32_t *mapped_valid_handles = reinterpret_cast<uint32_t *>(&mapped_validation_buffer[1]);
846 for (std::size_t i = 0; i < current_valid_handles.size(); i++) {
847 const uint64_t current_valid_handle = current_valid_handles[i];
848
849 *mapped_valid_handles = reinterpret_cast<const uint32_t *>(&current_valid_handle)[0];
850 ++mapped_valid_handles;
851 *mapped_valid_handles = reinterpret_cast<const uint32_t *>(&current_valid_handle)[1];
852 ++mapped_valid_handles;
853 }
854
Tony-LunarG99b880b2019-09-26 11:19:52 -0600855 vmaUnmapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700856
    static constexpr VkDeviceSize k_instance_size = 64;
858 const VkDeviceSize instance_buffer_size = k_instance_size * pInfo->instanceCount;
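    // 64 bytes per instance matches the VkGeometryInstanceNV layout assumed by the validation shader:
    // 14 unused uint32_t words (56 bytes) followed by the two 32-bit handle words.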
Jason Macnak83cfd582019-07-31 10:14:24 -0700859
Tony-LunarG1dce2392019-10-23 16:49:29 -0600860 result = desc_set_manager->GetDescriptorSet(&as_validation_buffer_info.descriptor_pool, debug_desc_layout,
861 &as_validation_buffer_info.descriptor_set);
Jason Macnak83cfd582019-07-31 10:14:24 -0700862 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700863 ReportSetupProblem(device, "Unable to get descriptor set for acceleration structure build.");
Tony-LunarG99b880b2019-09-26 11:19:52 -0600864 aborted = true;
Jason Macnak83cfd582019-07-31 10:14:24 -0700865 return;
866 }
867
868 VkDescriptorBufferInfo descriptor_buffer_infos[2] = {};
869 descriptor_buffer_infos[0].buffer = instanceData;
870 descriptor_buffer_infos[0].offset = instanceOffset;
871 descriptor_buffer_infos[0].range = instance_buffer_size;
872 descriptor_buffer_infos[1].buffer = as_validation_buffer_info.validation_buffer;
873 descriptor_buffer_infos[1].offset = 0;
874 descriptor_buffer_infos[1].range = validation_buffer_size;
875
876 VkWriteDescriptorSet descriptor_set_writes[2] = {};
877 descriptor_set_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
878 descriptor_set_writes[0].dstSet = as_validation_buffer_info.descriptor_set;
879 descriptor_set_writes[0].dstBinding = 0;
880 descriptor_set_writes[0].descriptorCount = 1;
881 descriptor_set_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
882 descriptor_set_writes[0].pBufferInfo = &descriptor_buffer_infos[0];
883 descriptor_set_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
884 descriptor_set_writes[1].dstSet = as_validation_buffer_info.descriptor_set;
885 descriptor_set_writes[1].dstBinding = 1;
886 descriptor_set_writes[1].descriptorCount = 1;
887 descriptor_set_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
888 descriptor_set_writes[1].pBufferInfo = &descriptor_buffer_infos[1];
889
890 DispatchUpdateDescriptorSets(device, 2, descriptor_set_writes, 0, nullptr);
891
892 // Issue a memory barrier to make sure anything writing to the instance buffer has finished.
893 VkMemoryBarrier memory_barrier = {};
894 memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
895 memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
896 memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
897 DispatchCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 1,
898 &memory_barrier, 0, nullptr, 0, nullptr);
899
900 // Save a copy of the compute pipeline state that needs to be restored.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600901 GPUAV_RESTORABLE_PIPELINE_STATE restorable_state;
Jason Macnak83cfd582019-07-31 10:14:24 -0700902 restorable_state.Create(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
903
904 // Switch to and launch the validation compute shader to find, replace, and report invalid acceleration structure handles.
905 DispatchCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, as_validation_state.pipeline);
906 DispatchCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, as_validation_state.pipeline_layout, 0, 1,
907 &as_validation_buffer_info.descriptor_set, 0, nullptr);
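    // A single workgroup is sufficient: the validation shader itself loops over all instances_to_validate.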
908 DispatchCmdDispatch(commandBuffer, 1, 1, 1);
909
910 // Issue a buffer memory barrier to make sure that any invalid bottom level acceleration structure handles
911 // have been replaced by the validation compute shader before any builds take place.
912 VkBufferMemoryBarrier instance_buffer_barrier = {};
913 instance_buffer_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
914 instance_buffer_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
915 instance_buffer_barrier.dstAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV;
916 instance_buffer_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
917 instance_buffer_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
918 instance_buffer_barrier.buffer = instanceData;
919 instance_buffer_barrier.offset = instanceOffset;
920 instance_buffer_barrier.size = instance_buffer_size;
921 DispatchCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
922 VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, 0, 0, nullptr, 1, &instance_buffer_barrier, 0,
923 nullptr);
924
925 // Restore the previous compute pipeline state.
926 restorable_state.Restore(commandBuffer);
927
928 as_validation_state.validation_buffers[commandBuffer].push_back(std::move(as_validation_buffer_info));
929}
930
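// After queue completion, read back each acceleration structure validation buffer and report any invalid bottom level
// acceleration structure handle the validation compute shader found (and replaced).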
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600931void GpuAssisted::ProcessAccelerationStructureBuildValidationBuffer(VkQueue queue, CMD_BUFFER_STATE *cb_node) {
Jason Macnak83cfd582019-07-31 10:14:24 -0700932 if (cb_node == nullptr || !cb_node->hasBuildAccelerationStructureCmd) {
933 return;
934 }
935
Tony-LunarG99b880b2019-09-26 11:19:52 -0600936 auto &as_validation_info = acceleration_structure_validation_state;
Jason Macnak83cfd582019-07-31 10:14:24 -0700937 auto &as_validation_buffer_infos = as_validation_info.validation_buffers[cb_node->commandBuffer];
938 for (const auto &as_validation_buffer_info : as_validation_buffer_infos) {
939 GpuAccelerationStructureBuildValidationBuffer *mapped_validation_buffer = nullptr;
940
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -0700941 VkResult result = vmaMapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation,
942 reinterpret_cast<void **>(&mapped_validation_buffer));
Jason Macnak83cfd582019-07-31 10:14:24 -0700943 if (result == VK_SUCCESS) {
944 if (mapped_validation_buffer->invalid_handle_found > 0) {
945 uint64_t invalid_handle = 0;
946 reinterpret_cast<uint32_t *>(&invalid_handle)[0] = mapped_validation_buffer->invalid_handle_bits_0;
947 reinterpret_cast<uint32_t *>(&invalid_handle)[1] = mapped_validation_buffer->invalid_handle_bits_1;
948
Mark Lobodzinskiafa7cb82020-01-29 16:49:36 -0700949 LogError(as_validation_buffer_info.acceleration_structure, "UNASSIGNED-AccelerationStructure",
950 "Attempted to build top level acceleration structure using invalid bottom level acceleration structure "
951 "handle (%" PRIu64 ")",
952 invalid_handle);
Jason Macnak83cfd582019-07-31 10:14:24 -0700953 }
Tony-LunarG99b880b2019-09-26 11:19:52 -0600954 vmaUnmapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation);
Jason Macnak83cfd582019-07-31 10:14:24 -0700955 }
956 }
957}
958
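// Once memory is bound, query the acceleration structure's 8-byte opaque handle and cache it on the state object for
// use by the acceleration structure build validation.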
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600959void GpuAssisted::PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
960 const VkBindAccelerationStructureMemoryInfoNV *pBindInfos,
961 VkResult result) {
962 if (VK_SUCCESS != result) return;
Tony-LunarG99b880b2019-09-26 11:19:52 -0600963 ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos, result);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600964 for (uint32_t i = 0; i < bindInfoCount; i++) {
965 const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
sourav parmarcd5fb182020-07-17 12:58:44 -0700966 ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureStateNV(info.accelerationStructure);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600967 if (as_state) {
968 DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
969 }
Karl Schultz7b024b42018-08-30 16:18:18 -0600970 }
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600971}
Mark Lobodzinskiff7d8002019-02-13 13:01:26 -0700972
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600973// Modify the pipeline layout to include our debug descriptor set and any needed padding with the dummy descriptor set.
974void GpuAssisted::PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
975 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
976 void *cpl_state_data) {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600977 if (aborted) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600978 return;
979 }
Tony-LunarG99b880b2019-09-26 11:19:52 -0600980
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600981 create_pipeline_layout_api_state *cpl_state = reinterpret_cast<create_pipeline_layout_api_state *>(cpl_state_data);
982
Tony-LunarG99b880b2019-09-26 11:19:52 -0600983 if (cpl_state->modified_create_info.setLayoutCount >= adjusted_max_desc_sets) {
Karl Schultz7b024b42018-08-30 16:18:18 -0600984 std::ostringstream strm;
Tony-LunarG99b880b2019-09-26 11:19:52 -0600985 strm << "Pipeline Layout conflict with validation's descriptor set at slot " << desc_set_bind_index << ". "
Karl Schultz7b024b42018-08-30 16:18:18 -0600986 << "Application has too many descriptor sets in the pipeline layout to continue with gpu validation. "
987 << "Validation is not modifying the pipeline layout. "
988 << "Instrumented shaders are replaced with non-instrumented shaders.";
Mark Lobodzinskia8151b02020-02-27 13:38:08 -0700989 ReportSetupProblem(device, strm.str().c_str());
Karl Schultz7b024b42018-08-30 16:18:18 -0600990 } else {
Tony-LunarGb5fae462020-03-05 12:43:25 -0700991 UtilPreCallRecordCreatePipelineLayout(cpl_state, this, pCreateInfo);
Karl Schultz7b024b42018-08-30 16:18:18 -0600992 }
Mark Lobodzinskiff7d8002019-02-13 13:01:26 -0700993}
994
Tony-LunarG2ba1cb32019-09-25 15:16:11 -0600995void GpuAssisted::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
996 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
997 VkResult result) {
Tony-LunarG99b880b2019-09-26 11:19:52 -0600998 ValidationStateTracker::PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
999
Karl Schultz7b024b42018-08-30 16:18:18 -06001000 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001001 ReportSetupProblem(device, "Unable to create pipeline layout. Device could become unstable.");
Tony-LunarG99b880b2019-09-26 11:19:52 -06001002 aborted = true;
Karl Schultz7b024b42018-08-30 16:18:18 -06001003 }
Karl Schultz7b024b42018-08-30 16:18:18 -06001004}
1005
Karl Schultz7b024b42018-08-30 16:18:18 -06001006// Free the device memory and descriptor set associated with a command buffer.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001007void GpuAssisted::ResetCommandBuffer(VkCommandBuffer commandBuffer) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001008 if (aborted) {
Karl Schultz7b024b42018-08-30 16:18:18 -06001009 return;
1010 }
Tony-LunarG1dce2392019-10-23 16:49:29 -06001011 auto gpuav_buffer_list = GetBufferInfo(commandBuffer);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001012 for (auto buffer_info : gpuav_buffer_list) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001013 vmaDestroyBuffer(vmaAllocator, buffer_info.output_mem_block.buffer, buffer_info.output_mem_block.allocation);
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001014 if (buffer_info.di_input_mem_block.buffer) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001015 vmaDestroyBuffer(vmaAllocator, buffer_info.di_input_mem_block.buffer, buffer_info.di_input_mem_block.allocation);
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001016 }
1017 if (buffer_info.bda_input_mem_block.buffer) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001018 vmaDestroyBuffer(vmaAllocator, buffer_info.bda_input_mem_block.buffer, buffer_info.bda_input_mem_block.allocation);
Karl Schultz7b024b42018-08-30 16:18:18 -06001019 }
Tony-LunarGdcbc2c32019-05-06 10:17:44 -06001020 if (buffer_info.desc_set != VK_NULL_HANDLE) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001021 desc_set_manager->PutBackDescriptorSet(buffer_info.desc_pool, buffer_info.desc_set);
Tony-LunarGdcbc2c32019-05-06 10:17:44 -06001022 }
Karl Schultz7b024b42018-08-30 16:18:18 -06001023 }
Tony-LunarG99b880b2019-09-26 11:19:52 -06001024 command_buffer_map.erase(commandBuffer);
Jason Macnak83cfd582019-07-31 10:14:24 -07001025
Tony-LunarG99b880b2019-09-26 11:19:52 -06001026 auto &as_validation_info = acceleration_structure_validation_state;
Jason Macnak83cfd582019-07-31 10:14:24 -07001027 auto &as_validation_buffer_infos = as_validation_info.validation_buffers[commandBuffer];
1028 for (auto &as_validation_buffer_info : as_validation_buffer_infos) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001029 vmaDestroyBuffer(vmaAllocator, as_validation_buffer_info.validation_buffer,
Jason Macnak83cfd582019-07-31 10:14:24 -07001030 as_validation_buffer_info.validation_buffer_allocation);
1031
1032 if (as_validation_buffer_info.descriptor_set != VK_NULL_HANDLE) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001033 desc_set_manager->PutBackDescriptorSet(as_validation_buffer_info.descriptor_pool,
1034 as_validation_buffer_info.descriptor_set);
Jason Macnak83cfd582019-07-31 10:14:24 -07001035 }
1036 }
1037 as_validation_info.validation_buffers.erase(commandBuffer);
Karl Schultz7b024b42018-08-30 16:18:18 -06001038}
Karl Schultz7b024b42018-08-30 16:18:18 -06001039// Just gives a warning about a possible deadlock.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001040bool GpuAssisted::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
1041 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
1042 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
1043 uint32_t bufferMemoryBarrierCount,
1044 const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
Jeff Bolz5c801d12019-10-09 10:38:45 -05001045 const VkImageMemoryBarrier *pImageMemoryBarriers) const {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001046 if (srcStageMask & VK_PIPELINE_STAGE_HOST_BIT) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001047 ReportSetupProblem(commandBuffer,
Karl Schultz7b024b42018-08-30 16:18:18 -06001048 "CmdWaitEvents recorded with VK_PIPELINE_STAGE_HOST_BIT set. "
1049 "GPU_Assisted validation waits on queue completion. "
1050 "This wait could block the host's signaling of this event, resulting in deadlock.");
1051 }
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001052 return false;
Karl Schultz7b024b42018-08-30 16:18:18 -06001053}
1054
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001055void GpuAssisted::PostCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
1056 VkPhysicalDeviceProperties *pPhysicalDeviceProperties) {
1057 // There is an implicit layer that can cause this call to return 0 for maxBoundDescriptorSets - Ignore such calls
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001058 if (enabled[gpu_validation_reserve_binding_slot] && pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 0) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001059 if (pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 1) {
1060 pPhysicalDeviceProperties->limits.maxBoundDescriptorSets -= 1;
1061 } else {
Mark Lobodzinskiafa7cb82020-01-29 16:49:36 -07001062 LogWarning(physicalDevice, "UNASSIGNED-GPU-Assisted Validation Setup Error.",
1063 "Unable to reserve descriptor binding slot on a device with only one slot.");
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001064 }
1065 }
1066}
1067
1068void GpuAssisted::PostCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
1069 VkPhysicalDeviceProperties2 *pPhysicalDeviceProperties2) {
1070 // There is an implicit layer that can cause this call to return 0 for maxBoundDescriptorSets - Ignore such calls
Mark Lobodzinski90eea5b2020-05-15 12:54:00 -06001071 if (enabled[gpu_validation_reserve_binding_slot] && pPhysicalDeviceProperties2->properties.limits.maxBoundDescriptorSets > 0) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001072 if (pPhysicalDeviceProperties2->properties.limits.maxBoundDescriptorSets > 1) {
1073 pPhysicalDeviceProperties2->properties.limits.maxBoundDescriptorSets -= 1;
1074 } else {
Mark Lobodzinskiafa7cb82020-01-29 16:49:36 -07001075 LogWarning(physicalDevice, "UNASSIGNED-GPU-Assisted Validation Setup Error.",
1076 "Unable to reserve descriptor binding slot on a device with only one slot.");
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001077 }
1078 }
1079}
1080
1081void GpuAssisted::PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
1082 const VkGraphicsPipelineCreateInfo *pCreateInfos,
1083 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1084 void *cgpl_state_data) {
Karl Schultz7b024b42018-08-30 16:18:18 -06001085 std::vector<safe_VkGraphicsPipelineCreateInfo> new_pipeline_create_infos;
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001086 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
Tony-LunarGb5fae462020-03-05 12:43:25 -07001087 UtilPreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, cgpl_state->pipe_state,
1088 &new_pipeline_create_infos, VK_PIPELINE_BIND_POINT_GRAPHICS, this);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001089 cgpl_state->gpu_create_infos = new_pipeline_create_infos;
1090 cgpl_state->pCreateInfos = reinterpret_cast<VkGraphicsPipelineCreateInfo *>(cgpl_state->gpu_create_infos.data());
Tony-LunarGeb25bf52019-04-26 10:46:41 -06001091}
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001092
1093void GpuAssisted::PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
1094 const VkComputePipelineCreateInfo *pCreateInfos,
1095 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1096 void *ccpl_state_data) {
Tony-LunarGeb25bf52019-04-26 10:46:41 -06001097 std::vector<safe_VkComputePipelineCreateInfo> new_pipeline_create_infos;
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001098 auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
Tony-LunarGb5fae462020-03-05 12:43:25 -07001099 UtilPreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, ccpl_state->pipe_state,
1100 &new_pipeline_create_infos, VK_PIPELINE_BIND_POINT_COMPUTE, this);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001101 ccpl_state->gpu_create_infos = new_pipeline_create_infos;
1102 ccpl_state->pCreateInfos = reinterpret_cast<VkComputePipelineCreateInfo *>(ccpl_state->gpu_create_infos.data());
Jason Macnak67407e72019-07-11 11:05:09 -07001103}
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001104
1105void GpuAssisted::PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
1106 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
1107 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1108 void *crtpl_state_data) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001109 std::vector<safe_VkRayTracingPipelineCreateInfoCommon> new_pipeline_create_infos;
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001110 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
Tony-LunarGb5fae462020-03-05 12:43:25 -07001111 UtilPreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, crtpl_state->pipe_state,
1112 &new_pipeline_create_infos, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, this);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001113 crtpl_state->gpu_create_infos = new_pipeline_create_infos;
1114 crtpl_state->pCreateInfos = reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>(crtpl_state->gpu_create_infos.data());
Tony-LunarGeb25bf52019-04-26 10:46:41 -06001115}
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001116
sourav parmarcd5fb182020-07-17 12:58:44 -07001117void GpuAssisted::PreCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
1118 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001119 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
1120 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1121 void *crtpl_state_data) {
1122 std::vector<safe_VkRayTracingPipelineCreateInfoCommon> new_pipeline_create_infos;
1123 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
Tony-LunarGb5fae462020-03-05 12:43:25 -07001124 UtilPreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, crtpl_state->pipe_state,
1125 &new_pipeline_create_infos, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, this);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001126 crtpl_state->gpu_create_infos = new_pipeline_create_infos;
1127 crtpl_state->pCreateInfos = reinterpret_cast<VkRayTracingPipelineCreateInfoKHR *>(crtpl_state->gpu_create_infos.data());
1128}
Karl Schultz7b024b42018-08-30 16:18:18 -06001129
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001130void GpuAssisted::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
1131 const VkGraphicsPipelineCreateInfo *pCreateInfos,
1132 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1133 VkResult result, void *cgpl_state_data) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001134 ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
1135 pPipelines, result, cgpl_state_data);
Tony-LunarGc876c6e2020-09-09 15:19:43 -06001136 create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
1137 UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, cgpl_state->gpu_create_infos.data());
Tony-LunarGb5fae462020-03-05 12:43:25 -07001138 UtilPostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_GRAPHICS, this);
Tony-LunarGeb25bf52019-04-26 10:46:41 -06001139}
Tony-LunarG99b880b2019-09-26 11:19:52 -06001140
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001141void GpuAssisted::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
1142 const VkComputePipelineCreateInfo *pCreateInfos,
1143 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1144 VkResult result, void *ccpl_state_data) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001145 ValidationStateTracker::PostCallRecordCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines,
1146 result, ccpl_state_data);
Tony-LunarGc876c6e2020-09-09 15:19:43 -06001147 create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
1148 UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, ccpl_state->gpu_create_infos.data());
Tony-LunarGb5fae462020-03-05 12:43:25 -07001149 UtilPostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_COMPUTE, this);
Tony-LunarGeb25bf52019-04-26 10:46:41 -06001150}
Tony-LunarG99b880b2019-09-26 11:19:52 -06001151
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001152void GpuAssisted::PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
1153 const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
1154 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1155 VkResult result, void *crtpl_state_data) {
Tony-LunarGc876c6e2020-09-09 15:19:43 -06001156 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
Tony-LunarG99b880b2019-09-26 11:19:52 -06001157 ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator,
1158 pPipelines, result, crtpl_state_data);
Tony-LunarGc876c6e2020-09-09 15:19:43 -06001159 UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, crtpl_state->gpu_create_infos.data());
Tony-LunarGb5fae462020-03-05 12:43:25 -07001160 UtilPostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, this);
Jason Macnak67407e72019-07-11 11:05:09 -07001161}
1162
sourav parmarcd5fb182020-07-17 12:58:44 -07001163void GpuAssisted::PostCallRecordCreateRayTracingPipelinesKHR(VkDevice device, VkDeferredOperationKHR deferredOperation,
1164 VkPipelineCache pipelineCache, uint32_t count,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001165 const VkRayTracingPipelineCreateInfoKHR *pCreateInfos,
1166 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
1167 VkResult result, void *crtpl_state_data) {
Tony-LunarGc876c6e2020-09-09 15:19:43 -06001168 auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_khr_api_state *>(crtpl_state_data);
sourav parmarcd5fb182020-07-17 12:58:44 -07001169 ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesKHR(
1170 device, deferredOperation, pipelineCache, count, pCreateInfos, pAllocator, pPipelines, result, crtpl_state_data);
Tony-LunarGc876c6e2020-09-09 15:19:43 -06001171 UtilCopyCreatePipelineFeedbackData(count, pCreateInfos, crtpl_state->gpu_create_infos.data());
Tony-LunarGb5fae462020-03-05 12:43:25 -07001172 UtilPostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, this);
Karl Schultz7b024b42018-08-30 16:18:18 -06001173}
1174
1175// Remove all the shader trackers associated with this destroyed pipeline.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001176void GpuAssisted::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001177 for (auto it = shader_map.begin(); it != shader_map.end();) {
Karl Schultz7b024b42018-08-30 16:18:18 -06001178 if (it->second.pipeline == pipeline) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001179 it = shader_map.erase(it);
Karl Schultz7b024b42018-08-30 16:18:18 -06001180 } else {
1181 ++it;
1182 }
1183 }
Tony-LunarG99b880b2019-09-26 11:19:52 -06001184 ValidationStateTracker::PreCallRecordDestroyPipeline(device, pipeline, pAllocator);
Karl Schultz7b024b42018-08-30 16:18:18 -06001185}
1186
1187// Call the SPIR-V Optimizer to run the instrumentation pass on the shader.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001188bool GpuAssisted::InstrumentShader(const VkShaderModuleCreateInfo *pCreateInfo, std::vector<unsigned int> &new_pgm,
1189 uint32_t *unique_shader_id) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001190 if (aborted) return false;
Karl Schultz7b024b42018-08-30 16:18:18 -06001191 if (pCreateInfo->pCode[0] != spv::MagicNumber) return false;
1192
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001193 const spvtools::MessageConsumer gpu_console_message_consumer =
Tony-LunarG79641702020-07-13 15:43:05 -06001194 [this](spv_message_level_t level, const char *, const spv_position_t &position, const char *message) -> void {
1195 switch (level) {
1196 case SPV_MSG_FATAL:
1197 case SPV_MSG_INTERNAL_ERROR:
1198 case SPV_MSG_ERROR:
1199 this->LogError(this->device, "UNASSIGNED-GPU-Assisted", "Error during shader instrumentation: line %zu: %s",
1200 position.index, message);
1201 break;
1202 default:
1203 break;
1204 }
1205 };
1206
Karl Schultz7b024b42018-08-30 16:18:18 -06001207 // Load original shader SPIR-V
1208 uint32_t num_words = static_cast<uint32_t>(pCreateInfo->codeSize / 4);
1209 new_pgm.clear();
1210 new_pgm.reserve(num_words);
1211 new_pgm.insert(new_pgm.end(), &pCreateInfo->pCode[0], &pCreateInfo->pCode[num_words]);
1212
1213 // Call the optimizer to instrument the shader.
1214 // Use the unique_shader_module_id as a shader ID so we can look up its handle later in the shader_map.
Tony-LunarGa77cade2019-03-06 10:49:22 -07001215 // If descriptor indexing is enabled, enable length checks and updated descriptor checks
Karl Schultz7b024b42018-08-30 16:18:18 -06001216 using namespace spvtools;
Tony-LunarG8a51b7d2020-07-01 15:57:23 -06001217 spv_target_env target_env = PickSpirvEnv(api_version, (device_extensions.vk_khr_spirv_1_4 != kNotEnabled));
Tony-LunarGf29f77f2020-08-26 15:48:00 -06001218 spvtools::ValidatorOptions val_options;
1219 AdjustValidatorOptions(device_extensions, enabled_features, val_options);
1220 spvtools::OptimizerOptions opt_options;
1221 opt_options.set_run_validator(true);
1222 opt_options.set_validator_options(val_options);
Karl Schultz7b024b42018-08-30 16:18:18 -06001223 Optimizer optimizer(target_env);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001224 optimizer.SetMessageConsumer(gpu_console_message_consumer);
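    // Register the bindless descriptor instrumentation pass, followed by dead-code elimination to clean up the
    // instrumented module.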
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001225 optimizer.RegisterPass(CreateInstBindlessCheckPass(desc_set_bind_index, unique_shader_module_id, descriptor_indexing,
Tony-LunarGe8632e42020-11-18 17:03:12 -07001226 descriptor_indexing, buffer_oob_enabled, buffer_oob_enabled));
Karl Schultz7b024b42018-08-30 16:18:18 -06001227 optimizer.RegisterPass(CreateAggressiveDCEPass());
Tony-LunarG5c38b182020-06-10 16:15:32 -06001228 if ((device_extensions.vk_ext_buffer_device_address || device_extensions.vk_khr_buffer_device_address) && shaderInt64 &&
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001229 enabled_features.core12.bufferDeviceAddress) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001230 optimizer.RegisterPass(CreateInstBuffAddrCheckPass(desc_set_bind_index, unique_shader_module_id));
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001231 }
Tony-LunarGf29f77f2020-08-26 15:48:00 -06001232 bool pass = optimizer.Run(new_pgm.data(), new_pgm.size(), &new_pgm, opt_options);
Karl Schultz7b024b42018-08-30 16:18:18 -06001233 if (!pass) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001234 ReportSetupProblem(device, "Failure to instrument shader. Proceeding with non-instrumented shader.");
Karl Schultz7b024b42018-08-30 16:18:18 -06001235 }
Tony-LunarG99b880b2019-09-26 11:19:52 -06001236 *unique_shader_id = unique_shader_module_id++;
Karl Schultz7b024b42018-08-30 16:18:18 -06001237 return pass;
1238}
Mark Lobodzinski01734072019-02-13 17:39:15 -07001239// Create the instrumented shader data to provide to the driver.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001240void GpuAssisted::PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
1241 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
1242 void *csm_state_data) {
1243 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
1244 bool pass = InstrumentShader(pCreateInfo, csm_state->instrumented_pgm, &csm_state->unique_shader_id);
Karl Schultz7b024b42018-08-30 16:18:18 -06001245 if (pass) {
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001246 csm_state->instrumented_create_info.pCode = csm_state->instrumented_pgm.data();
1247 csm_state->instrumented_create_info.codeSize = csm_state->instrumented_pgm.size() * sizeof(unsigned int);
Karl Schultz7b024b42018-08-30 16:18:18 -06001248 }
Karl Schultz7b024b42018-08-30 16:18:18 -06001249}
1250
Karl Schultz7b024b42018-08-30 16:18:18 -06001251// Generate the part of the message describing the violation.
Tony-LunarG7de10e82020-11-24 11:31:55 -07001252static void GenerateValidationMessage(const uint32_t *debug_record, std::string &msg, std::string &vuid_msg, CMD_TYPE cmd_type) {
Karl Schultz7b024b42018-08-30 16:18:18 -06001253 using namespace spvtools;
1254 std::ostringstream strm;
Tony-LunarGab47cac2019-12-20 15:28:01 -07001255 switch (debug_record[kInstValidationOutError]) {
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001256 case kInstErrorBindlessBounds: {
Tony-LunarGab47cac2019-12-20 15:28:01 -07001257 strm << "Index of " << debug_record[kInstBindlessBoundsOutDescIndex] << " used to index descriptor array of length "
1258 << debug_record[kInstBindlessBoundsOutDescBound] << ". ";
Tony-LunarGc1d657d2019-02-22 14:55:19 -07001259 vuid_msg = "UNASSIGNED-Descriptor index out of bounds";
Karl Schultz7b024b42018-08-30 16:18:18 -06001260 } break;
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001261 case kInstErrorBindlessUninit: {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001262 strm << "Descriptor index " << debug_record[kInstBindlessUninitOutDescIndex] << " is uninitialized.";
Tony-LunarGc1d657d2019-02-22 14:55:19 -07001263 vuid_msg = "UNASSIGNED-Descriptor uninitialized";
Karl Schultz7b024b42018-08-30 16:18:18 -06001264 } break;
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001265 case kInstErrorBuffAddrUnallocRef: {
Tony-LunarGab47cac2019-12-20 15:28:01 -07001266 uint64_t *ptr = (uint64_t *)&debug_record[kInstBuffAddrUnallocOutDescPtrLo];
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001267 strm << "Device address 0x" << std::hex << *ptr << " access out of bounds. ";
1268 vuid_msg = "UNASSIGNED-Device address out of bounds";
1269 } break;
Tony-LunarG7de10e82020-11-24 11:31:55 -07001270 case kInstErrorBuffOOBUniform:
1271 case kInstErrorBuffOOBStorage: {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001272 auto size = debug_record[kInstBindlessBuffOOBOutBuffSize];
1273 if (size == 0) {
1274 strm << "Descriptor index " << debug_record[kInstBindlessBuffOOBOutDescIndex] << " is uninitialized.";
1275 vuid_msg = "UNASSIGNED-Descriptor uninitialized";
1276 } else {
1277 strm << "Descriptor index " << debug_record[kInstBindlessBuffOOBOutDescIndex]
1278 << " access out of bounds. Descriptor size is " << debug_record[kInstBindlessBuffOOBOutBuffSize]
Tony-LunarG7de10e82020-11-24 11:31:55 -07001279 << " and highest byte accessed was " << debug_record[kInstBindlessBuffOOBOutBuffOff];
1280 const GpuVuid vuid = GetGpuVuid(cmd_type);
1281 if (debug_record[kInstValidationOutError] == kInstErrorBuffOOBUniform)
1282 vuid_msg = vuid.uniform_access_oob;
1283 else
1284 vuid_msg = vuid.storage_access_oob;
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001285 }
1286 } break;
Tony-LunarG7de10e82020-11-24 11:31:55 -07001287 case kInstErrorBuffOOBUniformTexel:
1288 case kInstErrorBuffOOBStorageTexel: {
1289 auto size = debug_record[kInstBindlessBuffOOBOutBuffSize];
1290 if (size == 0) {
1291 strm << "Descriptor index " << debug_record[kInstBindlessBuffOOBOutDescIndex] << " is uninitialized.";
1292 vuid_msg = "UNASSIGNED-Descriptor uninitialized";
1293            } else {
1295 strm << "Descriptor index " << debug_record[kInstBindlessBuffOOBOutDescIndex]
1296 << " access out of bounds. Descriptor size is " << debug_record[kInstBindlessBuffOOBOutBuffSize]
1297 << " texels and highest texel accessed was " << debug_record[kInstBindlessBuffOOBOutBuffOff];
1298 const GpuVuid vuid = GetGpuVuid(cmd_type);
1299 if (debug_record[kInstValidationOutError] == kInstErrorBuffOOBUniformTexel)
1300 vuid_msg = vuid.uniform_access_oob;
1301 else
1302 vuid_msg = vuid.storage_access_oob;
1303            }
1304        } break;
Karl Schultz7b024b42018-08-30 16:18:18 -06001305 default: {
Tony-LunarGab47cac2019-12-20 15:28:01 -07001306 strm << "Internal Error (unexpected error type = " << debug_record[kInstValidationOutError] << "). ";
Karl Schultz7b024b42018-08-30 16:18:18 -06001307 vuid_msg = "UNASSIGNED-Internal Error";
1308 assert(false);
1309 } break;
1310 }
1311 msg = strm.str();
1312}
1313
Karl Schultz7b024b42018-08-30 16:18:18 -06001314// Pull together all the information from the debug record to build the error message strings,
1315// and then assemble them into a single message string.
1316// Retrieve the shader program referenced by the unique shader ID provided in the debug record.
1317// We had to keep a copy of the shader program with the same lifecycle as the pipeline to make
1318// sure it is available when the pipeline is submitted. (The ShaderModule tracking object also
1319// keeps a copy, but it can be destroyed after the pipeline is created and before it is submitted.)
1320//
Tony-LunarG7de10e82020-11-24 11:31:55 -07001321void GpuAssisted::AnalyzeAndGenerateMessages(VkCommandBuffer command_buffer, VkQueue queue, GpuAssistedBufferInfo &buffer_info,
Tony-LunarG1dce2392019-10-23 16:49:29 -06001322 uint32_t operation_index, uint32_t *const debug_output_buffer) {
Karl Schultz7b024b42018-08-30 16:18:18 -06001323 using namespace spvtools;
1324 const uint32_t total_words = debug_output_buffer[0];
1325 // A zero here means that the shader instrumentation didn't write anything.
1326 // If you have nothing to say, don't say it here.
1327 if (0 == total_words) {
1328 return;
1329 }
1330 // The first word in the debug output buffer is the number of words that would have
1331 // been written by the shader instrumentation, if there was enough room in the buffer we provided.
1332 // The number of words actually written by the shaders is determined by the size of the buffer
1333 // we provide via the descriptor. So, we process only the number of words that can fit in the
1334 // buffer.
1335 // Each "report" written by the shader instrumentation is considered a "record". This function
1336 // is hard-coded to process only one record because it expects the buffer to be large enough to
1337 // hold only one record. If there is a desire to process more than one record, this function needs
1338 // to be modified to loop over records and the buffer size increased.
Karl Schultz7b024b42018-08-30 16:18:18 -06001339 std::string validation_message;
1340 std::string stage_message;
1341 std::string common_message;
1342 std::string filename_message;
1343 std::string source_message;
1344 std::string vuid_msg;
1345 VkShaderModule shader_module_handle = VK_NULL_HANDLE;
1346 VkPipeline pipeline_handle = VK_NULL_HANDLE;
1347 std::vector<unsigned int> pgm;
1348 // The first record starts at this offset after the total_words.
1349 const uint32_t *debug_record = &debug_output_buffer[kDebugOutputDataOffset];
1350 // Lookup the VkShaderModule handle and SPIR-V code used to create the shader, using the unique shader ID value returned
1351 // by the instrumented shader.
Tony-LunarG99b880b2019-09-26 11:19:52 -06001352 auto it = shader_map.find(debug_record[kInstCommonOutShaderId]);
1353 if (it != shader_map.end()) {
Karl Schultz7b024b42018-08-30 16:18:18 -06001354 shader_module_handle = it->second.shader_module;
1355 pipeline_handle = it->second.pipeline;
1356 pgm = it->second.pgm;
1357 }
Tony-LunarG7de10e82020-11-24 11:31:55 -07001358 GenerateValidationMessage(debug_record, validation_message, vuid_msg, buffer_info.cmd_type);
Tony-LunarGb5fae462020-03-05 12:43:25 -07001359 UtilGenerateStageMessage(debug_record, stage_message);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001360 UtilGenerateCommonMessage(report_data, command_buffer, debug_record, shader_module_handle, pipeline_handle, buffer_info.pipeline_bind_point,
Tony-LunarGb5fae462020-03-05 12:43:25 -07001361 operation_index, common_message);
1362 UtilGenerateSourceMessages(pgm, debug_record, false, filename_message, source_message);
Mark Lobodzinskiafa7cb82020-01-29 16:49:36 -07001363 LogError(queue, vuid_msg.c_str(), "%s %s %s %s%s", validation_message.c_str(), common_message.c_str(), stage_message.c_str(),
1364 filename_message.c_str(), source_message.c_str());
Karl Schultz7b024b42018-08-30 16:18:18 -06001365 // The debug record at word kInstCommonOutSize is the number of words in the record
1366 // written by the shader. Clear the entire record plus the total_words word at the start.
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001367 const uint32_t words_to_clear = 1 + std::min(debug_record[kInstCommonOutSize], static_cast<uint32_t>(kInstMaxOutCnt));
Karl Schultz7b024b42018-08-30 16:18:18 -06001368 memset(debug_output_buffer, 0, sizeof(uint32_t) * words_to_clear);
1369}
1370
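// Record what the instrumentation needs to know about a descriptor: the backing buffer's size for buffer and texel
// buffer descriptors (UINT_MAX when the buffer is VK_NULL_HANDLE), or a non-zero 'initialized' marker for all other
// descriptor types.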
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001371void GpuAssisted::SetDescriptorInitialized(uint32_t *pData, uint32_t index, const cvdescriptorset::Descriptor *descriptor) {
1372 if (descriptor->GetClass() == cvdescriptorset::DescriptorClass::GeneralBuffer) {
1373 auto buffer = static_cast<const cvdescriptorset::BufferDescriptor *>(descriptor)->GetBuffer();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001374 if (buffer == VK_NULL_HANDLE) {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001375 pData[index] = UINT_MAX;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001376 } else {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001377 auto buffer_state = static_cast<const cvdescriptorset::BufferDescriptor *>(descriptor)->GetBufferState();
1378 pData[index] = static_cast<uint32_t>(buffer_state->createInfo.size);
1379 }
1380 } else if (descriptor->GetClass() == cvdescriptorset::DescriptorClass::TexelBuffer) {
1381 auto buffer_view = static_cast<const cvdescriptorset::TexelDescriptor *>(descriptor)->GetBufferView();
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001382 if (buffer_view == VK_NULL_HANDLE) {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001383 pData[index] = UINT_MAX;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001384 } else {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001385 auto buffer_view_state = static_cast<const cvdescriptorset::TexelDescriptor *>(descriptor)->GetBufferViewState();
1386 pData[index] = static_cast<uint32_t>(buffer_view_state->buffer_state->createInfo.size);
1387 }
1388 } else {
1389 pData[index] = 1;
1390 }
1391}
1392
Tony-LunarG81efe392019-03-07 15:43:27 -07001393// For the given command buffer, map its debug data buffers and update the status of any update after bind descriptors
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001394void GpuAssisted::UpdateInstrumentationBuffer(CMD_BUFFER_STATE *cb_node) {
Tony-LunarG1dce2392019-10-23 16:49:29 -06001395 auto gpu_buffer_list = GetBufferInfo(cb_node->commandBuffer);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001396 uint32_t *data;
Tony-LunarG81efe392019-03-07 15:43:27 -07001397 for (auto &buffer_info : gpu_buffer_list) {
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001398 if (buffer_info.di_input_mem_block.update_at_submit.size() > 0) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001399 VkResult result =
1400 vmaMapMemory(vmaAllocator, buffer_info.di_input_mem_block.allocation, reinterpret_cast<void **>(&data));
Tony-LunarG81efe392019-03-07 15:43:27 -07001401 if (result == VK_SUCCESS) {
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001402 for (auto update : buffer_info.di_input_mem_block.update_at_submit) {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001403 if (update.second->updated) {
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001404 SetDescriptorInitialized(data, update.first, update.second);
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001405 }
Tony-LunarG81efe392019-03-07 15:43:27 -07001406 }
Tony-LunarG99b880b2019-09-26 11:19:52 -06001407 vmaUnmapMemory(vmaAllocator, buffer_info.di_input_mem_block.allocation);
Tony-LunarG81efe392019-03-07 15:43:27 -07001408 }
1409 }
1410 }
1411}
1412
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001413void GpuAssisted::PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
Tony-LunarG81efe392019-03-07 15:43:27 -07001414 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1415 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1416 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
Mark Lobodzinskicefe42f2019-04-25 12:16:27 -06001417 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
Tony-LunarG81efe392019-03-07 15:43:27 -07001418 UpdateInstrumentationBuffer(cb_node);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001419 for (auto secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
1420 UpdateInstrumentationBuffer(secondary_cmd_buffer);
Tony-LunarG81efe392019-03-07 15:43:27 -07001421 }
1422 }
1423 }
1424}
1425
Karl Schultz58674242019-01-22 15:35:02 -07001426// Issue a memory barrier to make GPU-written data available to host.
1427// Wait for the queue to complete execution.
1428// Check the debug buffers for all the command buffers that were submitted.
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001429void GpuAssisted::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
1430 VkResult result) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001431 ValidationStateTracker::PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001432
Mark Lobodzinski09379db2020-05-07 08:22:01 -06001433 if (aborted || (result != VK_SUCCESS)) return;
Tony-LunarG3cc795e2019-08-26 12:13:50 -06001434 bool buffers_present = false;
1435 // Don't QueueWaitIdle if there's nothing to process
1436 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1437 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1438 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
1439 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
Tony-LunarG1dce2392019-10-23 16:49:29 -06001440 if (GetBufferInfo(cb_node->commandBuffer).size() || cb_node->hasBuildAccelerationStructureCmd) buffers_present = true;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001441 for (auto secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
1442            if (GetBufferInfo(secondary_cmd_buffer->commandBuffer).size() || secondary_cmd_buffer->hasBuildAccelerationStructureCmd) {
Jason Macnak83cfd582019-07-31 10:14:24 -07001443 buffers_present = true;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001444 }
Tony-LunarG3cc795e2019-08-26 12:13:50 -06001445 }
1446 }
1447 }
1448 if (!buffers_present) return;
Karl Schultz58674242019-01-22 15:35:02 -07001449
Tony-LunarGb5fae462020-03-05 12:43:25 -07001450 UtilSubmitBarrier(queue, this);
Karl Schultz58674242019-01-22 15:35:02 -07001451
Tony-LunarG152a88b2019-03-20 15:42:24 -06001452 DispatchQueueWaitIdle(queue);
Karl Schultz58674242019-01-22 15:35:02 -07001453
Karl Schultz7b024b42018-08-30 16:18:18 -06001454 for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
1455 const VkSubmitInfo *submit = &pSubmits[submit_idx];
1456 for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
Mark Lobodzinskicefe42f2019-04-25 12:16:27 -06001457 auto cb_node = GetCBState(submit->pCommandBuffers[i]);
Tony-LunarGb5fae462020-03-05 12:43:25 -07001458 UtilProcessInstrumentationBuffer(queue, cb_node, this);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001459 ProcessAccelerationStructureBuildValidationBuffer(queue, cb_node);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001460 for (auto secondary_cmd_buffer : cb_node->linkedCommandBuffers) {
1461 UtilProcessInstrumentationBuffer(queue, secondary_cmd_buffer, this);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001462                ProcessAccelerationStructureBuildValidationBuffer(queue, secondary_cmd_buffer);
Karl Schultz7b024b42018-08-30 16:18:18 -06001463 }
1464 }
1465 }
1466}
Tony-LunarGb2501d22019-01-28 09:59:13 -07001467
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001468void GpuAssisted::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
1469 uint32_t firstVertex, uint32_t firstInstance) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001470 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAW);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001471}
1472
1473void GpuAssisted::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
1474 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001475 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXED);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001476}
1477
1478void GpuAssisted::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
1479 uint32_t stride) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001480 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECT);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001481}
1482
1483void GpuAssisted::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1484 uint32_t count, uint32_t stride) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001485 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECT);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001486}
1487
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001488void GpuAssisted::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1489 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
1490 uint32_t stride) {
1491 ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
1492 maxDrawCount, stride);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001493 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECTCOUNT);
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001494}
1495
1496void GpuAssisted::PreCallRecordCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1497 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
1498 uint32_t stride) {
1499 ValidationStateTracker::PreCallRecordCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
1500 maxDrawCount, stride);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001501 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECTCOUNT);
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001502}
1503
Tony-LunarG54176fb2020-12-02 10:47:22 -07001504void GpuAssisted::PreCallRecordCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount,
1505 uint32_t firstInstance, VkBuffer counterBuffer,
1506 VkDeviceSize counterBufferOffset, uint32_t counterOffset,
1507 uint32_t vertexStride) {
1508 ValidationStateTracker::PreCallRecordCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer,
1509 counterBufferOffset, counterOffset, vertexStride);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001510 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECTBYTECOUNTEXT);
Tony-LunarG54176fb2020-12-02 10:47:22 -07001511}
1512
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001513void GpuAssisted::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1514 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
1515 uint32_t maxDrawCount, uint32_t stride) {
1516 ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer,
1517 countBufferOffset, maxDrawCount, stride);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001518 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECTCOUNT);
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001519}
1520
1521void GpuAssisted::PreCallRecordCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1522 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
1523 uint32_t maxDrawCount, uint32_t stride) {
1524 ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset,
1525 maxDrawCount, stride);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001526 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECTCOUNT);
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001527}
1528
1529void GpuAssisted::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {
1530 ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001531 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSNV);
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001532}
1533
1534void GpuAssisted::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1535 uint32_t drawCount, uint32_t stride) {
1536 ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001537 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTNV);
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001538}
1539
1540void GpuAssisted::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1541 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
1542 uint32_t maxDrawCount, uint32_t stride) {
1543 ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer,
1544 countBufferOffset, maxDrawCount, stride);
Tony-LunarG7de10e82020-11-24 11:31:55 -07001545 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTCOUNTNV);
Tony-LunarG2fb8ff02020-06-11 12:45:07 -06001546}
1547
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001548void GpuAssisted::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001549 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCH);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001550}
1551
1552void GpuAssisted::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001553 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCHINDIRECT);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001554}
1555
Tony-LunarGd13f9b52020-09-08 15:45:45 -06001556void GpuAssisted::PreCallRecordCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY,
1557 uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY,
1558 uint32_t groupCountZ) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001559 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCHBASE);
Tony-LunarGd13f9b52020-09-08 15:45:45 -06001560}
1561
Tony-LunarG52c8c602020-09-10 16:29:56 -06001562void GpuAssisted::PreCallRecordCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY,
1563 uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY,
1564 uint32_t groupCountZ) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001565 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCHBASE);
Tony-LunarG52c8c602020-09-10 16:29:56 -06001566}
1567
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001568void GpuAssisted::PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
1569 VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
1570 VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
1571 VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
1572 VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
1573 VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
1574 uint32_t width, uint32_t height, uint32_t depth) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001575 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, CMD_TRACERAYSNV);
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001576}
1577
1578void GpuAssisted::PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
1579 VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
1580 VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
1581 VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
1582 VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
1583 VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
1584 uint32_t width, uint32_t height, uint32_t depth) {
1585 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
1586 cb_state->hasTraceRaysCmd = true;
1587}
1588
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001589void GpuAssisted::PreCallRecordCmdTraceRaysKHR(VkCommandBuffer commandBuffer,
sourav parmarcd5fb182020-07-17 12:58:44 -07001590 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
1591 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
1592 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
1593 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable, uint32_t width,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001594 uint32_t height, uint32_t depth) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001595 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, CMD_TRACERAYSKHR);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001596}
1597
1598void GpuAssisted::PostCallRecordCmdTraceRaysKHR(VkCommandBuffer commandBuffer,
sourav parmarcd5fb182020-07-17 12:58:44 -07001599 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
1600 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
1601 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
1602 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable, uint32_t width,
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001603 uint32_t height, uint32_t depth) {
1604 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
1605 cb_state->hasTraceRaysCmd = true;
1606}
1607
1608void GpuAssisted::PreCallRecordCmdTraceRaysIndirectKHR(VkCommandBuffer commandBuffer,
sourav parmarcd5fb182020-07-17 12:58:44 -07001609 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
1610 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
1611 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
1612 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable,
Shannon McPherson54e1f892020-11-27 11:04:19 -07001613 VkDeviceAddress indirectDeviceAddress) {
Tony-LunarG7de10e82020-11-24 11:31:55 -07001614 AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, CMD_TRACERAYSINDIRECTKHR);
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001615}
1616
1617void GpuAssisted::PostCallRecordCmdTraceRaysIndirectKHR(VkCommandBuffer commandBuffer,
sourav parmarcd5fb182020-07-17 12:58:44 -07001618 const VkStridedDeviceAddressRegionKHR *pRaygenShaderBindingTable,
1619 const VkStridedDeviceAddressRegionKHR *pMissShaderBindingTable,
1620 const VkStridedDeviceAddressRegionKHR *pHitShaderBindingTable,
1621 const VkStridedDeviceAddressRegionKHR *pCallableShaderBindingTable,
Shannon McPherson54e1f892020-11-27 11:04:19 -07001622 VkDeviceAddress indirectDeviceAddress) {
Jeff Bolz443c2ca2020-03-19 12:11:51 -05001623 CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
1624 cb_state->hasTraceRaysCmd = true;
1625}
1626
Tony-LunarG7de10e82020-11-24 11:31:55 -07001627void GpuAssisted::AllocateValidationResources(const VkCommandBuffer cmd_buffer, const VkPipelineBindPoint bind_point,
1628 CMD_TYPE cmd_type) {
Jason Macnak67407e72019-07-11 11:05:09 -07001629 if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
1630 bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
andreygca287f22019-04-10 00:15:33 +03001631 return;
1632 }
Tony-LunarGb2501d22019-01-28 09:59:13 -07001633 VkResult result;
1634
Tony-LunarG99b880b2019-09-26 11:19:52 -06001635 if (aborted) return;
Tony-LunarGb2501d22019-01-28 09:59:13 -07001636
1637 std::vector<VkDescriptorSet> desc_sets;
1638 VkDescriptorPool desc_pool = VK_NULL_HANDLE;
Tony-LunarG1dce2392019-10-23 16:49:29 -06001639 result = desc_set_manager->GetDescriptorSets(1, &desc_pool, debug_desc_layout, &desc_sets);
Tony-LunarGb2501d22019-01-28 09:59:13 -07001640 assert(result == VK_SUCCESS);
1641 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001642 ReportSetupProblem(device, "Unable to allocate descriptor sets. Device could become unstable.");
Tony-LunarG99b880b2019-09-26 11:19:52 -06001643 aborted = true;
Tony-LunarGb2501d22019-01-28 09:59:13 -07001644 return;
1645 }
1646
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001647 VkDescriptorBufferInfo output_desc_buffer_info = {};
Tony-LunarG99b880b2019-09-26 11:19:52 -06001648 output_desc_buffer_info.range = output_buffer_size;
Tony-LunarGb2501d22019-01-28 09:59:13 -07001649
Mark Lobodzinskicefe42f2019-04-25 12:16:27 -06001650 auto cb_node = GetCBState(cmd_buffer);
Tony-LunarGb2501d22019-01-28 09:59:13 -07001651 if (!cb_node) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001652 ReportSetupProblem(device, "Unrecognized command buffer");
Tony-LunarG99b880b2019-09-26 11:19:52 -06001653 aborted = true;
Tony-LunarGb2501d22019-01-28 09:59:13 -07001654 return;
1655 }
1656
Tony-LunarG81efe392019-03-07 15:43:27 -07001657 // Allocate memory for the output block that the gpu will use to return any error information
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001658 GpuAssistedDeviceMemoryBlock output_block = {};
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001659 VkBufferCreateInfo buffer_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
1660 buffer_info.size = output_buffer_size;
1661 buffer_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
1662 VmaAllocationCreateInfo alloc_info = {};
1663 alloc_info.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
1664 result = vmaCreateBuffer(vmaAllocator, &buffer_info, &alloc_info, &output_block.buffer, &output_block.allocation, nullptr);
Tony-LunarGb2501d22019-01-28 09:59:13 -07001665 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001666 ReportSetupProblem(device, "Unable to allocate device memory. Device could become unstable.");
Tony-LunarG99b880b2019-09-26 11:19:52 -06001667 aborted = true;
Tony-LunarGb2501d22019-01-28 09:59:13 -07001668 return;
1669 }
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001670
Tony-LunarG81efe392019-03-07 15:43:27 -07001671 // Clear the output block to zeros so that only error information from the gpu will be present
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001672 uint32_t *data_ptr;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001673 result = vmaMapMemory(vmaAllocator, output_block.allocation, reinterpret_cast<void **>(&data_ptr));
Tony-LunarG0e564722019-03-19 16:09:14 -06001674 if (result == VK_SUCCESS) {
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001675 memset(data_ptr, 0, output_buffer_size);
Tony-LunarG99b880b2019-09-26 11:19:52 -06001676 vmaUnmapMemory(vmaAllocator, output_block.allocation);
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001677 }
Tony-LunarG81efe392019-03-07 15:43:27 -07001678
Tony-LunarG2ba1cb32019-09-25 15:16:11 -06001679 GpuAssistedDeviceMemoryBlock di_input_block = {}, bda_input_block = {};
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001680 VkDescriptorBufferInfo di_input_desc_buffer_info = {};
1681 VkDescriptorBufferInfo bda_input_desc_buffer_info = {};
1682 VkWriteDescriptorSet desc_writes[3] = {};
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001683 uint32_t desc_count = 1;
locke-lunargb8d7a7a2020-10-25 16:01:52 -06001684 const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
1685 auto const &state = cb_node->lastBound[lv_bind_point];
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001686 uint32_t number_of_sets = static_cast<uint32_t>(state.per_set.size());
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001687
Tony-LunarGe29097a2020-12-03 10:59:19 -07001688 bool has_buffers = false;
Tony-LunarG81efe392019-03-07 15:43:27 -07001689 // Figure out how much memory we need for the input block based on how many sets and bindings there are
1690 // and how big each of the bindings is
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001691 if (number_of_sets > 0 && (descriptor_indexing || buffer_oob_enabled)) {
Tony-LunarG81efe392019-03-07 15:43:27 -07001692 uint32_t descriptor_count = 0; // Number of descriptors, including all array elements
1693 uint32_t binding_count = 0; // Number of bindings based on the max binding number used
Jeff Bolzb1fc0732019-08-11 20:16:49 -05001694 for (auto s : state.per_set) {
1695 auto desc = s.bound_descriptor_set;
Tony-LunarGd9224b12019-09-11 11:43:04 -06001696 if (desc && (desc->GetBindingCount() > 0)) {
1697 auto bindings = desc->GetLayout()->GetSortedBindingSet();
Tony-LunarGa77cade2019-03-06 10:49:22 -07001698 binding_count += desc->GetLayout()->GetMaxBinding() + 1;
1699 for (auto binding : bindings) {
Tony-LunarG7564b382019-08-21 10:11:35 -06001700                 // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline uniform
1701 // blocks
Tony-LunarGe29097a2020-12-03 10:59:19 -07001702 auto descriptor_type = desc->GetLayout()->GetTypeFromBinding(binding);
1703 if (descriptor_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
Tony-LunarG7564b382019-08-21 10:11:35 -06001704 descriptor_count++;
Mark Lobodzinskiafa7cb82020-01-29 16:49:36 -07001705 LogWarning(device, "UNASSIGNED-GPU-Assisted Validation Warning",
1706 "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT descriptors will not be validated by GPU assisted "
1707 "validation");
Tony-LunarG7564b382019-08-21 10:11:35 -06001708 } else if (binding == desc->GetLayout()->GetMaxBinding() && desc->IsVariableDescriptorCount(binding)) {
Tony-LunarGa77cade2019-03-06 10:49:22 -07001709 descriptor_count += desc->GetVariableDescriptorCount();
1710 } else {
1711 descriptor_count += desc->GetDescriptorCountFromBinding(binding);
1712 }
Tony-LunarGe29097a2020-12-03 10:59:19 -07001713 if (!has_buffers && (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
1714 descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
1715 descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
Tony-LunarGe8632e42020-11-18 17:03:12 -07001716 descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
1717 descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
1718 descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
Tony-LunarGe29097a2020-12-03 10:59:19 -07001719 has_buffers = true;
Tony-LunarGa77cade2019-03-06 10:49:22 -07001720 }
Tony-LunarGc28e28a2020-08-14 10:37:48 -06001721 }
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001722 }
1723 }
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001724
Tony-LunarGe29097a2020-12-03 10:59:19 -07001725 if (descriptor_indexing || has_buffers) {
1726 // Note that the size of the input buffer is dependent on the maximum binding number, which
1727 // can be very large. This is because for (set = s, binding = b, index = i), the validation
1728 // code is going to dereference Input[ i + Input[ b + Input[ s + Input[ Input[0] ] ] ] ] to
1729 // see if descriptors have been written. In gpu_validation.md, we note this and advise
1730 // using densely packed bindings as a best practice when using gpu-av with descriptor indexing
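            // Illustrative sketch (an assumed example derived from the buffer-population code below, not used at
            // runtime): for one set with bindings 0 and 1, where binding 0 holds 1 descriptor and binding 1 holds 3,
            // the input buffer would contain:
            // Word 0  | 4  (index of the start of the sets_to_bindings array)
            // Word 1  | 2  (set 0: index of its entries in the sizes array)
            // Word 2  | 1  (size of binding 0)
            // Word 3  | 3  (size of binding 1)
            // Word 4  | 5  (set 0: index of its entries in bindings_to_written)
            // Word 5  | 7  (binding 0: start of its written flags)
            // Word 6  | 8  (binding 1: start of its written flags)
            // Word 7+ | one written flag per descriptor
            // so Input[ 2 + Input[ 1 + Input[ 0 + Input[ Input[0] ] ] ] ] = Input[10], the flag for
            // (set = 0, binding = 1, index = 2)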
1731 uint32_t words_needed;
1732 if (descriptor_indexing) {
1733 words_needed = 1 + (number_of_sets * 2) + (binding_count * 2) + descriptor_count;
1734 } else {
1735 words_needed = 1 + number_of_sets + binding_count + descriptor_count;
1736 }
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001737 alloc_info.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1738 buffer_info.size = words_needed * 4;
1739 result = vmaCreateBuffer(vmaAllocator, &buffer_info, &alloc_info, &di_input_block.buffer, &di_input_block.allocation,
1740 nullptr);
Tony-LunarGe29097a2020-12-03 10:59:19 -07001741 if (result != VK_SUCCESS) {
1742 ReportSetupProblem(device, "Unable to allocate device memory. Device could become unstable.");
1743 aborted = true;
1744 return;
1745 }
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001746
Tony-LunarGe29097a2020-12-03 10:59:19 -07001747 // Populate input buffer first with the sizes of every descriptor in every set, then with whether
1748 // each element of each descriptor has been written or not. See gpu_validation.md for a more thourough
1749 // outline of the input buffer format
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001750 result = vmaMapMemory(vmaAllocator, di_input_block.allocation, reinterpret_cast<void **>(&data_ptr));
1751 memset(data_ptr, 0, static_cast<size_t>(buffer_info.size));
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001752
Tony-LunarGe29097a2020-12-03 10:59:19 -07001753 // Descriptor indexing needs the number of descriptors at each binding.
1754 if (descriptor_indexing) {
1755 // Pointer to a sets array that points into the sizes array
1756 uint32_t *sets_to_sizes = data_ptr + 1;
1757 // Pointer to the sizes array that contains the array size of the descriptor at each binding
1758 uint32_t *sizes = sets_to_sizes + number_of_sets;
1759 // Pointer to another sets array that points into the bindings array that points into the written array
1760 uint32_t *sets_to_bindings = sizes + binding_count;
1761 // Pointer to the bindings array that points at the start of the writes in the writes array for each binding
1762 uint32_t *bindings_to_written = sets_to_bindings + number_of_sets;
1763 // Index of the next entry in the written array to be updated
1764 uint32_t written_index = 1 + (number_of_sets * 2) + (binding_count * 2);
1765 uint32_t bind_counter = number_of_sets + 1;
1766 // Index of the start of the sets_to_bindings array
1767 data_ptr[0] = number_of_sets + binding_count + 1;
1768
1769 for (auto s : state.per_set) {
1770 auto desc = s.bound_descriptor_set;
1771 if (desc && (desc->GetBindingCount() > 0)) {
1772 auto layout = desc->GetLayout();
1773 auto bindings = layout->GetSortedBindingSet();
1774 // For each set, fill in index of its bindings sizes in the sizes array
1775 *sets_to_sizes++ = bind_counter;
1776 // For each set, fill in the index of its bindings in the bindings_to_written array
1777 *sets_to_bindings++ = bind_counter + number_of_sets + binding_count;
1778 for (auto binding : bindings) {
1779 // For each binding, fill in its size in the sizes array
 1780                             // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline
1781 // uniform blocks
1782 if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
1783 sizes[binding] = 1;
1784 } else if (binding == layout->GetMaxBinding() && desc->IsVariableDescriptorCount(binding)) {
1785 sizes[binding] = desc->GetVariableDescriptorCount();
1786 } else {
1787 sizes[binding] = desc->GetDescriptorCountFromBinding(binding);
1788 }
1789 // Fill in the starting index for this binding in the written array in the bindings_to_written array
1790 bindings_to_written[binding] = written_index;
1791
 1792                             // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline
1793 // uniform blocks
1794 if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
1795 data_ptr[written_index++] = UINT_MAX;
1796 continue;
1797 }
1798
1799 auto index_range = desc->GetGlobalIndexRangeFromBinding(binding, true);
1800 // For each array element in the binding, update the written array with whether it has been written
1801 for (uint32_t i = index_range.start; i < index_range.end; ++i) {
1802 auto *descriptor = desc->GetDescriptorFromGlobalIndex(i);
1803 if (descriptor->updated) {
1804 SetDescriptorInitialized(data_ptr, written_index, descriptor);
1805 } else if (desc->IsUpdateAfterBind(binding)) {
1806 // If it hasn't been written now and it's update after bind, put it in a list to check at
1807 // QueueSubmit
1808 di_input_block.update_at_submit[written_index] = descriptor;
1809 }
1810 written_index++;
1811 }
1812 }
1813 auto last = desc->GetLayout()->GetMaxBinding();
1814 bindings_to_written += last + 1;
1815 bind_counter += last + 1;
1816 sizes += last + 1;
1817 } else {
1818 *sets_to_sizes++ = 0;
1819 *sets_to_bindings++ = 0;
1820 }
1821 }
1822 } else {
1823 // If no descriptor indexing, we don't need number of descriptors at each binding, so
1824 // no sets_to_sizes or sizes arrays, just sets_to_bindings, bindings_to_written and written_index
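                // Illustrative sketch (an assumed example derived from the population code below, not used at
                // runtime): for one set with binding 0 holding 1 descriptor and binding 1 holding 3 descriptors,
                // the input buffer would contain:
                // Word 0  | 1  (index of the start of the sets_to_bindings array)
                // Word 1  | 2  (set 0: index of its entries in bindings_to_written)
                // Word 2  | 4  (binding 0: start of its written flags)
                // Word 3  | 5  (binding 1: start of its written flags)
                // Word 4+ | one written flag per descriptor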
1825
1826 // Pointer to sets array that points into the bindings array that points into the written array
1827 uint32_t *sets_to_bindings = data_ptr + 1;
1828 // Pointer to the bindings array that points at the start of the writes in the writes array for each binding
1829 uint32_t *bindings_to_written = sets_to_bindings + number_of_sets;
1830 // Index of the next entry in the written array to be updated
1831 uint32_t written_index = 1 + number_of_sets + binding_count;
1832 uint32_t bind_counter = number_of_sets + 1;
1833 data_ptr[0] = 1;
1834
1835 for (auto s : state.per_set) {
1836 auto desc = s.bound_descriptor_set;
1837 if (desc && (desc->GetBindingCount() > 0)) {
1838 auto layout = desc->GetLayout();
1839 auto bindings = layout->GetSortedBindingSet();
1840 *sets_to_bindings++ = bind_counter;
1841 for (auto binding : bindings) {
1842 // Fill in the starting index for this binding in the written array in the bindings_to_written array
1843 bindings_to_written[binding] = written_index;
1844
 1845                         // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline
1846 // uniform blocks
1847 if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
1848 data_ptr[written_index++] = UINT_MAX;
1849 continue;
1850 }
1851
1852 auto index_range = desc->GetGlobalIndexRangeFromBinding(binding, true);
1853
1854 // For each array element in the binding, update the written array with whether it has been written
1855 for (uint32_t i = index_range.start; i < index_range.end; ++i) {
1856 auto *descriptor = desc->GetDescriptorFromGlobalIndex(i);
1857 if (descriptor->updated) {
1858 SetDescriptorInitialized(data_ptr, written_index, descriptor);
1859 } else if (desc->IsUpdateAfterBind(binding)) {
1860 // If it hasn't been written now and it's update after bind, put it in a list to check at
1861 // QueueSubmit
1862 di_input_block.update_at_submit[written_index] = descriptor;
1863 }
1864 written_index++;
1865 }
1866 }
1867 auto last = desc->GetLayout()->GetMaxBinding();
1868 bindings_to_written += last + 1;
1869 bind_counter += last + 1;
1870 } else {
1871 *sets_to_bindings++ = 0;
1872 }
1873 }
1874 }
1875 vmaUnmapMemory(vmaAllocator, di_input_block.allocation);
1876
1877 di_input_desc_buffer_info.range = (words_needed * 4);
1878 di_input_desc_buffer_info.buffer = di_input_block.buffer;
1879 di_input_desc_buffer_info.offset = 0;
1880
1881 desc_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1882 desc_writes[1].dstBinding = 1;
1883 desc_writes[1].descriptorCount = 1;
1884 desc_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1885 desc_writes[1].pBufferInfo = &di_input_desc_buffer_info;
1886 desc_writes[1].dstSet = desc_sets[0];
1887
1888 desc_count = 2;
1889 }
Tony-LunarG0e564722019-03-19 16:09:14 -06001890 }
Tony-LunarGb2501d22019-01-28 09:59:13 -07001891
Tony-LunarGc111b242020-06-30 14:43:45 -06001892 if ((device_extensions.vk_ext_buffer_device_address || device_extensions.vk_khr_buffer_device_address) && buffer_map.size() &&
Tony-LunarG5c38b182020-06-10 16:15:32 -06001893 shaderInt64 && enabled_features.core12.bufferDeviceAddress) {
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001894 // Example BDA input buffer assuming 2 buffers using BDA:
1895 // Word 0 | Index of start of buffer sizes (in this case 5)
1896 // Word 1 | 0x0000000000000000
1897 // Word 2 | Device Address of first buffer (Addresses sorted in ascending order)
1898 // Word 3 | Device Address of second buffer
1899 // Word 4 | 0xffffffffffffffff
1900 // Word 5 | 0 (size of pretend buffer at word 1)
1901 // Word 6 | Size in bytes of first buffer
1902 // Word 7 | Size in bytes of second buffer
1903 // Word 8 | 0 (size of pretend buffer in word 4)
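        // Hedged sketch of how the instrumented shader code is expected to use this table (see gpu_validation.md for
        // the authoritative description): given a 64-bit device address p and an access of n bytes, find the last
        // address entry that is <= p and treat the access as in bounds only if p + n <= that address + its size.
        // The 0 and 0xffffffffffffffff sentinel entries (each with size 0) presumably bound the search so that
        // addresses outside every known buffer fail the check.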
1904
Tony-LunarG99b880b2019-09-26 11:19:52 -06001905 uint32_t num_buffers = static_cast<uint32_t>(buffer_map.size());
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001906 uint32_t words_needed = (num_buffers + 3) + (num_buffers + 2);
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001907 alloc_info.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1908 buffer_info.size = words_needed * 8; // 64 bit words
Tony-LunarG99b880b2019-09-26 11:19:52 -06001909 result =
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001910 vmaCreateBuffer(vmaAllocator, &buffer_info, &alloc_info, &bda_input_block.buffer, &bda_input_block.allocation, nullptr);
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001911 if (result != VK_SUCCESS) {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001912 ReportSetupProblem(device, "Unable to allocate device memory. Device could become unstable.");
Tony-LunarG99b880b2019-09-26 11:19:52 -06001913 aborted = true;
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001914 return;
1915 }
1916 uint64_t *bda_data;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001917 result = vmaMapMemory(vmaAllocator, bda_input_block.allocation, reinterpret_cast<void **>(&bda_data));
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001918 uint32_t address_index = 1;
1919 uint32_t size_index = 3 + num_buffers;
Nathaniel Cesarioce9b4812020-12-17 08:55:28 -07001920 memset(bda_data, 0, static_cast<size_t>(buffer_info.size));
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001921 bda_data[0] = size_index; // Start of buffer sizes
1922 bda_data[address_index++] = 0; // NULL address
1923 bda_data[size_index++] = 0;
1924
Tony-LunarG99b880b2019-09-26 11:19:52 -06001925 for (auto const &value : buffer_map) {
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001926 bda_data[address_index++] = value.first;
1927 bda_data[size_index++] = value.second;
1928 }
1929 bda_data[address_index] = UINTPTR_MAX;
1930 bda_data[size_index] = 0;
Tony-LunarG99b880b2019-09-26 11:19:52 -06001931 vmaUnmapMemory(vmaAllocator, bda_input_block.allocation);
Tony-LunarG8eb5a002019-07-25 16:49:00 -06001932
1933 bda_input_desc_buffer_info.range = (words_needed * 8);
1934 bda_input_desc_buffer_info.buffer = bda_input_block.buffer;
1935 bda_input_desc_buffer_info.offset = 0;
1936
1937 desc_writes[desc_count].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1938 desc_writes[desc_count].dstBinding = 2;
1939 desc_writes[desc_count].descriptorCount = 1;
1940 desc_writes[desc_count].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1941 desc_writes[desc_count].pBufferInfo = &bda_input_desc_buffer_info;
1942 desc_writes[desc_count].dstSet = desc_sets[0];
1943 desc_count++;
1944 }
1945
Tony-LunarGb2501d22019-01-28 09:59:13 -07001946 // Write the descriptor
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001947 output_desc_buffer_info.buffer = output_block.buffer;
1948 output_desc_buffer_info.offset = 0;
Tony-LunarGb2501d22019-01-28 09:59:13 -07001949
Tony-LunarG1b2e0c32019-02-07 17:13:27 -07001950 desc_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1951 desc_writes[0].descriptorCount = 1;
1952 desc_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
1953 desc_writes[0].pBufferInfo = &output_desc_buffer_info;
1954 desc_writes[0].dstSet = desc_sets[0];
1955 DispatchUpdateDescriptorSets(device, desc_count, desc_writes, 0, NULL);
Tony-LunarGb2501d22019-01-28 09:59:13 -07001956
locke-lunargb8d7a7a2020-10-25 16:01:52 -06001957 const auto *pipeline_state = state.pipeline_state;
1958 if (pipeline_state) {
1959 if ((pipeline_state->pipeline_layout->set_layouts.size() <= desc_set_bind_index) &&
Tony-LunarG9de6e5f2020-06-22 13:02:48 -06001960 !pipeline_state->pipeline_layout->destroyed) {
Jeff Bolze7fc67b2019-10-04 12:29:31 -05001961 DispatchCmdBindDescriptorSets(cmd_buffer, bind_point, pipeline_state->pipeline_layout->layout, desc_set_bind_index, 1,
Tony-LunarG99b880b2019-09-26 11:19:52 -06001962 desc_sets.data(), 0, nullptr);
Tony-LunarGb2501d22019-01-28 09:59:13 -07001963 }
locke-lunargb8d7a7a2020-10-25 16:01:52 -06001964 if (pipeline_state->pipeline_layout->destroyed) {
Tony-LunarG9de6e5f2020-06-22 13:02:48 -06001965 ReportSetupProblem(device, "Pipeline layout has been destroyed, aborting GPU-AV");
1966 aborted = true;
1967 } else {
1968 // Record buffer and memory info in CB state tracking
1969 GetBufferInfo(cmd_buffer)
Tony-LunarG7de10e82020-11-24 11:31:55 -07001970 .emplace_back(output_block, di_input_block, bda_input_block, desc_sets[0], desc_pool, bind_point, cmd_type);
Tony-LunarG9de6e5f2020-06-22 13:02:48 -06001971 }
Tony-LunarGb2501d22019-01-28 09:59:13 -07001972 } else {
Mark Lobodzinskia8151b02020-02-27 13:38:08 -07001973 ReportSetupProblem(device, "Unable to find pipeline state");
Tony-LunarG9de6e5f2020-06-22 13:02:48 -06001974 aborted = true;
1975 }
1976 if (aborted) {
Tony-LunarG99b880b2019-09-26 11:19:52 -06001977 vmaDestroyBuffer(vmaAllocator, di_input_block.buffer, di_input_block.allocation);
1978 vmaDestroyBuffer(vmaAllocator, bda_input_block.buffer, bda_input_block.allocation);
1979 vmaDestroyBuffer(vmaAllocator, output_block.buffer, output_block.allocation);
Tony-LunarGb2501d22019-01-28 09:59:13 -07001980 return;
1981 }
1982}