blob: e945bf071f3d3b64fbbb9daa6e1fd1f6ab09c448 [file] [log] [blame]
Karl Schultz7b024b42018-08-30 16:18:18 -06001/* Copyright (c) 2015-2019 The Khronos Group Inc.
2 * Copyright (c) 2015-2019 Valve Corporation
3 * Copyright (c) 2015-2019 LunarG, Inc.
4 * Copyright (C) 2015-2019 Google Inc.
Chris Forbes47567b72017-06-09 12:09:45 -07005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Chris Forbes <chrisf@ijw.co.nz>
Dave Houlton51653902018-06-22 17:32:13 -060019 * Author: Dave Houlton <daveh@lunarg.com>
Chris Forbes47567b72017-06-09 12:09:45 -070020 */
21
22#include <cinttypes>
23#include <cassert>
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +020024#include <chrono>
Chris Forbes47567b72017-06-09 12:09:45 -070025#include <vector>
26#include <unordered_map>
27#include <string>
28#include <sstream>
29#include <SPIRV/spirv.hpp>
30#include "vk_loader_platform.h"
31#include "vk_enum_string_helper.h"
Chris Forbes47567b72017-06-09 12:09:45 -070032#include "vk_layer_data.h"
33#include "vk_layer_extension_utils.h"
34#include "vk_layer_utils.h"
35#include "core_validation.h"
Chris Forbes47567b72017-06-09 12:09:45 -070036#include "shader_validation.h"
Chris Forbes4ae55b32017-06-09 14:42:56 -070037#include "spirv-tools/libspirv.h"
Chris Forbes9a61e082017-07-24 15:35:29 -070038#include "xxhash.h"
Chris Forbes47567b72017-06-09 12:09:45 -070039
// Per-device / per-instance layer state maps. They are owned and populated by
// core_validation; declared extern here so the shader checks can look them up.
namespace core_validation {
extern unordered_map<void *, layer_data *> layer_data_map;
extern unordered_map<void *, instance_layer_data *> instance_layer_data_map;
};  // namespace core_validation

using core_validation::instance_layer_data_map;
using core_validation::layer_data_map;
// Bitmask characterizing the fundamental numeric class of a VkFormat or SPIR-V type,
// used to check that an attachment/vertex-input format and the shader variable bound
// to it agree (see GetFormatType / GetFundamentalType below).
enum FORMAT_TYPE {
    FORMAT_TYPE_FLOAT = 1,  // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
    FORMAT_TYPE_SINT = 2,
    FORMAT_TYPE_UINT = 4,
};

// (location, component) pair identifying one slot of a shader stage interface.
typedef std::pair<unsigned, unsigned> location_t;
// Description of a single variable (or flattened block member) appearing in a shader
// stage interface: inputs, outputs, or input attachments.
struct interface_var {
    uint32_t id;       // SPIR-V result id of the OpVariable
    uint32_t type_id;  // SPIR-V id of the variable's (or block member's) type
    uint32_t offset;   // location offset, for entries that span multiple locations
    bool is_patch;
    bool is_block_member;
    bool is_relaxed_precision;
    // TODO: collect the name, too? Isn't required to be present.
};
65
// Human-readable name plus interface arrayness for each graphics pipeline stage.
struct shader_stage_attributes {
    char const *const name;
    bool arrayed_input;   // stage reads a per-vertex-arrayed input interface
    bool arrayed_output;  // stage writes a per-vertex-arrayed output interface
};

// NOTE(review): ordering appears to follow pipeline stage order (vertex, tess ctrl,
// tess eval, geometry, fragment) — confirm against the index used by callers.
static shader_stage_attributes shader_stage_attribs[] = {
    {"vertex shader", false, false}, {"tessellation control shader", true, true}, {"tessellation evaluation shader", true, false},
    {"geometry shader", true, false}, {"fragment shader", false, false},
};
76
77// SPIRV utility functions
Shannon McPhersonc06c33d2018-06-28 17:21:12 -060078void shader_module::BuildDefIndex() {
Chris Forbes47567b72017-06-09 12:09:45 -070079 for (auto insn : *this) {
80 switch (insn.opcode()) {
81 // Types
82 case spv::OpTypeVoid:
83 case spv::OpTypeBool:
84 case spv::OpTypeInt:
85 case spv::OpTypeFloat:
86 case spv::OpTypeVector:
87 case spv::OpTypeMatrix:
88 case spv::OpTypeImage:
89 case spv::OpTypeSampler:
90 case spv::OpTypeSampledImage:
91 case spv::OpTypeArray:
92 case spv::OpTypeRuntimeArray:
93 case spv::OpTypeStruct:
94 case spv::OpTypeOpaque:
95 case spv::OpTypePointer:
96 case spv::OpTypeFunction:
97 case spv::OpTypeEvent:
98 case spv::OpTypeDeviceEvent:
99 case spv::OpTypeReserveId:
100 case spv::OpTypeQueue:
101 case spv::OpTypePipe:
Shannon McPherson0fa28232018-11-01 11:59:02 -0600102 case spv::OpTypeAccelerationStructureNV:
Chris Forbes47567b72017-06-09 12:09:45 -0700103 def_index[insn.word(1)] = insn.offset();
104 break;
105
106 // Fixed constants
107 case spv::OpConstantTrue:
108 case spv::OpConstantFalse:
109 case spv::OpConstant:
110 case spv::OpConstantComposite:
111 case spv::OpConstantSampler:
112 case spv::OpConstantNull:
113 def_index[insn.word(2)] = insn.offset();
114 break;
115
116 // Specialization constants
117 case spv::OpSpecConstantTrue:
118 case spv::OpSpecConstantFalse:
119 case spv::OpSpecConstant:
120 case spv::OpSpecConstantComposite:
121 case spv::OpSpecConstantOp:
122 def_index[insn.word(2)] = insn.offset();
123 break;
124
125 // Variables
126 case spv::OpVariable:
127 def_index[insn.word(2)] = insn.offset();
128 break;
129
130 // Functions
131 case spv::OpFunction:
132 def_index[insn.word(2)] = insn.offset();
133 break;
134
135 default:
136 // We don't care about any other defs for now.
137 break;
138 }
139 }
140}
141
Jeff Bolz105d6492018-09-29 15:46:44 -0500142unsigned ExecutionModelToShaderStageFlagBits(unsigned mode) {
143 switch (mode) {
144 case spv::ExecutionModelVertex:
145 return VK_SHADER_STAGE_VERTEX_BIT;
146 case spv::ExecutionModelTessellationControl:
147 return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
148 case spv::ExecutionModelTessellationEvaluation:
149 return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
150 case spv::ExecutionModelGeometry:
151 return VK_SHADER_STAGE_GEOMETRY_BIT;
152 case spv::ExecutionModelFragment:
153 return VK_SHADER_STAGE_FRAGMENT_BIT;
154 case spv::ExecutionModelGLCompute:
155 return VK_SHADER_STAGE_COMPUTE_BIT;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600156 case spv::ExecutionModelRayGenerationNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700157 return VK_SHADER_STAGE_RAYGEN_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600158 case spv::ExecutionModelAnyHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700159 return VK_SHADER_STAGE_ANY_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600160 case spv::ExecutionModelClosestHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700161 return VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600162 case spv::ExecutionModelMissNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700163 return VK_SHADER_STAGE_MISS_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600164 case spv::ExecutionModelIntersectionNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700165 return VK_SHADER_STAGE_INTERSECTION_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600166 case spv::ExecutionModelCallableNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700167 return VK_SHADER_STAGE_CALLABLE_BIT_NV;
Jeff Bolz105d6492018-09-29 15:46:44 -0500168 case spv::ExecutionModelTaskNV:
169 return VK_SHADER_STAGE_TASK_BIT_NV;
170 case spv::ExecutionModelMeshNV:
171 return VK_SHADER_STAGE_MESH_BIT_NV;
172 default:
173 return 0;
174 }
175}
176
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600177static spirv_inst_iter FindEntrypoint(shader_module const *src, char const *name, VkShaderStageFlagBits stageBits) {
Chris Forbes47567b72017-06-09 12:09:45 -0700178 for (auto insn : *src) {
179 if (insn.opcode() == spv::OpEntryPoint) {
180 auto entrypointName = (char const *)&insn.word(3);
Jeff Bolz105d6492018-09-29 15:46:44 -0500181 auto executionModel = insn.word(1);
182 auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
Chris Forbes47567b72017-06-09 12:09:45 -0700183
184 if (!strcmp(entrypointName, name) && (entrypointStageBits & stageBits)) {
185 return insn;
186 }
187 }
188 }
189
190 return src->end();
191}
192
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600193static char const *StorageClassName(unsigned sc) {
Chris Forbes47567b72017-06-09 12:09:45 -0700194 switch (sc) {
195 case spv::StorageClassInput:
196 return "input";
197 case spv::StorageClassOutput:
198 return "output";
199 case spv::StorageClassUniformConstant:
200 return "const uniform";
201 case spv::StorageClassUniform:
202 return "uniform";
203 case spv::StorageClassWorkgroup:
204 return "workgroup local";
205 case spv::StorageClassCrossWorkgroup:
206 return "workgroup global";
207 case spv::StorageClassPrivate:
208 return "private global";
209 case spv::StorageClassFunction:
210 return "function";
211 case spv::StorageClassGeneric:
212 return "generic";
213 case spv::StorageClassAtomicCounter:
214 return "atomic counter";
215 case spv::StorageClassImage:
216 return "image";
217 case spv::StorageClassPushConstant:
218 return "push constant";
Chris Forbes9f89d752018-03-07 12:57:48 -0800219 case spv::StorageClassStorageBuffer:
220 return "storage buffer";
Chris Forbes47567b72017-06-09 12:09:45 -0700221 default:
222 return "unknown";
223 }
224}
225
226// Get the value of an integral constant
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600227unsigned GetConstantValue(shader_module const *src, unsigned id) {
Chris Forbes47567b72017-06-09 12:09:45 -0700228 auto value = src->get_def(id);
229 assert(value != src->end());
230
231 if (value.opcode() != spv::OpConstant) {
232 // TODO: Either ensure that the specialization transform is already performed on a module we're
233 // considering here, OR -- specialize on the fly now.
234 return 1;
235 }
236
237 return value.word(3);
238}
239
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600240static void DescribeTypeInner(std::ostringstream &ss, shader_module const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700241 auto insn = src->get_def(type);
242 assert(insn != src->end());
243
244 switch (insn.opcode()) {
245 case spv::OpTypeBool:
246 ss << "bool";
247 break;
248 case spv::OpTypeInt:
249 ss << (insn.word(3) ? 's' : 'u') << "int" << insn.word(2);
250 break;
251 case spv::OpTypeFloat:
252 ss << "float" << insn.word(2);
253 break;
254 case spv::OpTypeVector:
255 ss << "vec" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600256 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700257 break;
258 case spv::OpTypeMatrix:
259 ss << "mat" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600260 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700261 break;
262 case spv::OpTypeArray:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600263 ss << "arr[" << GetConstantValue(src, insn.word(3)) << "] of ";
264 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700265 break;
Chris Forbes062f1222018-08-21 15:34:15 -0700266 case spv::OpTypeRuntimeArray:
267 ss << "runtime arr[] of ";
268 DescribeTypeInner(ss, src, insn.word(2));
269 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700270 case spv::OpTypePointer:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600271 ss << "ptr to " << StorageClassName(insn.word(2)) << " ";
272 DescribeTypeInner(ss, src, insn.word(3));
Chris Forbes47567b72017-06-09 12:09:45 -0700273 break;
274 case spv::OpTypeStruct: {
275 ss << "struct of (";
276 for (unsigned i = 2; i < insn.len(); i++) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600277 DescribeTypeInner(ss, src, insn.word(i));
Chris Forbes47567b72017-06-09 12:09:45 -0700278 if (i == insn.len() - 1) {
279 ss << ")";
280 } else {
281 ss << ", ";
282 }
283 }
284 break;
285 }
286 case spv::OpTypeSampler:
287 ss << "sampler";
288 break;
289 case spv::OpTypeSampledImage:
290 ss << "sampler+";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600291 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700292 break;
293 case spv::OpTypeImage:
294 ss << "image(dim=" << insn.word(3) << ", sampled=" << insn.word(7) << ")";
295 break;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600296 case spv::OpTypeAccelerationStructureNV:
Jeff Bolz105d6492018-09-29 15:46:44 -0500297 ss << "accelerationStruture";
298 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700299 default:
300 ss << "oddtype";
301 break;
302 }
303}
304
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600305static std::string DescribeType(shader_module const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700306 std::ostringstream ss;
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600307 DescribeTypeInner(ss, src, type);
Chris Forbes47567b72017-06-09 12:09:45 -0700308 return ss.str();
309}
310
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600311static bool IsNarrowNumericType(spirv_inst_iter type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700312 if (type.opcode() != spv::OpTypeInt && type.opcode() != spv::OpTypeFloat) return false;
313 return type.word(2) < 64;
314}
315
// Compare a type from module 'a' against a type from module 'b' for interface
// compatibility. a_arrayed / b_arrayed mean the corresponding stage interface is
// per-vertex arrayed (tess/geometry), so one OpTypeArray level is stripped before
// comparing. When 'relaxed' is set, a vector in 'a' may match the narrow numeric
// element type expected by 'b', and a's vector may have more components than b's.
static bool TypesMatch(shader_module const *a, shader_module const *b, unsigned a_type, unsigned b_type, bool a_arrayed,
                       bool b_arrayed, bool relaxed) {
    // Walk two type trees together, and complain about differences
    auto a_insn = a->get_def(a_type);
    auto b_insn = b->get_def(b_type);
    assert(a_insn != a->end());
    assert(b_insn != b->end());

    // Ignore runtime-sized arrays-- they cannot appear in these interfaces.

    if (a_arrayed && a_insn.opcode() == spv::OpTypeArray) {
        // Strip the per-vertex array level off the producer side and recurse.
        return TypesMatch(a, b, a_insn.word(2), b_type, false, b_arrayed, relaxed);
    }

    if (b_arrayed && b_insn.opcode() == spv::OpTypeArray) {
        // We probably just found the extra level of arrayness in b_type: compare the type inside it to a_type
        return TypesMatch(a, b, a_type, b_insn.word(2), a_arrayed, false, relaxed);
    }

    if (a_insn.opcode() == spv::OpTypeVector && relaxed && IsNarrowNumericType(b_insn)) {
        // Relaxed matching: compare a's element type against b's scalar.
        return TypesMatch(a, b, a_insn.word(2), b_type, a_arrayed, b_arrayed, false);
    }

    if (a_insn.opcode() != b_insn.opcode()) {
        return false;
    }

    if (a_insn.opcode() == spv::OpTypePointer) {
        // Match on pointee type. storage class is expected to differ
        return TypesMatch(a, b, a_insn.word(3), b_insn.word(3), a_arrayed, b_arrayed, relaxed);
    }

    if (a_arrayed || b_arrayed) {
        // If we havent resolved array-of-verts by here, we're not going to.
        return false;
    }

    switch (a_insn.opcode()) {
        case spv::OpTypeBool:
            return true;
        case spv::OpTypeInt:
            // Match on width, signedness
            return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeFloat:
            // Match on width
            return a_insn.word(2) == b_insn.word(2);
        case spv::OpTypeVector:
            // Match on element type, count.
            if (!TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false)) return false;
            if (relaxed && IsNarrowNumericType(a->get_def(a_insn.word(2)))) {
                // Relaxed: producer may supply extra components.
                return a_insn.word(3) >= b_insn.word(3);
            } else {
                return a_insn.word(3) == b_insn.word(3);
            }
        case spv::OpTypeMatrix:
            // Match on element type, count.
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeArray:
            // Match on element type, count. these all have the same layout. we don't get here if b_arrayed. This differs from
            // vector & matrix types in that the array size is the id of a constant instruction, * not a literal within OpTypeArray
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   GetConstantValue(a, a_insn.word(3)) == GetConstantValue(b, b_insn.word(3));
        case spv::OpTypeStruct:
            // Match on all element types
            {
                if (a_insn.len() != b_insn.len()) {
                    return false;  // Structs cannot match if member counts differ
                }

                for (unsigned i = 2; i < a_insn.len(); i++) {
                    if (!TypesMatch(a, b, a_insn.word(i), b_insn.word(i), a_arrayed, b_arrayed, false)) {
                        return false;
                    }
                }

                return true;
            }
        default:
            // Remaining types are CLisms, or may not appear in the interfaces we are interested in. Just claim no match.
            return false;
    }
}
399
// Look up 'id' in 'map', returning 'def' when the key is absent.
static unsigned ValueOrDefault(std::unordered_map<unsigned, unsigned> const &map, unsigned id, unsigned def) {
    auto const found = map.find(id);
    return (found == map.end()) ? def : found->second;
}
407
// Number of interface locations consumed by 'type'. strip_array_level removes one
// array dimension first (used for per-vertex arrayed tess/geometry interfaces).
static unsigned GetLocationsConsumedByType(shader_module const *src, unsigned type, bool strip_array_level) {
    auto insn = src->get_def(type);
    assert(insn != src->end());

    switch (insn.opcode()) {
        case spv::OpTypePointer:
            // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
            // pointers around.
            return GetLocationsConsumedByType(src, insn.word(3), strip_array_level);
        case spv::OpTypeArray:
            if (strip_array_level) {
                return GetLocationsConsumedByType(src, insn.word(2), false);
            } else {
                // word 3 is the id of the array-length constant.
                return GetConstantValue(src, insn.word(3)) * GetLocationsConsumedByType(src, insn.word(2), false);
            }
        case spv::OpTypeMatrix:
            // Num locations is the dimension * element size
            return insn.word(3) * GetLocationsConsumedByType(src, insn.word(2), false);
        case spv::OpTypeVector: {
            auto scalar_type = src->get_def(insn.word(2));
            auto bit_width =
                (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;

            // Locations are 128-bit wide; 3- and 4-component vectors of 64 bit types require two.
            return (bit_width * insn.word(3) + 127) / 128;
        }
        default:
            // Everything else is just 1.
            return 1;

            // TODO: extend to handle 64bit scalar types, whose vectors may need multiple locations.
    }
}
441
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +0200442static unsigned GetComponentsConsumedByType(shader_module const *src, unsigned type, bool strip_array_level) {
443 auto insn = src->get_def(type);
444 assert(insn != src->end());
445
446 switch (insn.opcode()) {
447 case spv::OpTypePointer:
448 // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
449 // pointers around.
450 return GetComponentsConsumedByType(src, insn.word(3), strip_array_level);
451 case spv::OpTypeStruct: {
452 uint32_t sum = 0;
453 for (uint32_t i = 2; i < insn.len(); i++) { // i=2 to skip word(0) and word(1)=ID of struct
454 sum += GetComponentsConsumedByType(src, insn.word(i), false);
455 }
456 return sum;
457 }
458 case spv::OpTypeArray: {
459 uint32_t sum = 0;
460 for (uint32_t i = 2; i < insn.len(); i++) {
461 sum += GetComponentsConsumedByType(src, insn.word(i), false);
462 }
463 return sum;
464 }
465 case spv::OpTypeMatrix:
466 // Num locations is the dimension * element size
467 return insn.word(3) * GetComponentsConsumedByType(src, insn.word(2), false);
468 case spv::OpTypeVector: {
469 auto scalar_type = src->get_def(insn.word(2));
470 auto bit_width =
471 (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
472 // One component is 32-bit
473 return (bit_width * insn.word(3) + 31) / 32;
474 }
475 case spv::OpTypeFloat: {
476 auto bit_width = insn.word(2);
477 return (bit_width + 31) / 32;
478 }
479 case spv::OpTypeInt: {
480 auto bit_width = insn.word(2);
481 return (bit_width + 31) / 32;
482 }
483 case spv::OpConstant:
484 return GetComponentsConsumedByType(src, insn.word(1), false);
485 default:
486 return 0;
487 }
488}
489
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600490static unsigned GetLocationsConsumedByFormat(VkFormat format) {
Chris Forbes47567b72017-06-09 12:09:45 -0700491 switch (format) {
492 case VK_FORMAT_R64G64B64A64_SFLOAT:
493 case VK_FORMAT_R64G64B64A64_SINT:
494 case VK_FORMAT_R64G64B64A64_UINT:
495 case VK_FORMAT_R64G64B64_SFLOAT:
496 case VK_FORMAT_R64G64B64_SINT:
497 case VK_FORMAT_R64G64B64_UINT:
498 return 2;
499 default:
500 return 1;
501 }
502}
503
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600504static unsigned GetFormatType(VkFormat fmt) {
Dave Houltona9df0ce2018-02-07 10:51:23 -0700505 if (FormatIsSInt(fmt)) return FORMAT_TYPE_SINT;
506 if (FormatIsUInt(fmt)) return FORMAT_TYPE_UINT;
507 if (FormatIsDepthAndStencil(fmt)) return FORMAT_TYPE_FLOAT | FORMAT_TYPE_UINT;
508 if (fmt == VK_FORMAT_UNDEFINED) return 0;
Chris Forbes47567b72017-06-09 12:09:45 -0700509 // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader.
510 return FORMAT_TYPE_FLOAT;
511}
512
// characterizes a SPIR-V type appearing in an interface to a FF stage, for comparison to a VkFormat's characterization above.
// also used for input attachments, as we statically know their format.
// Recurses through pointers and composite types to the underlying scalar; returns one
// of the FORMAT_TYPE_* bits, or 0 for types with no numeric fundamental type.
static unsigned GetFundamentalType(shader_module const *src, unsigned type) {
    auto insn = src->get_def(type);
    assert(insn != src->end());

    switch (insn.opcode()) {
        case spv::OpTypeInt:
            // word 3 is the signedness flag
            return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
        case spv::OpTypeFloat:
            return FORMAT_TYPE_FLOAT;
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeImage:
            // element/component type is word 2
            return GetFundamentalType(src, insn.word(2));
        case spv::OpTypePointer:
            // pointee type is word 3
            return GetFundamentalType(src, insn.word(3));

        default:
            return 0;
    }
}
537
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600538static uint32_t GetShaderStageId(VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -0700539 uint32_t bit_pos = uint32_t(u_ffs(stage));
540 return bit_pos - 1;
541}
542
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600543static spirv_inst_iter GetStructType(shader_module const *src, spirv_inst_iter def, bool is_array_of_verts) {
Chris Forbes47567b72017-06-09 12:09:45 -0700544 while (true) {
545 if (def.opcode() == spv::OpTypePointer) {
546 def = src->get_def(def.word(3));
547 } else if (def.opcode() == spv::OpTypeArray && is_array_of_verts) {
548 def = src->get_def(def.word(2));
549 is_array_of_verts = false;
550 } else if (def.opcode() == spv::OpTypeStruct) {
551 return def;
552 } else {
553 return src->end();
554 }
555 }
556}
557
// If type_id (behind any pointer/per-vertex array level) is a Block-decorated struct,
// flatten its members into 'out' keyed by (location, component) and return true.
// Returns false when the variable is not an interface block, leaving the caller to
// treat it as a plain variable. 'blocks' maps Block-decorated type ids (built by
// CollectInterfaceByLocation).
static bool CollectInterfaceBlockMembers(shader_module const *src, std::map<location_t, interface_var> *out,
                                         std::unordered_map<unsigned, unsigned> const &blocks, bool is_array_of_verts, uint32_t id,
                                         uint32_t type_id, bool is_patch, int /*first_location*/) {
    // Walk down the type_id presented, trying to determine whether it's actually an interface block.
    auto type = GetStructType(src, src->get_def(type_id), is_array_of_verts && !is_patch);
    if (type == src->end() || blocks.find(type.word(1)) == blocks.end()) {
        // This isn't an interface block.
        return false;
    }

    std::unordered_map<unsigned, unsigned> member_components;
    std::unordered_map<unsigned, unsigned> member_relaxed_precision;
    std::unordered_map<unsigned, unsigned> member_patch;

    // Walk all the OpMemberDecorate for type's result id -- first pass, collect components.
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);

            if (insn.word(3) == spv::DecorationComponent) {
                unsigned component = insn.word(4);
                member_components[member_index] = component;
            }

            if (insn.word(3) == spv::DecorationRelaxedPrecision) {
                member_relaxed_precision[member_index] = 1;
            }

            if (insn.word(3) == spv::DecorationPatch) {
                member_patch[member_index] = 1;
            }
        }
    }

    // TODO: correctly handle location assignment from outside

    // Second pass -- produce the output, from Location decorations
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);
            unsigned member_type_id = type.word(2 + member_index);  // struct member types start at word 2

            if (insn.word(3) == spv::DecorationLocation) {
                unsigned location = insn.word(4);
                unsigned num_locations = GetLocationsConsumedByType(src, member_type_id, false);
                auto component_it = member_components.find(member_index);
                unsigned component = component_it == member_components.end() ? 0 : component_it->second;
                bool is_relaxed_precision = member_relaxed_precision.find(member_index) != member_relaxed_precision.end();
                bool member_is_patch = is_patch || member_patch.count(member_index) > 0;

                // Emit one interface_var per consumed location so multi-location members
                // fill every slot they occupy.
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    // TODO: member index in interface_var too?
                    v.type_id = member_type_id;
                    v.offset = offset;
                    v.is_patch = member_is_patch;
                    v.is_block_member = true;
                    v.is_relaxed_precision = is_relaxed_precision;
                    (*out)[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return true;
}
625
// Collect the entrypoint's interface variables for the given storage class (Input or
// Output), keyed by (location, component). Builtins are skipped and interface blocks
// are flattened into their members via CollectInterfaceBlockMembers.
static std::map<location_t, interface_var> CollectInterfaceByLocation(shader_module const *src, spirv_inst_iter entrypoint,
                                                                      spv::StorageClass sinterface, bool is_array_of_verts) {
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;
    std::unordered_map<unsigned, unsigned> var_components;
    std::unordered_map<unsigned, unsigned> blocks;
    std::unordered_map<unsigned, unsigned> var_patch;
    std::unordered_map<unsigned, unsigned> var_relaxed_precision;

    // First pass: collect the decorations of interest, keyed by decorated id.
    for (auto insn : *src) {
        // We consider two interface models: SSO rendezvous-by-location, and builtins. Complain about anything that
        // fits neither model.
        if (insn.opcode() == spv::OpDecorate) {
            if (insn.word(2) == spv::DecorationLocation) {
                var_locations[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBuiltIn) {
                var_builtins[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationComponent) {
                var_components[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBlock) {
                blocks[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationPatch) {
                var_patch[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationRelaxedPrecision) {
                var_relaxed_precision[insn.word(1)] = 1;
            }
        }
    }

    // TODO: handle grouped decorations
    // TODO: handle index=1 dual source outputs from FS -- two vars will have the same location, and we DON'T want to clobber.

    // Find the end of the entrypoint's name string. additional zero bytes follow the actual null terminator, to fill out the
    // rest of the word - so we only need to look at the last byte in the word to determine which word contains the terminator.
    uint32_t word = 3;
    while (entrypoint.word(word) & 0xff000000u) {
        ++word;
    }
    ++word;

    std::map<location_t, interface_var> out;

    // The remaining words of OpEntryPoint list the ids of the interface variables.
    for (; word < entrypoint.len(); word++) {
        auto insn = src->get_def(entrypoint.word(word));
        assert(insn != src->end());
        assert(insn.opcode() == spv::OpVariable);

        if (insn.word(3) == static_cast<uint32_t>(sinterface)) {  // OpVariable word 3 is the storage class
            unsigned id = insn.word(2);
            unsigned type = insn.word(1);

            int location = ValueOrDefault(var_locations, id, static_cast<unsigned>(-1));
            int builtin = ValueOrDefault(var_builtins, id, static_cast<unsigned>(-1));
            unsigned component = ValueOrDefault(var_components, id, 0);  // Unspecified is OK, is 0
            bool is_patch = var_patch.find(id) != var_patch.end();
            bool is_relaxed_precision = var_relaxed_precision.find(id) != var_relaxed_precision.end();

            if (builtin != -1)
                continue;
            else if (!CollectInterfaceBlockMembers(src, &out, blocks, is_array_of_verts, id, type, is_patch, location)) {
                // A user-defined interface variable, with a location. Where a variable occupied multiple locations, emit
                // one result for each.
                unsigned num_locations = GetLocationsConsumedByType(src, type, is_array_of_verts && !is_patch);
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    v.type_id = type;
                    v.offset = offset;
                    v.is_patch = is_patch;
                    v.is_relaxed_precision = is_relaxed_precision;
                    out[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return out;
}
714
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600715static std::vector<std::pair<uint32_t, interface_var>> CollectInterfaceByInputAttachmentIndex(
Chris Forbes47567b72017-06-09 12:09:45 -0700716 shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids) {
717 std::vector<std::pair<uint32_t, interface_var>> out;
718
719 for (auto insn : *src) {
720 if (insn.opcode() == spv::OpDecorate) {
721 if (insn.word(2) == spv::DecorationInputAttachmentIndex) {
722 auto attachment_index = insn.word(3);
723 auto id = insn.word(1);
724
725 if (accessible_ids.count(id)) {
726 auto def = src->get_def(id);
727 assert(def != src->end());
728
729 if (def.opcode() == spv::OpVariable && insn.word(3) == spv::StorageClassUniformConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600730 auto num_locations = GetLocationsConsumedByType(src, def.word(1), false);
Chris Forbes47567b72017-06-09 12:09:45 -0700731 for (unsigned int offset = 0; offset < num_locations; offset++) {
732 interface_var v = {};
733 v.id = id;
734 v.type_id = def.word(1);
735 v.offset = offset;
736 out.emplace_back(attachment_index + offset, v);
737 }
738 }
739 }
740 }
741 }
742 }
743
744 return out;
745}
746
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700747static bool IsWritableDescriptorType(shader_module const *module, uint32_t type_id, bool is_storage_buffer) {
Chris Forbes8af24522018-03-07 11:37:45 -0800748 auto type = module->get_def(type_id);
749
750 // Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
Chris Forbes062f1222018-08-21 15:34:15 -0700751 while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
752 if (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypeRuntimeArray) {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700753 type = module->get_def(type.word(2)); // Element type
Chris Forbes8af24522018-03-07 11:37:45 -0800754 } else {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700755 type = module->get_def(type.word(3)); // Pointee type
Chris Forbes8af24522018-03-07 11:37:45 -0800756 }
757 }
758
759 switch (type.opcode()) {
760 case spv::OpTypeImage: {
761 auto dim = type.word(3);
762 auto sampled = type.word(7);
763 return sampled == 2 && dim != spv::DimSubpassData;
764 }
765
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700766 case spv::OpTypeStruct: {
767 std::unordered_set<unsigned> nonwritable_members;
Chris Forbes8af24522018-03-07 11:37:45 -0800768 for (auto insn : *module) {
769 if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
770 if (insn.word(2) == spv::DecorationBufferBlock) {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700771 // Legacy storage block in the Uniform storage class
772 // has its struct type decorated with BufferBlock.
773 is_storage_buffer = true;
Chris Forbes8af24522018-03-07 11:37:45 -0800774 }
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700775 } else if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1) &&
776 insn.word(3) == spv::DecorationNonWritable) {
777 nonwritable_members.insert(insn.word(2));
Chris Forbes8af24522018-03-07 11:37:45 -0800778 }
779 }
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700780
781 // A buffer is writable if it's either flavor of storage buffer, and has any member not decorated
782 // as nonwritable.
783 return is_storage_buffer && nonwritable_members.size() != type.len() - 2;
784 }
Chris Forbes8af24522018-03-07 11:37:45 -0800785 }
786
787 return false;
788}
789
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600790static std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterfaceByDescriptorSlot(
Chris Forbes8af24522018-03-07 11:37:45 -0800791 debug_report_data const *report_data, shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids,
792 bool *has_writable_descriptor) {
Chris Forbes47567b72017-06-09 12:09:45 -0700793 std::unordered_map<unsigned, unsigned> var_sets;
794 std::unordered_map<unsigned, unsigned> var_bindings;
Chris Forbes8af24522018-03-07 11:37:45 -0800795 std::unordered_map<unsigned, unsigned> var_nonwritable;
Chris Forbes47567b72017-06-09 12:09:45 -0700796
797 for (auto insn : *src) {
798 // All variables in the Uniform or UniformConstant storage classes are required to be decorated with both
799 // DecorationDescriptorSet and DecorationBinding.
800 if (insn.opcode() == spv::OpDecorate) {
801 if (insn.word(2) == spv::DecorationDescriptorSet) {
802 var_sets[insn.word(1)] = insn.word(3);
803 }
804
805 if (insn.word(2) == spv::DecorationBinding) {
806 var_bindings[insn.word(1)] = insn.word(3);
807 }
Chris Forbes8af24522018-03-07 11:37:45 -0800808
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700809 // Note: do toplevel DecorationNonWritable out here; it applies to
810 // the OpVariable rather than the type.
Chris Forbes8af24522018-03-07 11:37:45 -0800811 if (insn.word(2) == spv::DecorationNonWritable) {
812 var_nonwritable[insn.word(1)] = 1;
813 }
Chris Forbes47567b72017-06-09 12:09:45 -0700814 }
815 }
816
817 std::vector<std::pair<descriptor_slot_t, interface_var>> out;
818
819 for (auto id : accessible_ids) {
820 auto insn = src->get_def(id);
821 assert(insn != src->end());
822
823 if (insn.opcode() == spv::OpVariable &&
Chris Forbes9f89d752018-03-07 12:57:48 -0800824 (insn.word(3) == spv::StorageClassUniform || insn.word(3) == spv::StorageClassUniformConstant ||
825 insn.word(3) == spv::StorageClassStorageBuffer)) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600826 unsigned set = ValueOrDefault(var_sets, insn.word(2), 0);
827 unsigned binding = ValueOrDefault(var_bindings, insn.word(2), 0);
Chris Forbes47567b72017-06-09 12:09:45 -0700828
829 interface_var v = {};
830 v.id = insn.word(2);
831 v.type_id = insn.word(1);
832 out.emplace_back(std::make_pair(set, binding), v);
Chris Forbes8af24522018-03-07 11:37:45 -0800833
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700834 if (var_nonwritable.find(id) == var_nonwritable.end() &&
835 IsWritableDescriptorType(src, insn.word(1), insn.word(3) == spv::StorageClassStorageBuffer)) {
Chris Forbes8af24522018-03-07 11:37:45 -0800836 *has_writable_descriptor = true;
837 }
Chris Forbes47567b72017-06-09 12:09:45 -0700838 }
839 }
840
841 return out;
842}
843
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600844static bool ValidateViConsistency(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi) {
Chris Forbes47567b72017-06-09 12:09:45 -0700845 // Walk the binding descriptions, which describe the step rate and stride of each vertex buffer. Each binding should
846 // be specified only once.
847 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
848 bool skip = false;
849
850 for (unsigned i = 0; i < vi->vertexBindingDescriptionCount; i++) {
851 auto desc = &vi->pVertexBindingDescriptions[i];
852 auto &binding = bindings[desc->binding];
853 if (binding) {
Dave Houlton78d09922018-05-17 15:48:45 -0600854 // TODO: "VUID-VkGraphicsPipelineCreateInfo-pStages-00742" perhaps?
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -0600855 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -0600856 kVUID_Core_Shader_InconsistentVi, "Duplicate vertex input binding descriptions for binding %d",
Chris Forbes47567b72017-06-09 12:09:45 -0700857 desc->binding);
858 } else {
859 binding = desc;
860 }
861 }
862
863 return skip;
864}
865
// Cross-check the pipeline's vertex input attribute descriptions against the vertex shader's
// declared input interface. Reports: attributes not consumed by the shader (perf warning),
// shader inputs not provided by any attribute (error), and fundamental type mismatches (error).
static bool ValidateViAgainstVsInputs(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi,
                                      shader_module const *vs, spirv_inst_iter entrypoint) {
    bool skip = false;

    // Shader-side inputs, keyed by (location, component).
    auto inputs = CollectInterfaceByLocation(vs, entrypoint, spv::StorageClassInput, false);

    // Build index by location. An attribute whose format consumes multiple locations is entered
    // once per location it occupies.
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->vertexAttributeDescriptionCount; i++) {
            auto num_locations = GetLocationsConsumedByFormat(vi->pVertexAttributeDescriptions[i].format);
            for (auto j = 0u; j < num_locations; j++) {
                attribs[vi->pVertexAttributeDescriptions[i].location + j] = &vi->pVertexAttributeDescriptions[i];
            }
        }
    }

    // Two-pointer merge walk over the two sorted maps; both are keyed (primarily) by location.
    auto it_a = attribs.begin();
    auto it_b = inputs.begin();
    bool used = false;  // Carries "previous location of this attribute matched an input" forward

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first.first;

        if (!a_at_end && (b_at_end || a_first < b_first)) {
            // Attribute location with no matching shader input. Only warn if no earlier location
            // of the same (multi-location) attribute was consumed.
            if (!used &&
                log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                        "Vertex attribute at location %d not consumed by vertex shader", a_first)) {
                skip = true;
            }
            used = false;
            it_a++;
        } else if (!b_at_end && (a_at_end || b_first < a_first)) {
            // Shader input location with no attribute feeding it -- undefined data.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                            "Vertex shader consumes input at location %d but not provided", b_first);
            it_b++;
        } else {
            // Locations line up -- verify the fundamental type classes are compatible.
            unsigned attrib_type = GetFormatType(it_a->second->format);
            unsigned input_type = GetFundamentalType(vs, it_b->second.type_id);

            // Type checking
            if (!(attrib_type & input_type)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Attribute type of `%s` at location %d does not match vertex shader input type of `%s`",
                                string_VkFormat(it_a->second->format), a_first, DescribeType(vs, it_b->second.type_id).c_str());
            }

            // OK! Advance only the input; the attribute may feed further inputs at later
            // (location, component) keys.
            used = true;
            it_b++;
        }
    }

    return skip;
}
927
// Cross-check the fragment shader's output interface against the subpass's color attachments.
// Reports: outputs with no matching attachment, attachments not written by the shader (when their
// color write mask is non-zero), type mismatches, and -- when alpha-to-coverage is enabled --
// a missing 4-component output at location 0.
static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_data, shader_module const *fs,
                                               spirv_inst_iter entrypoint, PIPELINE_STATE const *pipeline, uint32_t subpass_index) {
    auto rpci = pipeline->rp_state->createInfo.ptr();

    // Collect the subpass's color attachments that have a defined format, keyed by color
    // attachment slot index.
    std::map<uint32_t, VkFormat> color_attachments;
    auto subpass = rpci->pSubpasses[subpass_index];
    for (auto i = 0u; i < subpass.colorAttachmentCount; ++i) {
        uint32_t attachment = subpass.pColorAttachments[i].attachment;
        if (attachment == VK_ATTACHMENT_UNUSED) continue;
        if (rpci->pAttachments[attachment].format != VK_FORMAT_UNDEFINED) {
            color_attachments[i] = rpci->pAttachments[attachment].format;
        }
    }

    bool skip = false;

    // TODO: dual source blend index (spv::DecIndex, zero if not provided)

    // Shader-side outputs, keyed by (location, component).
    auto outputs = CollectInterfaceByLocation(fs, entrypoint, spv::StorageClassOutput, false);

    auto it_a = outputs.begin();
    auto it_b = color_attachments.begin();
    bool used = false;  // Whether the current attachment was matched by a prior output entry
    bool alphaToCoverageEnabled = pipeline->graphicsPipelineCI.pMultisampleState != NULL &&
                                  pipeline->graphicsPipelineCI.pMultisampleState->alphaToCoverageEnable == VK_TRUE;
    bool locationZeroHasAlpha = false;

    // Walk attachment list and outputs together

    while ((outputs.size() > 0 && it_a != outputs.end()) || (color_attachments.size() > 0 && it_b != color_attachments.end())) {
        bool a_at_end = outputs.size() == 0 || it_a == outputs.end();
        bool b_at_end = color_attachments.size() == 0 || it_b == color_attachments.end();

        // Track whether location 0 carries a 4-component (i.e. alpha-bearing) output, needed for
        // the alpha-to-coverage check after the walk.
        if (!a_at_end && it_a->first.first == 0 && fs->get_def(it_a->second.type_id) != fs->end() &&
            GetComponentsConsumedByType(fs, it_a->second.type_id, false) == 4)
            locationZeroHasAlpha = true;

        if (!a_at_end && (b_at_end || it_a->first.first < it_b->first)) {
            // Output with no attachment. A location-0 output is exempt when alpha-to-coverage is
            // on, since it legitimately exists just to supply alpha.
            if (!alphaToCoverageEnabled || it_a->first.first != 0) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                                "fragment shader writes to output location %d with no matching attachment", it_a->first.first);
            }
            it_a++;
        } else if (!b_at_end && (a_at_end || it_a->first.first > it_b->first)) {
            // Only complain if there are unmasked channels for this attachment. If the writemask is 0, it's acceptable for the
            // shader to not produce a matching output.
            if (!used) {
                if (pipeline->attachments[it_b->first].colorWriteMask != 0) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                                    "Attachment %d not written by fragment shader; undefined values will be written to attachment",
                                    it_b->first);
                }
            }
            used = false;
            it_b++;
        } else {
            // Locations line up -- verify fundamental type compatibility.
            unsigned output_type = GetFundamentalType(fs, it_a->second.type_id);
            unsigned att_type = GetFormatType(it_b->second);

            // Type checking
            if (!(output_type & att_type)) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                    "Attachment %d of type `%s` does not match fragment shader output type of `%s`; resulting values are undefined",
                    it_b->first, string_VkFormat(it_b->second), DescribeType(fs, it_a->second.type_id).c_str());
            }

            // OK! Advance only the output; multiple (location, component) entries may map to the
            // same attachment slot.
            it_a++;
            used = true;
        }
    }

    if (alphaToCoverageEnabled && !locationZeroHasAlpha) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_NoAlphaAtLocation0WithAlphaToCoverage,
                        "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
    }

    return skip;
}
1012
// For PointSize analysis we need to know if the variable decorated with the PointSize built-in was actually written to.
// This function examines instructions in the static call tree for a write to this variable.
//
// builtin_instr is the OpDecorate/OpMemberDecorate instruction carrying the PointSize decoration;
// entrypoint is the OpEntryPoint instruction whose call tree is searched. Returns true iff an
// OpStore targeting the (possibly struct-member) built-in is found.
static bool IsPointSizeWritten(shader_module const *src, spirv_inst_iter builtin_instr, spirv_inst_iter entrypoint) {
    auto type = builtin_instr.opcode();
    uint32_t target_id = builtin_instr.word(1);  // Initially the decorated type or variable id
    bool init_complete = false;

    if (type == spv::OpMemberDecorate) {
        // Built-in is part of a structure -- examine instructions up to first function body to get initial IDs
        // Follow the chain: decorated struct type -> Output pointer type -> variable of that type.
        auto insn = entrypoint;
        while (!init_complete && (insn.opcode() != spv::OpFunction)) {
            switch (insn.opcode()) {
                case spv::OpTypePointer:
                    // Pointer in Output storage class whose pointee is the decorated struct.
                    if ((insn.word(3) == target_id) && (insn.word(2) == spv::StorageClassOutput)) {
                        target_id = insn.word(1);
                    }
                    break;
                case spv::OpVariable:
                    // Variable whose result type is that pointer -- switch target to its result id.
                    if (insn.word(1) == target_id) {
                        target_id = insn.word(2);
                        init_complete = true;
                    }
                    break;
            }
            insn++;
        }
    }

    // If we never found the output variable for a struct-member built-in, it cannot be written.
    if (!init_complete && (type == spv::OpMemberDecorate)) return false;

    bool found_write = false;
    std::unordered_set<uint32_t> worklist;
    worklist.insert(entrypoint.word(2));  // Entry point's function id

    // Follow instructions in call graph looking for writes to target
    while (!worklist.empty() && !found_write) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            continue;
        }

        if (insn.opcode() == spv::OpFunction) {
            // Scan body of function looking for other function calls or items in our ID chain
            while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                switch (insn.opcode()) {
                    case spv::OpAccessChain:
                        if (insn.word(3) == target_id) {
                            if (type == spv::OpMemberDecorate) {
                                // Only follow the access chain if it selects the decorated member.
                                auto value = GetConstantValue(src, insn.word(4));
                                if (value == builtin_instr.word(2)) {
                                    target_id = insn.word(2);
                                }
                            } else {
                                target_id = insn.word(2);
                            }
                        }
                        break;
                    case spv::OpStore:
                        // A store through the tracked pointer means the built-in is written.
                        if (insn.word(1) == target_id) {
                            found_write = true;
                        }
                        break;
                    case spv::OpFunctionCall:
                        // Enqueue callee for scanning.
                        worklist.insert(insn.word(3));
                        break;
                }
            }
        }
    }
    return found_write;
}
1088
// For some analyses, we need to know about all ids referenced by the static call tree of a particular entrypoint. This is
// important for identifying the set of shader resources actually used by an entrypoint, for example.
// Note: we only explore parts of the image which might actually contain ids we care about for the above analyses.
// - NOT the shader input/output interfaces.
//
// TODO: The set of interesting opcodes here was determined by eyeballing the SPIRV spec. It might be worth
// converting parts of this to be generated from the machine-readable spec instead.
static std::unordered_set<uint32_t> MarkAccessibleIds(shader_module const *src, spirv_inst_iter entrypoint) {
    std::unordered_set<uint32_t> ids;       // Result: every id reachable from the entrypoint
    std::unordered_set<uint32_t> worklist;  // Ids still to be visited
    worklist.insert(entrypoint.word(2));    // Seed with the entry point's function id

    while (!worklist.empty()) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            // ID is something we didn't collect in BuildDefIndex. that's OK -- we'll stumble across all kinds of things here
            // that we may not care about.
            continue;
        }

        // Try to add to the output set
        if (!ids.insert(id).second) {
            continue;  // If we already saw this id, we don't want to walk it again.
        }

        switch (insn.opcode()) {
            case spv::OpFunction:
                // Scan whole body of the function, enlisting anything interesting
                while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                    switch (insn.opcode()) {
                        // Memory/atomic ops: the pointer operand is word(3).
                        case spv::OpLoad:
                        case spv::OpAtomicLoad:
                        case spv::OpAtomicExchange:
                        case spv::OpAtomicCompareExchange:
                        case spv::OpAtomicCompareExchangeWeak:
                        case spv::OpAtomicIIncrement:
                        case spv::OpAtomicIDecrement:
                        case spv::OpAtomicIAdd:
                        case spv::OpAtomicISub:
                        case spv::OpAtomicSMin:
                        case spv::OpAtomicUMin:
                        case spv::OpAtomicSMax:
                        case spv::OpAtomicUMax:
                        case spv::OpAtomicAnd:
                        case spv::OpAtomicOr:
                        case spv::OpAtomicXor:
                            worklist.insert(insn.word(3));  // ptr
                            break;
                        // Stores have the pointer as word(1) (no result id).
                        case spv::OpStore:
                        case spv::OpAtomicStore:
                            worklist.insert(insn.word(1));  // ptr
                            break;
                        case spv::OpAccessChain:
                        case spv::OpInBoundsAccessChain:
                            worklist.insert(insn.word(3));  // base ptr
                            break;
                        // Image/sampling ops: the image or sampled-image operand is word(3).
                        case spv::OpSampledImage:
                        case spv::OpImageSampleImplicitLod:
                        case spv::OpImageSampleExplicitLod:
                        case spv::OpImageSampleDrefImplicitLod:
                        case spv::OpImageSampleDrefExplicitLod:
                        case spv::OpImageSampleProjImplicitLod:
                        case spv::OpImageSampleProjExplicitLod:
                        case spv::OpImageSampleProjDrefImplicitLod:
                        case spv::OpImageSampleProjDrefExplicitLod:
                        case spv::OpImageFetch:
                        case spv::OpImageGather:
                        case spv::OpImageDrefGather:
                        case spv::OpImageRead:
                        case spv::OpImage:
                        case spv::OpImageQueryFormat:
                        case spv::OpImageQueryOrder:
                        case spv::OpImageQuerySizeLod:
                        case spv::OpImageQuerySize:
                        case spv::OpImageQueryLod:
                        case spv::OpImageQueryLevels:
                        case spv::OpImageQuerySamples:
                        case spv::OpImageSparseSampleImplicitLod:
                        case spv::OpImageSparseSampleExplicitLod:
                        case spv::OpImageSparseSampleDrefImplicitLod:
                        case spv::OpImageSparseSampleDrefExplicitLod:
                        case spv::OpImageSparseSampleProjImplicitLod:
                        case spv::OpImageSparseSampleProjExplicitLod:
                        case spv::OpImageSparseSampleProjDrefImplicitLod:
                        case spv::OpImageSparseSampleProjDrefExplicitLod:
                        case spv::OpImageSparseFetch:
                        case spv::OpImageSparseGather:
                        case spv::OpImageSparseDrefGather:
                        case spv::OpImageTexelPointer:
                            worklist.insert(insn.word(3));  // Image or sampled image
                            break;
                        case spv::OpImageWrite:
                            worklist.insert(insn.word(1));  // Image -- different operand order to above
                            break;
                        case spv::OpFunctionCall:
                            for (uint32_t i = 3; i < insn.len(); i++) {
                                worklist.insert(insn.word(i));  // fn itself, and all args
                            }
                            break;

                        case spv::OpExtInst:
                            for (uint32_t i = 5; i < insn.len(); i++) {
                                worklist.insert(insn.word(i));  // Operands to ext inst
                            }
                            break;
                    }
                }
                break;
        }
    }

    return ids;
}
1206
// Validate one push-constant block (the struct type behind a PushConstant variable) against the
// pipeline layout's declared push-constant ranges: every decorated member offset must fall inside
// some declared range, and that range must include the given shader stage.
static bool ValidatePushConstantBlockAgainstPipeline(debug_report_data const *report_data,
                                                     std::vector<VkPushConstantRange> const *push_constant_ranges,
                                                     shader_module const *src, spirv_inst_iter type, VkShaderStageFlagBits stage) {
    bool skip = false;

    // Strip off ptrs etc
    type = GetStructType(src, type, false);
    assert(type != src->end());

    // Validate directly off the offsets. this isn't quite correct for arrays and matrices, but is a good first step.
    // TODO: arrays, matrices, weird sizes
    for (auto insn : *src) {
        // Only member decorations targeting this struct type are relevant.
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            if (insn.word(3) == spv::DecorationOffset) {
                unsigned offset = insn.word(4);
                auto size = 4;  // Bytes; TODO: calculate this based on the type

                // Find a declared range fully covering [offset, offset + size).
                bool found_range = false;
                for (auto const &range : *push_constant_ranges) {
                    if (range.offset <= offset && range.offset + range.size >= offset + size) {
                        found_range = true;

                        // Range exists but doesn't allow this shader stage.
                        if ((range.stageFlags & stage) == 0) {
                            skip |=
                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_PushConstantNotAccessibleFromStage,
                                        "Push constant range covering variable starting at offset %u not accessible from stage %s",
                                        offset, string_VkShaderStageFlagBits(stage));
                        }

                        break;
                    }
                }

                if (!found_range) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    kVUID_Core_Shader_PushConstantOutOfRange,
                                    "Push constant range covering variable starting at offset %u not declared in layout", offset);
                }
            }
        }
    }

    return skip;
}
1252
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001253static bool ValidatePushConstantUsage(debug_report_data const *report_data,
1254 std::vector<VkPushConstantRange> const *push_constant_ranges, shader_module const *src,
1255 std::unordered_set<uint32_t> accessible_ids, VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001256 bool skip = false;
1257
1258 for (auto id : accessible_ids) {
1259 auto def_insn = src->get_def(id);
1260 if (def_insn.opcode() == spv::OpVariable && def_insn.word(3) == spv::StorageClassPushConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001261 skip |= ValidatePushConstantBlockAgainstPipeline(report_data, push_constant_ranges, src, src->get_def(def_insn.word(1)),
1262 stage);
Chris Forbes47567b72017-06-09 12:09:45 -07001263 }
1264 }
1265
1266 return skip;
1267}
1268
1269// Validate that data for each specialization entry is fully contained within the buffer.
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001270static bool ValidateSpecializationOffsets(debug_report_data const *report_data, VkPipelineShaderStageCreateInfo const *info) {
Chris Forbes47567b72017-06-09 12:09:45 -07001271 bool skip = false;
1272
1273 VkSpecializationInfo const *spec = info->pSpecializationInfo;
1274
1275 if (spec) {
1276 for (auto i = 0u; i < spec->mapEntryCount; i++) {
Dave Houlton78d09922018-05-17 15:48:45 -06001277 // TODO: This is a good place for "VUID-VkSpecializationInfo-offset-00773".
Chris Forbes47567b72017-06-09 12:09:45 -07001278 if (spec->pMapEntries[i].offset + spec->pMapEntries[i].size > spec->dataSize) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001279 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
Dave Houlton78d09922018-05-17 15:48:45 -06001280 "VUID-VkSpecializationInfo-pMapEntries-00774",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001281 "Specialization entry %u (for constant id %u) references memory outside provided specialization "
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001282 "data (bytes %u.." PRINTF_SIZE_T_SPECIFIER "; " PRINTF_SIZE_T_SPECIFIER " bytes provided)..",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001283 i, spec->pMapEntries[i].constantID, spec->pMapEntries[i].offset,
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001284 spec->pMapEntries[i].offset + spec->pMapEntries[i].size - 1, spec->dataSize);
Chris Forbes47567b72017-06-09 12:09:45 -07001285 }
1286 }
1287 }
1288
1289 return skip;
1290}
1291
// TODO (jbolz): Can this return a const reference?
// Map a SPIR-V resource type to the set of VkDescriptorType values a binding of that type may be
// paired with. descriptor_count is set from array dimensions (0 for runtime arrays). An empty set
// means the type matches no descriptor type.
static std::set<uint32_t> TypeToDescriptorTypeSet(shader_module const *module, uint32_t type_id, unsigned &descriptor_count) {
    auto type = module->get_def(type_id);
    bool is_storage_buffer = false;
    descriptor_count = 1;
    std::set<uint32_t> ret;

    // Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
    while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
        if (type.opcode() == spv::OpTypeRuntimeArray) {
            descriptor_count = 0;  // Runtime arrays have no static count
            type = module->get_def(type.word(2));
        } else if (type.opcode() == spv::OpTypeArray) {
            descriptor_count *= GetConstantValue(module, type.word(3));
            type = module->get_def(type.word(2));
        } else {
            // Pointer: note StorageBuffer storage class before following the pointee.
            if (type.word(2) == spv::StorageClassStorageBuffer) {
                is_storage_buffer = true;
            }
            type = module->get_def(type.word(3));
        }
    }

    switch (type.opcode()) {
        case spv::OpTypeStruct: {
            // Block vs BufferBlock decoration distinguishes uniform blocks from (legacy) storage
            // blocks.
            for (auto insn : *module) {
                if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
                    if (insn.word(2) == spv::DecorationBlock) {
                        if (is_storage_buffer) {
                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
                            return ret;
                        } else {
                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
                            ret.insert(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
                            return ret;
                        }
                    } else if (insn.word(2) == spv::DecorationBufferBlock) {
                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
                        return ret;
                    }
                }
            }

            // Invalid: struct with neither Block nor BufferBlock -- matches nothing.
            return ret;
        }

        case spv::OpTypeSampler:
            ret.insert(VK_DESCRIPTOR_TYPE_SAMPLER);
            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
            return ret;

        case spv::OpTypeSampledImage: {
            // Slight relaxation for some GLSL historical madness: samplerBuffer doesn't really have a sampler, and a texel
            // buffer descriptor doesn't really provide one. Allow this slight mismatch.
            auto image_type = module->get_def(type.word(2));
            auto dim = image_type.word(3);
            auto sampled = image_type.word(7);
            if (dim == spv::DimBuffer && sampled == 1) {
                ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
                return ret;
            }
        }
            // Intentional fall-out of the braced scope above: all other sampled images map to
            // combined image samplers.
            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
            return ret;

        case spv::OpTypeImage: {
            // Many descriptor types backing image types-- depends on dimension and whether the image will be used with a sampler.
            // SPIRV for Vulkan requires that sampled be 1 or 2 -- leaving the decision to runtime is unacceptable.
            auto dim = type.word(3);
            auto sampled = type.word(7);

            if (dim == spv::DimSubpassData) {
                ret.insert(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
                return ret;
            } else if (dim == spv::DimBuffer) {
                if (sampled == 1) {
                    ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
                    return ret;
                } else {
                    ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
                    return ret;
                }
            } else if (sampled == 1) {
                ret.insert(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
                ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
                return ret;
            } else {
                ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
                return ret;
            }
        }
        case spv::OpTypeAccelerationStructureNV:
            ret.insert(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
            return ret;

        // We shouldn't really see any other junk types -- but if we do, they're a mismatch.
        default:
            return ret;  // Matches nothing
    }
}
1396
Jeff Bolze54ae892018-09-08 12:16:29 -05001397static std::string string_descriptorTypes(const std::set<uint32_t> &descriptor_types) {
Chris Forbes73c00bf2018-06-22 16:28:06 -07001398 std::stringstream ss;
Jeff Bolze54ae892018-09-08 12:16:29 -05001399 for (auto it = descriptor_types.begin(); it != descriptor_types.end(); ++it) {
1400 if (ss.tellp()) ss << ", ";
1401 ss << string_VkDescriptorType(VkDescriptorType(*it));
Chris Forbes73c00bf2018-06-22 16:28:06 -07001402 }
1403 return ss.str();
1404}
1405
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001406static bool RequireFeature(debug_report_data const *report_data, VkBool32 feature, char const *feature_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001407 if (!feature) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001408 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001409 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires %s but is not enabled on the device", feature_name)) {
Chris Forbes47567b72017-06-09 12:09:45 -07001410 return true;
1411 }
1412 }
1413
1414 return false;
1415}
1416
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001417static bool RequireExtension(debug_report_data const *report_data, bool extension, char const *extension_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001418 if (!extension) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001419 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001420 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires extension %s but is not enabled on the device",
Chris Forbes47567b72017-06-09 12:09:45 -07001421 extension_name)) {
1422 return true;
1423 }
1424 }
1425
1426 return false;
1427}
1428
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001429static bool ValidateShaderCapabilities(layer_data *dev_data, shader_module const *src, VkShaderStageFlagBits stage,
1430 bool has_writable_descriptor) {
Chris Forbes47567b72017-06-09 12:09:45 -07001431 bool skip = false;
1432
1433 auto report_data = GetReportData(dev_data);
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001434 auto const &features = GetEnabledFeatures(dev_data);
Cort Strattond2742852018-05-03 13:42:10 -04001435 auto const &extensions = GetDeviceExtensions(dev_data);
Chris Forbes47567b72017-06-09 12:09:45 -07001436
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001437 struct FeaturePointer {
1438 // Callable object to test if this feature is enabled in the given aggregate feature struct
1439 const std::function<VkBool32(const DeviceFeatures &)> IsEnabled;
1440
1441 // Test if feature pointer is populated
1442 explicit operator bool() const { return static_cast<bool>(IsEnabled); }
1443
1444 // Default and nullptr constructor to create an empty FeaturePointer
1445 FeaturePointer() : IsEnabled(nullptr) {}
1446 FeaturePointer(std::nullptr_t ptr) : IsEnabled(nullptr) {}
1447
1448 // Constructors to populate FeaturePointer based on given pointer to member
1449 FeaturePointer(VkBool32 VkPhysicalDeviceFeatures::*ptr)
1450 : IsEnabled([=](const DeviceFeatures &features) { return features.core.*ptr; }) {}
1451 FeaturePointer(VkBool32 VkPhysicalDeviceDescriptorIndexingFeaturesEXT::*ptr)
1452 : IsEnabled([=](const DeviceFeatures &features) { return features.descriptor_indexing.*ptr; }) {}
1453 FeaturePointer(VkBool32 VkPhysicalDevice8BitStorageFeaturesKHR::*ptr)
1454 : IsEnabled([=](const DeviceFeatures &features) { return features.eight_bit_storage.*ptr; }) {}
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001455 FeaturePointer(VkBool32 VkPhysicalDeviceTransformFeedbackFeaturesEXT::*ptr)
1456 : IsEnabled([=](const DeviceFeatures &features) { return features.transform_feedback_features.*ptr; }) {}
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001457 FeaturePointer(VkBool32 VkPhysicalDeviceFloat16Int8FeaturesKHR::*ptr)
1458 : IsEnabled([=](const DeviceFeatures &features) { return features.float16_int8.*ptr; }) {}
Tobias Hector6a0ece72018-12-10 12:24:05 +00001459 FeaturePointer(VkBool32 VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::*ptr)
1460 : IsEnabled([=](const DeviceFeatures &features) { return features.scalar_block_layout_features.*ptr; }) {}
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001461 };
1462
Chris Forbes47567b72017-06-09 12:09:45 -07001463 struct CapabilityInfo {
1464 char const *name;
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001465 FeaturePointer feature;
1466 bool DeviceExtensions::*extension;
Chris Forbes47567b72017-06-09 12:09:45 -07001467 };
1468
Chris Forbes47567b72017-06-09 12:09:45 -07001469 // clang-format off
Dave Houltoneb10ea82017-12-22 12:21:50 -07001470 static const std::unordered_multimap<uint32_t, CapabilityInfo> capabilities = {
Chris Forbes47567b72017-06-09 12:09:45 -07001471 // Capabilities always supported by a Vulkan 1.0 implementation -- no
1472 // feature bits.
1473 {spv::CapabilityMatrix, {nullptr}},
1474 {spv::CapabilityShader, {nullptr}},
1475 {spv::CapabilityInputAttachment, {nullptr}},
1476 {spv::CapabilitySampled1D, {nullptr}},
1477 {spv::CapabilityImage1D, {nullptr}},
1478 {spv::CapabilitySampledBuffer, {nullptr}},
1479 {spv::CapabilityImageQuery, {nullptr}},
1480 {spv::CapabilityDerivativeControl, {nullptr}},
1481
1482 // Capabilities that are optionally supported, but require a feature to
1483 // be enabled on the device
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001484 {spv::CapabilityGeometry, {"VkPhysicalDeviceFeatures::geometryShader", &VkPhysicalDeviceFeatures::geometryShader}},
1485 {spv::CapabilityTessellation, {"VkPhysicalDeviceFeatures::tessellationShader", &VkPhysicalDeviceFeatures::tessellationShader}},
1486 {spv::CapabilityFloat64, {"VkPhysicalDeviceFeatures::shaderFloat64", &VkPhysicalDeviceFeatures::shaderFloat64}},
1487 {spv::CapabilityInt64, {"VkPhysicalDeviceFeatures::shaderInt64", &VkPhysicalDeviceFeatures::shaderInt64}},
1488 {spv::CapabilityTessellationPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1489 {spv::CapabilityGeometryPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1490 {spv::CapabilityImageGatherExtended, {"VkPhysicalDeviceFeatures::shaderImageGatherExtended", &VkPhysicalDeviceFeatures::shaderImageGatherExtended}},
1491 {spv::CapabilityStorageImageMultisample, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
1492 {spv::CapabilityUniformBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing}},
1493 {spv::CapabilitySampledImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing}},
1494 {spv::CapabilityStorageBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1495 {spv::CapabilityStorageImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1496 {spv::CapabilityClipDistance, {"VkPhysicalDeviceFeatures::shaderClipDistance", &VkPhysicalDeviceFeatures::shaderClipDistance}},
1497 {spv::CapabilityCullDistance, {"VkPhysicalDeviceFeatures::shaderCullDistance", &VkPhysicalDeviceFeatures::shaderCullDistance}},
1498 {spv::CapabilityImageCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1499 {spv::CapabilitySampleRateShading, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1500 {spv::CapabilitySparseResidency, {"VkPhysicalDeviceFeatures::shaderResourceResidency", &VkPhysicalDeviceFeatures::shaderResourceResidency}},
1501 {spv::CapabilityMinLod, {"VkPhysicalDeviceFeatures::shaderResourceMinLod", &VkPhysicalDeviceFeatures::shaderResourceMinLod}},
1502 {spv::CapabilitySampledCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1503 {spv::CapabilityImageMSArray, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
1504 {spv::CapabilityStorageImageExtendedFormats, {"VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats", &VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats}},
1505 {spv::CapabilityInterpolationFunction, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1506 {spv::CapabilityStorageImageReadWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat}},
1507 {spv::CapabilityStorageImageWriteWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat}},
1508 {spv::CapabilityMultiViewport, {"VkPhysicalDeviceFeatures::multiViewport", &VkPhysicalDeviceFeatures::multiViewport}},
Jeff Bolzfdf96072018-04-10 14:32:18 -05001509
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001510 {spv::CapabilityShaderNonUniformEXT, {VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_descriptor_indexing}},
1511 {spv::CapabilityRuntimeDescriptorArrayEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray}},
1512 {spv::CapabilityInputAttachmentArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing}},
1513 {spv::CapabilityUniformTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing}},
1514 {spv::CapabilityStorageTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing}},
1515 {spv::CapabilityUniformBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing}},
1516 {spv::CapabilitySampledImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing}},
1517 {spv::CapabilityStorageBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing}},
1518 {spv::CapabilityStorageImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing}},
1519 {spv::CapabilityInputAttachmentArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing}},
1520 {spv::CapabilityUniformTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing}},
1521 {spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT , {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing}},
Chris Forbes47567b72017-06-09 12:09:45 -07001522
1523 // Capabilities that require an extension
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001524 {spv::CapabilityDrawParameters, {VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_draw_parameters}},
1525 {spv::CapabilityGeometryShaderPassthroughNV, {VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_geometry_shader_passthrough}},
1526 {spv::CapabilitySampleMaskOverrideCoverageNV, {VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_sample_mask_override_coverage}},
1527 {spv::CapabilityShaderViewportIndexLayerEXT, {VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_viewport_index_layer}},
1528 {spv::CapabilityShaderViewportIndexLayerNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1529 {spv::CapabilityShaderViewportMaskNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1530 {spv::CapabilitySubgroupBallotKHR, {VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_ballot }},
1531 {spv::CapabilitySubgroupVoteKHR, {VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_vote }},
aqnuep7033c702018-09-11 18:03:29 +02001532 {spv::CapabilityInt64Atomics, {VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_atomic_int64 }},
Alexander Galazin3bd8e342018-06-14 15:49:07 +02001533
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001534 {spv::CapabilityStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1535 {spv::CapabilityUniformAndStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1536 {spv::CapabilityStoragePushConstant8 , {"VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8", &VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8, &DeviceExtensions::vk_khr_8bit_storage}},
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001537
1538 {spv::CapabilityTransformFeedback , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback, &DeviceExtensions::vk_ext_transform_feedback}},
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001539 {spv::CapabilityGeometryStreams , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams, &DeviceExtensions::vk_ext_transform_feedback}},
1540
1541 {spv::CapabilityFloat16 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16, &DeviceExtensions::vk_khr_shader_float16_int8}},
1542 {spv::CapabilityInt8 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8, &DeviceExtensions::vk_khr_shader_float16_int8}},
Chris Forbes47567b72017-06-09 12:09:45 -07001543 };
1544 // clang-format on
1545
1546 for (auto insn : *src) {
1547 if (insn.opcode() == spv::OpCapability) {
Dave Houltoneb10ea82017-12-22 12:21:50 -07001548 size_t n = capabilities.count(insn.word(1));
1549 if (1 == n) { // key occurs exactly once
1550 auto it = capabilities.find(insn.word(1));
1551 if (it != capabilities.end()) {
1552 if (it->second.feature) {
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001553 skip |= RequireFeature(report_data, it->second.feature.IsEnabled(*features), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001554 }
1555 if (it->second.extension) {
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001556 skip |= RequireExtension(report_data, extensions->*(it->second.extension), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001557 }
Chris Forbes47567b72017-06-09 12:09:45 -07001558 }
Dave Houltoneb10ea82017-12-22 12:21:50 -07001559 } else if (1 < n) { // key occurs multiple times, at least one must be enabled
1560 bool needs_feature = false, has_feature = false;
1561 bool needs_ext = false, has_ext = false;
1562 std::string feature_names = "(one of) [ ";
1563 std::string extension_names = feature_names;
1564 auto caps = capabilities.equal_range(insn.word(1));
1565 for (auto it = caps.first; it != caps.second; ++it) {
1566 if (it->second.feature) {
1567 needs_feature = true;
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001568 has_feature = has_feature || it->second.feature.IsEnabled(*features);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001569 feature_names += it->second.name;
1570 feature_names += " ";
1571 }
1572 if (it->second.extension) {
1573 needs_ext = true;
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001574 has_ext = has_ext || extensions->*(it->second.extension);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001575 extension_names += it->second.name;
1576 extension_names += " ";
1577 }
1578 }
1579 if (needs_feature) {
1580 feature_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001581 skip |= RequireFeature(report_data, has_feature, feature_names.c_str());
Dave Houltoneb10ea82017-12-22 12:21:50 -07001582 }
1583 if (needs_ext) {
1584 extension_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001585 skip |= RequireExtension(report_data, has_ext, extension_names.c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07001586 }
1587 }
1588 }
1589 }
1590
Chris Forbes349b3132018-03-07 11:38:08 -08001591 if (has_writable_descriptor) {
1592 switch (stage) {
1593 case VK_SHADER_STAGE_COMPUTE_BIT:
Jeff Bolz148d94e2018-12-13 21:25:56 -06001594 case VK_SHADER_STAGE_RAYGEN_BIT_NV:
1595 case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
1596 case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
1597 case VK_SHADER_STAGE_MISS_BIT_NV:
1598 case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
1599 case VK_SHADER_STAGE_CALLABLE_BIT_NV:
1600 case VK_SHADER_STAGE_TASK_BIT_NV:
1601 case VK_SHADER_STAGE_MESH_BIT_NV:
Chris Forbes349b3132018-03-07 11:38:08 -08001602 /* No feature requirements for writes and atomics from compute
Jeff Bolz148d94e2018-12-13 21:25:56 -06001603 * raytracing, or mesh stages */
Chris Forbes349b3132018-03-07 11:38:08 -08001604 break;
1605 case VK_SHADER_STAGE_FRAGMENT_BIT:
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001606 skip |= RequireFeature(report_data, features->core.fragmentStoresAndAtomics, "fragmentStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001607 break;
1608 default:
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001609 skip |=
1610 RequireFeature(report_data, features->core.vertexPipelineStoresAndAtomics, "vertexPipelineStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001611 break;
1612 }
1613 }
1614
Chris Forbes47567b72017-06-09 12:09:45 -07001615 return skip;
1616}
1617
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001618static bool VariableIsBuiltIn(shader_module const *src, const uint32_t ID, std::vector<uint32_t> const &builtInBlockIDs,
1619 std::vector<uint32_t> const &builtInIDs) {
1620 auto insn = src->get_def(ID);
1621
1622 switch (insn.opcode()) {
1623 case spv::OpVariable: {
1624 // First check if the variable is a "pure" built-in type, e.g. gl_ViewportIndex
1625 uint32_t ID = insn.word(2);
1626 for (auto builtInID : builtInIDs) {
1627 if (ID == builtInID) {
1628 return true;
1629 }
1630 }
1631
1632 VariableIsBuiltIn(src, insn.word(1), builtInBlockIDs, builtInIDs);
1633 break;
1634 }
1635 case spv::OpTypePointer:
1636 VariableIsBuiltIn(src, insn.word(3), builtInBlockIDs, builtInIDs);
1637 break;
1638 case spv::OpTypeArray:
1639 VariableIsBuiltIn(src, insn.word(2), builtInBlockIDs, builtInIDs);
1640 break;
1641 case spv::OpTypeStruct: {
1642 uint32_t ID = insn.word(1); // We only need to check the first member as either all will be, or none will be built-in
1643 for (auto builtInBlockID : builtInBlockIDs) {
1644 if (ID == builtInBlockID) {
1645 return true;
1646 }
1647 }
1648 return false;
1649 }
1650 default:
1651 return false;
1652 }
1653
1654 return false;
1655}
1656
// Check a shader stage's input/output interface component counts against the device's
// VkPhysicalDeviceLimits (maxVertexOutputComponents, maxFragmentInputComponents, etc.).
// Built-in variables (gl_Position, gl_PerVertex blocks, ...) do not count against these
// limits and are filtered out. Returns true if a limit is exceeded (error logged).
static bool ValidateShaderStageInputOutputLimits(layer_data *dev_data, shader_module const *src,
                                                 VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline) {
    // Compute has no in/out interface limits; the "ALL" pseudo-stages have no single limit to check.
    if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT || pStage->stage == VK_SHADER_STAGE_ALL_GRAPHICS ||
        pStage->stage == VK_SHADER_STAGE_ALL) {
        return false;
    }

    bool skip = false;
    auto const &limits = dev_data->phys_dev_props.limits;
    auto const report_data = GetReportData(dev_data);

    // IDs of structs with BuiltIn-decorated members, and of directly BuiltIn-decorated ids.
    std::vector<uint32_t> builtInBlockIDs;
    std::vector<uint32_t> builtInIDs;
    // One record per Input/Output interface variable found in the module.
    struct Variable {
        uint32_t baseTypePtrID;  // result id of the variable's OpTypePointer
        uint32_t ID;             // result id of the OpVariable itself
        uint32_t storageClass;   // spv::StorageClassInput or spv::StorageClassOutput
    };
    std::vector<Variable> variables;

    // Single pass over the module: collect built-in decorations and interface variables.
    // (Decorations precede variable/type definitions in valid SPIR-V, so one pass suffices.)
    for (auto insn : *src) {
        switch (insn.opcode()) {
            // Find all built-in member decorations
            case spv::OpMemberDecorate:
                if (insn.word(3) == spv::DecorationBuiltIn) {
                    builtInBlockIDs.push_back(insn.word(1));
                }
                break;
            // Find all built-in decorations
            case spv::OpDecorate:
                switch (insn.word(2)) {
                    case spv::DecorationBlock: {
                        uint32_t blockID = insn.word(1);
                        for (auto builtInBlockID : builtInBlockIDs) {
                            // Check if one of the members of the block are built-in -> the block is built-in
                            if (blockID == builtInBlockID) {
                                builtInIDs.push_back(blockID);
                                break;
                            }
                        }
                        break;
                    }
                    case spv::DecorationBuiltIn:
                        builtInIDs.push_back(insn.word(1));
                        break;
                    default:
                        break;
                }
                break;
            // Find all input and output variables
            case spv::OpVariable: {
                Variable var = {};
                var.storageClass = insn.word(3);
                if (var.storageClass == spv::StorageClassInput || var.storageClass == spv::StorageClassOutput) {
                    var.baseTypePtrID = insn.word(1);
                    var.ID = insn.word(2);
                    variables.push_back(var);
                }
                break;
            }
            default:
                break;
        }
    }

    // Sum the location components consumed by non-built-in inputs and outputs.
    uint32_t numCompIn = 0, numCompOut = 0;
    for (auto &var : variables) {
        // Check the variable's ID
        if (VariableIsBuiltIn(src, var.ID, builtInBlockIDs, builtInIDs)) {
            continue;
        }
        // Check the variable's type's ID - e.g. gl_PerVertex is made of basic types, not built-in types
        if (VariableIsBuiltIn(src, src->get_def(var.baseTypePtrID).word(3), builtInBlockIDs, builtInIDs)) {
            continue;
        }

        if (var.storageClass == spv::StorageClassInput) {
            numCompIn += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
        } else { // var.storageClass == spv::StorageClassOutput
            numCompOut += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
        }
    }

    // Per-stage limit checks. Each stage has its own input and/or output component limit;
    // the message reports the limit and the amount by which it was exceeded.
    switch (pStage->stage) {
        case VK_SHADER_STAGE_VERTEX_BIT:
            if (numCompOut > limits.maxVertexOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Vertex shader exceeds "
                                "VkPhysicalDeviceLimits::maxVertexOutputComponents of %u "
                                "components by %u components",
                                limits.maxVertexOutputComponents, numCompOut - limits.maxVertexOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
            if (numCompIn > limits.maxTessellationControlPerVertexInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationControlPerVertexInputComponents,
                                numCompIn - limits.maxTessellationControlPerVertexInputComponents);
            }
            if (numCompOut > limits.maxTessellationControlPerVertexOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationControlPerVertexOutputComponents,
                                numCompOut - limits.maxTessellationControlPerVertexOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
            if (numCompIn > limits.maxTessellationEvaluationInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationEvaluationInputComponents,
                                numCompIn - limits.maxTessellationEvaluationInputComponents);
            }
            if (numCompOut > limits.maxTessellationEvaluationOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationEvaluationOutputComponents,
                                numCompOut - limits.maxTessellationEvaluationOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_GEOMETRY_BIT:
            if (numCompIn > limits.maxGeometryInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
                                "VkPhysicalDeviceLimits::maxGeometryInputComponents of %u "
                                "components by %u components",
                                limits.maxGeometryInputComponents, numCompIn - limits.maxGeometryInputComponents);
            }
            if (numCompOut > limits.maxGeometryOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
                                "VkPhysicalDeviceLimits::maxGeometryOutputComponents of %u "
                                "components by %u components",
                                limits.maxGeometryOutputComponents, numCompOut - limits.maxGeometryOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_FRAGMENT_BIT:
            if (numCompIn > limits.maxFragmentInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Fragment shader exceeds "
                                "VkPhysicalDeviceLimits::maxFragmentInputComponents of %u "
                                "components by %u components",
                                limits.maxFragmentInputComponents, numCompIn - limits.maxFragmentInputComponents);
            }
            break;

        // Ray-tracing and mesh stages have no in/out component limits to validate here.
        case VK_SHADER_STAGE_RAYGEN_BIT_NV:
        case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
        case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
        case VK_SHADER_STAGE_MISS_BIT_NV:
        case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
        case VK_SHADER_STAGE_CALLABLE_BIT_NV:
        case VK_SHADER_STAGE_TASK_BIT_NV:
        case VK_SHADER_STAGE_MESH_BIT_NV:
            break;

        default:
            assert(false);  // This should never happen
    }
    return skip;
}
1839
// Map a SPIR-V type id to the descriptor_req bits (image view type, sample count, and
// component type) that any descriptor bound to this slot must satisfy.
// Walks wrapper types (arrays, runtime arrays, sampled images, pointers) down to the
// underlying OpTypeImage; non-image types carry no image-view requirements and yield 0.
static uint32_t DescriptorTypeToReqs(shader_module const *module, uint32_t type_id) {
    auto type = module->get_def(type_id);

    while (true) {
        switch (type.opcode()) {
            case spv::OpTypeArray:
            case spv::OpTypeRuntimeArray:
            case spv::OpTypeSampledImage:
                // Unwrap to the element / underlying image type (operand word 2).
                type = module->get_def(type.word(2));
                break;
            case spv::OpTypePointer:
                // Unwrap to the pointee type (operand word 3).
                type = module->get_def(type.word(3));
                break;
            case spv::OpTypeImage: {
                // OpTypeImage operands: word 3 = Dim, word 5 = Arrayed, word 6 = MS.
                auto dim = type.word(3);
                auto arrayed = type.word(5);
                auto msaa = type.word(6);

                // Component type requirement comes from the image's sampled type (word 2).
                uint32_t bits = 0;
                switch (GetFundamentalType(module, type.word(2))) {
                    case FORMAT_TYPE_FLOAT:
                        bits = DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT;
                        break;
                    case FORMAT_TYPE_UINT:
                        bits = DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
                        break;
                    case FORMAT_TYPE_SINT:
                        bits = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
                        break;
                    default:
                        break;
                }

                // View-type (and, for 2D/subpass, sample-count) requirement from Dim.
                switch (dim) {
                    case spv::Dim1D:
                        bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_1D;
                        return bits;
                    case spv::Dim2D:
                        bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
                        bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_2D;
                        return bits;
                    case spv::Dim3D:
                        bits |= DESCRIPTOR_REQ_VIEW_TYPE_3D;
                        return bits;
                    case spv::DimCube:
                        bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_CUBE;
                        return bits;
                    case spv::DimSubpassData:
                        bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
                        return bits;
                    default:  // buffer, etc.
                        return bits;
                }
            }
            default:
                // Not an image-like type: nothing to require of the bound descriptor.
                return 0;
        }
    }
}
1899
1900// For given pipelineLayout verify that the set_layout_node at slot.first
1901// has the requested binding at slot.second and return ptr to that binding
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001902static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_NODE const *pipelineLayout,
1903 descriptor_slot_t slot) {
Chris Forbes47567b72017-06-09 12:09:45 -07001904 if (!pipelineLayout) return nullptr;
1905
1906 if (slot.first >= pipelineLayout->set_layouts.size()) return nullptr;
1907
1908 return pipelineLayout->set_layouts[slot.first]->GetDescriptorSetLayoutBindingPtrFromBinding(slot.second);
1909}
1910
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001911static void ProcessExecutionModes(shader_module const *src, spirv_inst_iter entrypoint, PIPELINE_STATE *pipeline) {
Jeff Bolz105d6492018-09-29 15:46:44 -05001912 auto entrypoint_id = entrypoint.word(2);
Chris Forbes0771b672018-03-22 21:13:46 -07001913 bool is_point_mode = false;
1914
1915 for (auto insn : *src) {
1916 if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
1917 switch (insn.word(2)) {
1918 case spv::ExecutionModePointMode:
1919 // In tessellation shaders, PointMode is separate and trumps the tessellation topology.
1920 is_point_mode = true;
1921 break;
1922
1923 case spv::ExecutionModeOutputPoints:
1924 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1925 break;
1926
1927 case spv::ExecutionModeIsolines:
1928 case spv::ExecutionModeOutputLineStrip:
1929 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
1930 break;
1931
1932 case spv::ExecutionModeTriangles:
1933 case spv::ExecutionModeQuads:
1934 case spv::ExecutionModeOutputTriangleStrip:
1935 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
1936 break;
1937 }
1938 }
1939 }
1940
1941 if (is_point_mode) pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
1942}
1943
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06001944// If PointList topology is specified in the pipeline, verify that a shader geometry stage writes PointSize
1945// o If there is only a vertex shader : gl_PointSize must be written when using points
1946// o If there is a geometry or tessellation shader:
1947// - If shaderTessellationAndGeometryPointSize feature is enabled:
1948// * gl_PointSize must be written in the final geometry stage
1949// - If shaderTessellationAndGeometryPointSize feature is disabled:
1950// * gl_PointSize must NOT be written and a default of 1.0 is assumed
1951bool ValidatePointListShaderState(const layer_data *dev_data, const PIPELINE_STATE *pipeline, shader_module const *src,
1952 spirv_inst_iter entrypoint, VkShaderStageFlagBits stage) {
1953 if (pipeline->topology_at_rasterizer != VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
1954 return false;
1955 }
1956
1957 bool pointsize_written = false;
1958 bool skip = false;
1959
1960 // Search for PointSize built-in decorations
1961 std::vector<uint32_t> pointsize_builtin_offsets;
1962 spirv_inst_iter insn = entrypoint;
1963 while (!pointsize_written && (insn.opcode() != spv::OpFunction)) {
1964 if (insn.opcode() == spv::OpMemberDecorate) {
1965 if (insn.word(3) == spv::DecorationBuiltIn) {
1966 if (insn.word(4) == spv::BuiltInPointSize) {
1967 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
1968 }
1969 }
1970 } else if (insn.opcode() == spv::OpDecorate) {
1971 if (insn.word(2) == spv::DecorationBuiltIn) {
1972 if (insn.word(3) == spv::BuiltInPointSize) {
1973 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
1974 }
1975 }
1976 }
1977
1978 insn++;
1979 }
1980
1981 if ((stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || stage == VK_SHADER_STAGE_GEOMETRY_BIT) &&
1982 !GetEnabledFeatures(dev_data)->core.shaderTessellationAndGeometryPointSize) {
1983 if (pointsize_written) {
1984 skip |= log_msg(GetReportData(dev_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1985 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_PointSizeBuiltInOverSpecified,
1986 "Pipeline topology is set to POINT_LIST and geometry or tessellation shaders write PointSize which "
1987 "is prohibited when the shaderTessellationAndGeometryPointSize feature is not enabled.");
1988 }
1989 } else if (!pointsize_written) {
1990 skip |=
1991 log_msg(GetReportData(dev_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1992 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_MissingPointSizeBuiltIn,
1993 "Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader corresponding to %s.",
1994 string_VkShaderStageFlagBits(stage));
1995 }
1996 return skip;
1997}
1998
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001999static bool ValidatePipelineShaderStage(layer_data *dev_data, VkPipelineShaderStageCreateInfo const *pStage,
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002000 PIPELINE_STATE *pipeline, shader_module const **out_module, spirv_inst_iter *out_entrypoint,
2001 bool check_point_size) {
Chris Forbes47567b72017-06-09 12:09:45 -07002002 bool skip = false;
2003 auto module = *out_module = GetShaderModuleState(dev_data, pStage->module);
2004 auto report_data = GetReportData(dev_data);
2005
2006 if (!module->has_valid_spirv) return false;
2007
2008 // Find the entrypoint
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002009 auto entrypoint = *out_entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
Chris Forbes47567b72017-06-09 12:09:45 -07002010 if (entrypoint == module->end()) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002011 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton78d09922018-05-17 15:48:45 -06002012 "VUID-VkPipelineShaderStageCreateInfo-pName-00707", "No entrypoint found named `%s` for stage %s..",
2013 pStage->pName, string_VkShaderStageFlagBits(pStage->stage))) {
Chris Forbes47567b72017-06-09 12:09:45 -07002014 return true; // no point continuing beyond here, any analysis is just going to be garbage.
2015 }
2016 }
2017
Chris Forbes47567b72017-06-09 12:09:45 -07002018 // Mark accessible ids
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002019 auto accessible_ids = MarkAccessibleIds(module, entrypoint);
2020 ProcessExecutionModes(module, entrypoint, pipeline);
Chris Forbes47567b72017-06-09 12:09:45 -07002021
2022 // Validate descriptor set layout against what the entrypoint actually uses
Chris Forbes8af24522018-03-07 11:37:45 -08002023 bool has_writable_descriptor = false;
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002024 auto descriptor_uses = CollectInterfaceByDescriptorSlot(report_data, module, accessible_ids, &has_writable_descriptor);
Chris Forbes47567b72017-06-09 12:09:45 -07002025
Chris Forbes349b3132018-03-07 11:38:08 -08002026 // Validate shader capabilities against enabled device features
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002027 skip |= ValidateShaderCapabilities(dev_data, module, pStage->stage, has_writable_descriptor);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02002028 skip |= ValidateShaderStageInputOutputLimits(dev_data, module, pStage, pipeline);
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002029 skip |= ValidateSpecializationOffsets(report_data, pStage);
2030 skip |= ValidatePushConstantUsage(report_data, pipeline->pipeline_layout.push_constant_ranges.get(), module, accessible_ids,
2031 pStage->stage);
Jeff Bolze54ae892018-09-08 12:16:29 -05002032 if (check_point_size && !pipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002033 skip |= ValidatePointListShaderState(dev_data, pipeline, module, entrypoint, pStage->stage);
2034 }
Chris Forbes47567b72017-06-09 12:09:45 -07002035
2036 // Validate descriptor use
2037 for (auto use : descriptor_uses) {
2038 // While validating shaders capture which slots are used by the pipeline
2039 auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002040 reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
Chris Forbes47567b72017-06-09 12:09:45 -07002041
2042 // Verify given pipelineLayout has requested setLayout with requested binding
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002043 const auto &binding = GetDescriptorBinding(&pipeline->pipeline_layout, use.first);
Chris Forbes47567b72017-06-09 12:09:45 -07002044 unsigned required_descriptor_count;
Jeff Bolze54ae892018-09-08 12:16:29 -05002045 std::set<uint32_t> descriptor_types = TypeToDescriptorTypeSet(module, use.second.type_id, required_descriptor_count);
Chris Forbes47567b72017-06-09 12:09:45 -07002046
2047 if (!binding) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002048 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002049 kVUID_Core_Shader_MissingDescriptor,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002050 "Shader uses descriptor slot %u.%u (expected `%s`) but not declared in pipeline layout",
Jeff Bolze54ae892018-09-08 12:16:29 -05002051 use.first.first, use.first.second, string_descriptorTypes(descriptor_types).c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07002052 } else if (~binding->stageFlags & pStage->stage) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002053 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002054 kVUID_Core_Shader_DescriptorNotAccessibleFromStage,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002055 "Shader uses descriptor slot %u.%u but descriptor not accessible from stage %s", use.first.first,
2056 use.first.second, string_VkShaderStageFlagBits(pStage->stage));
Jeff Bolze54ae892018-09-08 12:16:29 -05002057 } else if (descriptor_types.find(binding->descriptorType) == descriptor_types.end()) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002058 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002059 kVUID_Core_Shader_DescriptorTypeMismatch,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002060 "Type mismatch on descriptor slot %u.%u (expected `%s`) but descriptor of type %s", use.first.first,
Jeff Bolze54ae892018-09-08 12:16:29 -05002061 use.first.second, string_descriptorTypes(descriptor_types).c_str(),
Chris Forbes47567b72017-06-09 12:09:45 -07002062 string_VkDescriptorType(binding->descriptorType));
2063 } else if (binding->descriptorCount < required_descriptor_count) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002064 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002065 kVUID_Core_Shader_DescriptorTypeMismatch,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002066 "Shader expects at least %u descriptors for binding %u.%u but only %u provided",
2067 required_descriptor_count, use.first.first, use.first.second, binding->descriptorCount);
Chris Forbes47567b72017-06-09 12:09:45 -07002068 }
2069 }
2070
2071 // Validate use of input attachments against subpass structure
2072 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002073 auto input_attachment_uses = CollectInterfaceByInputAttachmentIndex(module, accessible_ids);
Chris Forbes47567b72017-06-09 12:09:45 -07002074
Petr Krause91f7a12017-12-14 20:57:36 +01002075 auto rpci = pipeline->rp_state->createInfo.ptr();
Chris Forbes47567b72017-06-09 12:09:45 -07002076 auto subpass = pipeline->graphicsPipelineCI.subpass;
2077
2078 for (auto use : input_attachment_uses) {
2079 auto input_attachments = rpci->pSubpasses[subpass].pInputAttachments;
2080 auto index = (input_attachments && use.first < rpci->pSubpasses[subpass].inputAttachmentCount)
Dave Houltona9df0ce2018-02-07 10:51:23 -07002081 ? input_attachments[use.first].attachment
2082 : VK_ATTACHMENT_UNUSED;
Chris Forbes47567b72017-06-09 12:09:45 -07002083
2084 if (index == VK_ATTACHMENT_UNUSED) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002085 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002086 kVUID_Core_Shader_MissingInputAttachment,
Chris Forbes47567b72017-06-09 12:09:45 -07002087 "Shader consumes input attachment index %d but not provided in subpass", use.first);
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002088 } else if (!(GetFormatType(rpci->pAttachments[index].format) & GetFundamentalType(module, use.second.type_id))) {
Chris Forbes47567b72017-06-09 12:09:45 -07002089 skip |=
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002090 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002091 kVUID_Core_Shader_InputAttachmentTypeMismatch,
Chris Forbes47567b72017-06-09 12:09:45 -07002092 "Subpass input attachment %u format of %s does not match type used in shader `%s`", use.first,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002093 string_VkFormat(rpci->pAttachments[index].format), DescribeType(module, use.second.type_id).c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07002094 }
2095 }
2096 }
2097
2098 return skip;
2099}
2100
// Validate that the output interface of one shader stage matches the input interface of
// the next stage: every consumed input must be produced, and matched locations must
// agree in type, per-patch decoration, and relaxed-precision decoration. An output that
// is never consumed is only a performance warning, not an error.
// Returns true when a validation error was reported.
static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data, shader_module const *producer,
                                           spirv_inst_iter producer_entrypoint, shader_stage_attributes const *producer_stage,
                                           shader_module const *consumer, spirv_inst_iter consumer_entrypoint,
                                           shader_stage_attributes const *consumer_stage) {
    bool skip = false;

    auto outputs =
        CollectInterfaceByLocation(producer, producer_entrypoint, spv::StorageClassOutput, producer_stage->arrayed_output);
    auto inputs = CollectInterfaceByLocation(consumer, consumer_entrypoint, spv::StorageClassInput, consumer_stage->arrayed_input);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    // Maps sorted by key (location); walk them together to find mismatches
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        // (location, component) key for each side; a dummy key when that side is exhausted.
        auto a_first = a_at_end ? std::make_pair(0u, 0u) : a_it->first;
        auto b_first = b_at_end ? std::make_pair(0u, 0u) : b_it->first;

        if (b_at_end || ((!a_at_end) && (a_first < b_first))) {
            // Output exists with no matching input: harmless, but worth a perf warning.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                            "%s writes to output location %u.%u which is not consumed by %s", producer_stage->name, a_first.first,
                            a_first.second, consumer_stage->name);
            a_it++;
        } else if (a_at_end || a_first > b_first) {
            // Input consumed with no matching output: an error.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(consumer->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                            "%s consumes input location %u.%u which is not written by %s", consumer_stage->name, b_first.first,
                            b_first.second, producer_stage->name);
            b_it++;
        } else {
            // Locations match: check type and decoration compatibility.
            // subtleties of arrayed interfaces:
            // - if is_patch, then the member is not arrayed, even though the interface may be.
            // - if is_block_member, then the extra array level of an arrayed interface is not
            //   expressed in the member type -- it's expressed in the block type.
            if (!TypesMatch(producer, consumer, a_it->second.type_id, b_it->second.type_id,
                            producer_stage->arrayed_output && !a_it->second.is_patch && !a_it->second.is_block_member,
                            consumer_stage->arrayed_input && !b_it->second.is_patch && !b_it->second.is_block_member, true)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Type mismatch on location %u.%u: '%s' vs '%s'", a_first.first, a_first.second,
                                DescribeType(producer, a_it->second.type_id).c_str(),
                                DescribeType(consumer, b_it->second.type_id).c_str());
            }
            if (a_it->second.is_patch != b_it->second.is_patch) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Decoration mismatch on location %u.%u: is per-%s in %s stage but per-%s in %s stage",
                                a_first.first, a_first.second, a_it->second.is_patch ? "patch" : "vertex", producer_stage->name,
                                b_it->second.is_patch ? "patch" : "vertex", consumer_stage->name);
            }
            if (a_it->second.is_relaxed_precision != b_it->second.is_relaxed_precision) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Decoration mismatch on location %u.%u: %s and %s stages differ in precision", a_first.first,
                                a_first.second, producer_stage->name, consumer_stage->name);
            }
            a_it++;
            b_it++;
        }
    }

    return skip;
}
2167
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002168static inline uint32_t DetermineFinalGeomStage(PIPELINE_STATE *pipeline, VkGraphicsPipelineCreateInfo *pCreateInfo) {
2169 uint32_t stage_mask = 0;
2170 if (pipeline->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
2171 for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
2172 stage_mask |= pCreateInfo->pStages[i].stage;
2173 }
2174 // Determine which shader in which PointSize should be written (the final geometry stage)
Jeff Bolz105d6492018-09-29 15:46:44 -05002175 if (stage_mask & VK_SHADER_STAGE_MESH_BIT_NV) {
2176 stage_mask = VK_SHADER_STAGE_MESH_BIT_NV;
2177 } else if (stage_mask & VK_SHADER_STAGE_GEOMETRY_BIT) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002178 stage_mask = VK_SHADER_STAGE_GEOMETRY_BIT;
2179 } else if (stage_mask & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
2180 stage_mask = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
2181 } else if (stage_mask & VK_SHADER_STAGE_VERTEX_BIT) {
2182 stage_mask = VK_SHADER_STAGE_VERTEX_BIT;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002183 }
2184 }
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002185 return stage_mask;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002186}
2187
// Validate that the shaders used by the given pipeline and store the active_slots
// that are actually used by the pipeline into pPipeline->active_slots.
// Per-stage validation runs first; if any stage fails, cross-stage interface
// validation is skipped. Otherwise vertex-input, stage-to-stage interface, and
// fragment-output checks run across the present stages.
// Returns true when a validation error was reported.
bool ValidateAndCapturePipelineShaderState(layer_data *dev_data, PIPELINE_STATE *pipeline) {
    auto pCreateInfo = pipeline->graphicsPipelineCI.ptr();
    int vertex_stage = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
    int fragment_stage = GetShaderStageId(VK_SHADER_STAGE_FRAGMENT_BIT);
    auto report_data = GetReportData(dev_data);

    // Per-stage module/entrypoint slots, indexed by GetShaderStageId().
    shader_module const *shaders[32];
    memset(shaders, 0, sizeof(shaders));
    spirv_inst_iter entrypoints[32];
    memset(entrypoints, 0, sizeof(entrypoints));
    bool skip = false;

    // Stage bit of the final geometry stage (nonzero only for POINT_LIST pipelines);
    // that stage is the one checked for PointSize.
    uint32_t pointlist_stage_mask = DetermineFinalGeomStage(pipeline, pCreateInfo);

    for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
        auto pStage = &pCreateInfo->pStages[i];
        auto stage_id = GetShaderStageId(pStage->stage);
        skip |= ValidatePipelineShaderStage(dev_data, pStage, pipeline, &shaders[stage_id], &entrypoints[stage_id],
                                            (pointlist_stage_mask == pStage->stage));
    }

    // if the shader stages are no good individually, cross-stage validation is pointless.
    if (skip) return true;

    auto vi = pCreateInfo->pVertexInputState;

    if (vi) {
        skip |= ValidateViConsistency(report_data, vi);
    }

    if (shaders[vertex_stage] && shaders[vertex_stage]->has_valid_spirv) {
        skip |= ValidateViAgainstVsInputs(report_data, vi, shaders[vertex_stage], entrypoints[vertex_stage]);
    }

    int producer = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
    int consumer = GetShaderStageId(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);

    // Advance past any missing leading stages so `producer` names the first present stage.
    while (!shaders[producer] && producer != fragment_stage) {
        producer++;
        consumer++;
    }

    // Walk consecutive present stages, validating each producer/consumer interface pair.
    for (; producer != fragment_stage && consumer <= fragment_stage; consumer++) {
        assert(shaders[producer]);
        if (shaders[consumer]) {
            if (shaders[consumer]->has_valid_spirv && shaders[producer]->has_valid_spirv) {
                skip |= ValidateInterfaceBetweenStages(report_data, shaders[producer], entrypoints[producer],
                                                       &shader_stage_attribs[producer], shaders[consumer], entrypoints[consumer],
                                                       &shader_stage_attribs[consumer]);
            }

            // The consumer becomes the producer for the next present stage.
            producer = consumer;
        }
    }

    if (shaders[fragment_stage] && shaders[fragment_stage]->has_valid_spirv) {
        skip |= ValidateFsOutputsAgainstRenderPass(report_data, shaders[fragment_stage], entrypoints[fragment_stage], pipeline,
                                                   pCreateInfo->subpass);
    }

    return skip;
}
2252
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002253bool ValidateComputePipeline(layer_data *dev_data, PIPELINE_STATE *pipeline) {
Chris Forbesa400a8a2017-07-20 13:10:24 -07002254 auto pCreateInfo = pipeline->computePipelineCI.ptr();
Chris Forbes47567b72017-06-09 12:09:45 -07002255
2256 shader_module const *module;
2257 spirv_inst_iter entrypoint;
2258
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002259 return ValidatePipelineShaderStage(dev_data, &pCreateInfo->stage, pipeline, &module, &entrypoint, false);
Chris Forbes47567b72017-06-09 12:09:45 -07002260}
Chris Forbes4ae55b32017-06-09 14:42:56 -07002261
Eric Werness30127fd2018-10-31 21:01:03 -07002262bool ValidateRayTracingPipelineNV(layer_data *dev_data, PIPELINE_STATE *pipeline) {
Jeff Bolzfbe51582018-09-13 10:01:35 -05002263 auto pCreateInfo = pipeline->raytracingPipelineCI.ptr();
2264
2265 shader_module const *module;
2266 spirv_inst_iter entrypoint;
2267
2268 return ValidatePipelineShaderStage(dev_data, pCreateInfo->pStages, pipeline, &module, &entrypoint, false);
2269}
2270
Dave Houltona9df0ce2018-02-07 10:51:23 -07002271uint32_t ValidationCache::MakeShaderHash(VkShaderModuleCreateInfo const *smci) { return XXH32(smci->pCode, smci->codeSize, 0); }
Chris Forbes9a61e082017-07-24 15:35:29 -07002272
Dave Houltona9df0ce2018-02-07 10:51:23 -07002273static ValidationCache *GetValidationCacheInfo(VkShaderModuleCreateInfo const *pCreateInfo) {
Chris Forbes9a61e082017-07-24 15:35:29 -07002274 while ((pCreateInfo = (VkShaderModuleCreateInfo const *)pCreateInfo->pNext) != nullptr) {
2275 if (pCreateInfo->sType == VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT)
2276 return (ValidationCache *)((VkShaderModuleValidationCacheCreateInfoEXT const *)pCreateInfo)->validationCache;
2277 }
2278
2279 return nullptr;
2280}
2281
// Pre-call validation for vkCreateShaderModule: checks the code size is word-aligned
// (unless NV_glsl_shader allows GLSL payloads) and runs the SPIRV-Tools validator over
// the module, honoring relaxed/scalar block-layout options when the corresponding
// extensions/features are enabled. A validation-cache hit short-circuits the check;
// a clean result is inserted into the cache.
// Returns true when a validation error was reported (which aborts the create call).
bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                       const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
    layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);

    bool skip = false;
    spv_result_t spv_valid = SPV_SUCCESS;

    // Respect the user's opt-out of shader validation.
    if (GetDisables(device_data)->shader_validation) {
        return false;
    }

    // With VK_NV_glsl_shader, the payload may be GLSL text, so the word-multiple rule
    // and (below) the SPIR-V error report only apply to actual SPIR-V payloads.
    auto have_glsl_shader = GetDeviceExtensions(device_data)->vk_nv_glsl_shader;

    if (!have_glsl_shader && (pCreateInfo->codeSize % 4)) {
        skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkShaderModuleCreateInfo-pCode-01376",
                        "SPIR-V module not valid: Codesize must be a multiple of 4 but is " PRINTF_SIZE_T_SPECIFIER ".",
                        pCreateInfo->codeSize);
    } else {
        // Skip the (expensive) SPIRV-Tools pass if this exact module already validated clean.
        auto cache = GetValidationCacheInfo(pCreateInfo);
        uint32_t hash = 0;
        if (cache) {
            hash = ValidationCache::MakeShaderHash(pCreateInfo);
            if (cache->Contains(hash)) return false;
        }

        // Use SPIRV-Tools validator to try and catch any issues with the module itself
        spv_target_env spirv_environment = SPV_ENV_VULKAN_1_0;
        if (GetApiVersion(device_data) >= VK_API_VERSION_1_1) {
            spirv_environment = SPV_ENV_VULKAN_1_1;
        }
        spv_context ctx = spvContextCreate(spirv_environment);
        spv_const_binary_t binary{pCreateInfo->pCode, pCreateInfo->codeSize / sizeof(uint32_t)};
        spv_diagnostic diag = nullptr;
        spv_validator_options options = spvValidatorOptionsCreate();
        if (GetDeviceExtensions(device_data)->vk_khr_relaxed_block_layout) {
            spvValidatorOptionsSetRelaxBlockLayout(options, true);
        }
        if (GetDeviceExtensions(device_data)->vk_ext_scalar_block_layout &&
            GetEnabledFeatures(device_data)->scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
            spvValidatorOptionsSetScalarBlockLayout(options, true);
        }
        spv_valid = spvValidateWithOptions(ctx, options, &binary, &diag);
        if (spv_valid != SPV_SUCCESS) {
            // Only report when the payload is genuinely SPIR-V (magic number) or GLSL
            // payloads are not permitted.
            if (!have_glsl_shader || (pCreateInfo->pCode[0] == spv::MagicNumber)) {
                skip |= log_msg(device_data->report_data,
                                spv_valid == SPV_WARNING ? VK_DEBUG_REPORT_WARNING_BIT_EXT : VK_DEBUG_REPORT_ERROR_BIT_EXT,
                                VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_InconsistentSpirv,
                                "SPIR-V module not valid: %s", diag && diag->error ? diag->error : "(no error text)");
            }
        } else {
            // Remember clean modules so repeat creates hit the cache above.
            if (cache) {
                cache->Insert(hash);
            }
        }

        // Release SPIRV-Tools resources regardless of the validation outcome.
        spvValidatorOptionsDestroy(options);
        spvDiagnosticDestroy(diag);
        spvContextDestroy(ctx);
    }

    return skip;
}
2345
2346void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002347 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule, void *csm_state_data) {
Mark Lobodzinski01734072019-02-13 17:39:15 -07002348 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), core_validation::layer_data_map);
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002349 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002350 if (GetEnables(device_data)->gpu_validation) {
2351 GpuPreCallCreateShaderModule(device_data, pCreateInfo, pAllocator, pShaderModule, &csm_state->unique_shader_id,
2352 &csm_state->instrumented_create_info, &csm_state->instrumented_pgm);
2353 }
2354}
2355
2356void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
2357 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule, VkResult result,
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002358 void *csm_state_data) {
Mark Lobodzinski01734072019-02-13 17:39:15 -07002359 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), core_validation::layer_data_map);
2360 if (VK_SUCCESS != result) return;
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002361 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002362
2363 spv_target_env spirv_environment =
2364 ((GetApiVersion(device_data) >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
2365 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
2366 std::unique_ptr<shader_module> new_shader_module(
2367 is_spirv ? new shader_module(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
2368 : new shader_module());
2369 device_data->shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
2370}