blob: 45f743f3a633fc58b1e9b7735e08ba6ffd84a584 [file] [log] [blame]
Karl Schultz7b024b42018-08-30 16:18:18 -06001/* Copyright (c) 2015-2019 The Khronos Group Inc.
2 * Copyright (c) 2015-2019 Valve Corporation
3 * Copyright (c) 2015-2019 LunarG, Inc.
4 * Copyright (C) 2015-2019 Google Inc.
Chris Forbes47567b72017-06-09 12:09:45 -07005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Chris Forbes <chrisf@ijw.co.nz>
Dave Houlton51653902018-06-22 17:32:13 -060019 * Author: Dave Houlton <daveh@lunarg.com>
Chris Forbes47567b72017-06-09 12:09:45 -070020 */
21
22#include <cinttypes>
23#include <cassert>
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +020024#include <chrono>
Chris Forbes47567b72017-06-09 12:09:45 -070025#include <vector>
26#include <unordered_map>
27#include <string>
28#include <sstream>
29#include <SPIRV/spirv.hpp>
30#include "vk_loader_platform.h"
31#include "vk_enum_string_helper.h"
Chris Forbes47567b72017-06-09 12:09:45 -070032#include "vk_layer_data.h"
33#include "vk_layer_extension_utils.h"
34#include "vk_layer_utils.h"
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -070035#include "chassis.h"
Chris Forbes47567b72017-06-09 12:09:45 -070036#include "core_validation.h"
Chris Forbes47567b72017-06-09 12:09:45 -070037#include "shader_validation.h"
Chris Forbes4ae55b32017-06-09 14:42:56 -070038#include "spirv-tools/libspirv.h"
Chris Forbes9a61e082017-07-24 15:35:29 -070039#include "xxhash.h"
Chris Forbes47567b72017-06-09 12:09:45 -070040
// Bitmask characterizing the fundamental numeric class of a VkFormat (or of a
// SPIR-V interface type). Values are distinct bits so a single format can
// report more than one class (e.g. depth/stencil reports FLOAT | UINT).
enum FORMAT_TYPE {
    FORMAT_TYPE_FLOAT = 1,  // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
    FORMAT_TYPE_SINT = 2,
    FORMAT_TYPE_UINT = 4,
};
46
47typedef std::pair<unsigned, unsigned> location_t;
48
// Describes one slot of a shader stage interface: either a standalone
// input/output variable or a single member of an interface block.
struct interface_var {
    uint32_t id;       // SPIR-V result id of the OpVariable
    uint32_t type_id;  // Type id of the variable (or of the block member)
    uint32_t offset;   // Location offset within a variable that consumes multiple locations
    bool is_patch;
    bool is_block_member;
    bool is_relaxed_precision;
    // TODO: collect the name, too? Isn't required to be present.
};
58
// Static per-stage attributes consulted when validating stage interfaces.
struct shader_stage_attributes {
    char const *const name;  // Human-readable stage name
    bool arrayed_input;      // Inputs carry an extra per-vertex array level
    bool arrayed_output;     // Outputs carry an extra per-vertex array level
    VkShaderStageFlags stage;
};
65
// One entry per graphics pipeline stage. Tessellation and geometry stages use
// per-vertex arrayed inputs (and tess control also arrayed outputs).
static shader_stage_attributes shader_stage_attribs[] = {
    {"vertex shader", false, false, VK_SHADER_STAGE_VERTEX_BIT},
    {"tessellation control shader", true, true, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
    {"tessellation evaluation shader", true, false, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
    {"geometry shader", true, false, VK_SHADER_STAGE_GEOMETRY_BIT},
    {"fragment shader", false, false, VK_SHADER_STAGE_FRAGMENT_BIT},
};
73
74// SPIRV utility functions
Shannon McPhersonc06c33d2018-06-28 17:21:12 -060075void shader_module::BuildDefIndex() {
Chris Forbes47567b72017-06-09 12:09:45 -070076 for (auto insn : *this) {
77 switch (insn.opcode()) {
78 // Types
79 case spv::OpTypeVoid:
80 case spv::OpTypeBool:
81 case spv::OpTypeInt:
82 case spv::OpTypeFloat:
83 case spv::OpTypeVector:
84 case spv::OpTypeMatrix:
85 case spv::OpTypeImage:
86 case spv::OpTypeSampler:
87 case spv::OpTypeSampledImage:
88 case spv::OpTypeArray:
89 case spv::OpTypeRuntimeArray:
90 case spv::OpTypeStruct:
91 case spv::OpTypeOpaque:
92 case spv::OpTypePointer:
93 case spv::OpTypeFunction:
94 case spv::OpTypeEvent:
95 case spv::OpTypeDeviceEvent:
96 case spv::OpTypeReserveId:
97 case spv::OpTypeQueue:
98 case spv::OpTypePipe:
Shannon McPherson0fa28232018-11-01 11:59:02 -060099 case spv::OpTypeAccelerationStructureNV:
Jeff Bolze4356752019-03-07 11:23:46 -0600100 case spv::OpTypeCooperativeMatrixNV:
Chris Forbes47567b72017-06-09 12:09:45 -0700101 def_index[insn.word(1)] = insn.offset();
102 break;
103
104 // Fixed constants
105 case spv::OpConstantTrue:
106 case spv::OpConstantFalse:
107 case spv::OpConstant:
108 case spv::OpConstantComposite:
109 case spv::OpConstantSampler:
110 case spv::OpConstantNull:
111 def_index[insn.word(2)] = insn.offset();
112 break;
113
114 // Specialization constants
115 case spv::OpSpecConstantTrue:
116 case spv::OpSpecConstantFalse:
117 case spv::OpSpecConstant:
118 case spv::OpSpecConstantComposite:
119 case spv::OpSpecConstantOp:
120 def_index[insn.word(2)] = insn.offset();
121 break;
122
123 // Variables
124 case spv::OpVariable:
125 def_index[insn.word(2)] = insn.offset();
126 break;
127
128 // Functions
129 case spv::OpFunction:
130 def_index[insn.word(2)] = insn.offset();
131 break;
132
133 default:
134 // We don't care about any other defs for now.
135 break;
136 }
137 }
138}
139
Jeff Bolz105d6492018-09-29 15:46:44 -0500140unsigned ExecutionModelToShaderStageFlagBits(unsigned mode) {
141 switch (mode) {
142 case spv::ExecutionModelVertex:
143 return VK_SHADER_STAGE_VERTEX_BIT;
144 case spv::ExecutionModelTessellationControl:
145 return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
146 case spv::ExecutionModelTessellationEvaluation:
147 return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
148 case spv::ExecutionModelGeometry:
149 return VK_SHADER_STAGE_GEOMETRY_BIT;
150 case spv::ExecutionModelFragment:
151 return VK_SHADER_STAGE_FRAGMENT_BIT;
152 case spv::ExecutionModelGLCompute:
153 return VK_SHADER_STAGE_COMPUTE_BIT;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600154 case spv::ExecutionModelRayGenerationNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700155 return VK_SHADER_STAGE_RAYGEN_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600156 case spv::ExecutionModelAnyHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700157 return VK_SHADER_STAGE_ANY_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600158 case spv::ExecutionModelClosestHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700159 return VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600160 case spv::ExecutionModelMissNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700161 return VK_SHADER_STAGE_MISS_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600162 case spv::ExecutionModelIntersectionNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700163 return VK_SHADER_STAGE_INTERSECTION_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600164 case spv::ExecutionModelCallableNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700165 return VK_SHADER_STAGE_CALLABLE_BIT_NV;
Jeff Bolz105d6492018-09-29 15:46:44 -0500166 case spv::ExecutionModelTaskNV:
167 return VK_SHADER_STAGE_TASK_BIT_NV;
168 case spv::ExecutionModelMeshNV:
169 return VK_SHADER_STAGE_MESH_BIT_NV;
170 default:
171 return 0;
172 }
173}
174
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600175static spirv_inst_iter FindEntrypoint(shader_module const *src, char const *name, VkShaderStageFlagBits stageBits) {
Chris Forbes47567b72017-06-09 12:09:45 -0700176 for (auto insn : *src) {
177 if (insn.opcode() == spv::OpEntryPoint) {
178 auto entrypointName = (char const *)&insn.word(3);
Jeff Bolz105d6492018-09-29 15:46:44 -0500179 auto executionModel = insn.word(1);
180 auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
Chris Forbes47567b72017-06-09 12:09:45 -0700181
182 if (!strcmp(entrypointName, name) && (entrypointStageBits & stageBits)) {
183 return insn;
184 }
185 }
186 }
187
188 return src->end();
189}
190
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600191static char const *StorageClassName(unsigned sc) {
Chris Forbes47567b72017-06-09 12:09:45 -0700192 switch (sc) {
193 case spv::StorageClassInput:
194 return "input";
195 case spv::StorageClassOutput:
196 return "output";
197 case spv::StorageClassUniformConstant:
198 return "const uniform";
199 case spv::StorageClassUniform:
200 return "uniform";
201 case spv::StorageClassWorkgroup:
202 return "workgroup local";
203 case spv::StorageClassCrossWorkgroup:
204 return "workgroup global";
205 case spv::StorageClassPrivate:
206 return "private global";
207 case spv::StorageClassFunction:
208 return "function";
209 case spv::StorageClassGeneric:
210 return "generic";
211 case spv::StorageClassAtomicCounter:
212 return "atomic counter";
213 case spv::StorageClassImage:
214 return "image";
215 case spv::StorageClassPushConstant:
216 return "push constant";
Chris Forbes9f89d752018-03-07 12:57:48 -0800217 case spv::StorageClassStorageBuffer:
218 return "storage buffer";
Chris Forbes47567b72017-06-09 12:09:45 -0700219 default:
220 return "unknown";
221 }
222}
223
224// Get the value of an integral constant
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600225unsigned GetConstantValue(shader_module const *src, unsigned id) {
Chris Forbes47567b72017-06-09 12:09:45 -0700226 auto value = src->get_def(id);
227 assert(value != src->end());
228
229 if (value.opcode() != spv::OpConstant) {
230 // TODO: Either ensure that the specialization transform is already performed on a module we're
231 // considering here, OR -- specialize on the fly now.
232 return 1;
233 }
234
235 return value.word(3);
236}
237
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600238static void DescribeTypeInner(std::ostringstream &ss, shader_module const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700239 auto insn = src->get_def(type);
240 assert(insn != src->end());
241
242 switch (insn.opcode()) {
243 case spv::OpTypeBool:
244 ss << "bool";
245 break;
246 case spv::OpTypeInt:
247 ss << (insn.word(3) ? 's' : 'u') << "int" << insn.word(2);
248 break;
249 case spv::OpTypeFloat:
250 ss << "float" << insn.word(2);
251 break;
252 case spv::OpTypeVector:
253 ss << "vec" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600254 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700255 break;
256 case spv::OpTypeMatrix:
257 ss << "mat" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600258 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700259 break;
260 case spv::OpTypeArray:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600261 ss << "arr[" << GetConstantValue(src, insn.word(3)) << "] of ";
262 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700263 break;
Chris Forbes062f1222018-08-21 15:34:15 -0700264 case spv::OpTypeRuntimeArray:
265 ss << "runtime arr[] of ";
266 DescribeTypeInner(ss, src, insn.word(2));
267 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700268 case spv::OpTypePointer:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600269 ss << "ptr to " << StorageClassName(insn.word(2)) << " ";
270 DescribeTypeInner(ss, src, insn.word(3));
Chris Forbes47567b72017-06-09 12:09:45 -0700271 break;
272 case spv::OpTypeStruct: {
273 ss << "struct of (";
274 for (unsigned i = 2; i < insn.len(); i++) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600275 DescribeTypeInner(ss, src, insn.word(i));
Chris Forbes47567b72017-06-09 12:09:45 -0700276 if (i == insn.len() - 1) {
277 ss << ")";
278 } else {
279 ss << ", ";
280 }
281 }
282 break;
283 }
284 case spv::OpTypeSampler:
285 ss << "sampler";
286 break;
287 case spv::OpTypeSampledImage:
288 ss << "sampler+";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600289 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700290 break;
291 case spv::OpTypeImage:
292 ss << "image(dim=" << insn.word(3) << ", sampled=" << insn.word(7) << ")";
293 break;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600294 case spv::OpTypeAccelerationStructureNV:
Jeff Bolz105d6492018-09-29 15:46:44 -0500295 ss << "accelerationStruture";
296 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700297 default:
298 ss << "oddtype";
299 break;
300 }
301}
302
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600303static std::string DescribeType(shader_module const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700304 std::ostringstream ss;
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600305 DescribeTypeInner(ss, src, type);
Chris Forbes47567b72017-06-09 12:09:45 -0700306 return ss.str();
307}
308
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600309static bool IsNarrowNumericType(spirv_inst_iter type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700310 if (type.opcode() != spv::OpTypeInt && type.opcode() != spv::OpTypeFloat) return false;
311 return type.word(2) < 64;
312}
313
// Walk two type trees in lockstep and report whether they match for interface
// purposes. a_arrayed/b_arrayed request peeling of one outer per-vertex array
// level (e.g. geometry/tessellation interfaces); `relaxed` permits the
// producer side (a) to be a vector wrapping, or wider than, the consumer's
// narrow numeric type.
static bool TypesMatch(shader_module const *a, shader_module const *b, unsigned a_type, unsigned b_type, bool a_arrayed,
                       bool b_arrayed, bool relaxed) {
    // Walk two type trees together, and complain about differences
    auto a_insn = a->get_def(a_type);
    auto b_insn = b->get_def(b_type);
    assert(a_insn != a->end());
    assert(b_insn != b->end());

    // Ignore runtime-sized arrays-- they cannot appear in these interfaces.

    if (a_arrayed && a_insn.opcode() == spv::OpTypeArray) {
        // Peel the per-vertex array level on a and retry.
        return TypesMatch(a, b, a_insn.word(2), b_type, false, b_arrayed, relaxed);
    }

    if (b_arrayed && b_insn.opcode() == spv::OpTypeArray) {
        // We probably just found the extra level of arrayness in b_type: compare the type inside it to a_type
        return TypesMatch(a, b, a_type, b_insn.word(2), a_arrayed, false, relaxed);
    }

    if (a_insn.opcode() == spv::OpTypeVector && relaxed && IsNarrowNumericType(b_insn)) {
        // Relaxed mode: a vector on the producer side may match the consumer's scalar element type.
        return TypesMatch(a, b, a_insn.word(2), b_type, a_arrayed, b_arrayed, false);
    }

    if (a_insn.opcode() != b_insn.opcode()) {
        return false;
    }

    if (a_insn.opcode() == spv::OpTypePointer) {
        // Match on pointee type. storage class is expected to differ
        return TypesMatch(a, b, a_insn.word(3), b_insn.word(3), a_arrayed, b_arrayed, relaxed);
    }

    if (a_arrayed || b_arrayed) {
        // If we havent resolved array-of-verts by here, we're not going to.
        return false;
    }

    switch (a_insn.opcode()) {
        case spv::OpTypeBool:
            return true;
        case spv::OpTypeInt:
            // Match on width, signedness
            return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeFloat:
            // Match on width
            return a_insn.word(2) == b_insn.word(2);
        case spv::OpTypeVector:
            // Match on element type, count.
            if (!TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false)) return false;
            if (relaxed && IsNarrowNumericType(a->get_def(a_insn.word(2)))) {
                // Relaxed: producer may have at least as many components as the consumer.
                return a_insn.word(3) >= b_insn.word(3);
            } else {
                return a_insn.word(3) == b_insn.word(3);
            }
        case spv::OpTypeMatrix:
            // Match on element type, count.
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeArray:
            // Match on element type, count. these all have the same layout. we don't get here if b_arrayed. This differs from
            // vector & matrix types in that the array size is the id of a constant instruction, * not a literal within OpTypeArray
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   GetConstantValue(a, a_insn.word(3)) == GetConstantValue(b, b_insn.word(3));
        case spv::OpTypeStruct:
            // Match on all element types
            {
                if (a_insn.len() != b_insn.len()) {
                    return false;  // Structs cannot match if member counts differ
                }

                for (unsigned i = 2; i < a_insn.len(); i++) {
                    if (!TypesMatch(a, b, a_insn.word(i), b_insn.word(i), a_arrayed, b_arrayed, false)) {
                        return false;
                    }
                }

                return true;
            }
        default:
            // Remaining types are CLisms, or may not appear in the interfaces we are interested in. Just claim no match.
            return false;
    }
}
397
// Look up `id` in `map`, returning `def` when the key is absent.
static unsigned ValueOrDefault(std::unordered_map<unsigned, unsigned> const &map, unsigned id, unsigned def) {
    auto const found = map.find(id);
    return (found == map.end()) ? def : found->second;
}
405
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600406static unsigned GetLocationsConsumedByType(shader_module const *src, unsigned type, bool strip_array_level) {
Chris Forbes47567b72017-06-09 12:09:45 -0700407 auto insn = src->get_def(type);
408 assert(insn != src->end());
409
410 switch (insn.opcode()) {
411 case spv::OpTypePointer:
412 // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
413 // pointers around.
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600414 return GetLocationsConsumedByType(src, insn.word(3), strip_array_level);
Chris Forbes47567b72017-06-09 12:09:45 -0700415 case spv::OpTypeArray:
416 if (strip_array_level) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600417 return GetLocationsConsumedByType(src, insn.word(2), false);
Chris Forbes47567b72017-06-09 12:09:45 -0700418 } else {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600419 return GetConstantValue(src, insn.word(3)) * GetLocationsConsumedByType(src, insn.word(2), false);
Chris Forbes47567b72017-06-09 12:09:45 -0700420 }
421 case spv::OpTypeMatrix:
422 // Num locations is the dimension * element size
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600423 return insn.word(3) * GetLocationsConsumedByType(src, insn.word(2), false);
Chris Forbes47567b72017-06-09 12:09:45 -0700424 case spv::OpTypeVector: {
425 auto scalar_type = src->get_def(insn.word(2));
426 auto bit_width =
427 (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
428
429 // Locations are 128-bit wide; 3- and 4-component vectors of 64 bit types require two.
430 return (bit_width * insn.word(3) + 127) / 128;
431 }
432 default:
433 // Everything else is just 1.
434 return 1;
435
436 // TODO: extend to handle 64bit scalar types, whose vectors may need multiple locations.
437 }
438}
439
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +0200440static unsigned GetComponentsConsumedByType(shader_module const *src, unsigned type, bool strip_array_level) {
441 auto insn = src->get_def(type);
442 assert(insn != src->end());
443
444 switch (insn.opcode()) {
445 case spv::OpTypePointer:
446 // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
447 // pointers around.
448 return GetComponentsConsumedByType(src, insn.word(3), strip_array_level);
449 case spv::OpTypeStruct: {
450 uint32_t sum = 0;
451 for (uint32_t i = 2; i < insn.len(); i++) { // i=2 to skip word(0) and word(1)=ID of struct
452 sum += GetComponentsConsumedByType(src, insn.word(i), false);
453 }
454 return sum;
455 }
456 case spv::OpTypeArray: {
457 uint32_t sum = 0;
458 for (uint32_t i = 2; i < insn.len(); i++) {
459 sum += GetComponentsConsumedByType(src, insn.word(i), false);
460 }
461 return sum;
462 }
463 case spv::OpTypeMatrix:
464 // Num locations is the dimension * element size
465 return insn.word(3) * GetComponentsConsumedByType(src, insn.word(2), false);
466 case spv::OpTypeVector: {
467 auto scalar_type = src->get_def(insn.word(2));
468 auto bit_width =
469 (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
470 // One component is 32-bit
471 return (bit_width * insn.word(3) + 31) / 32;
472 }
473 case spv::OpTypeFloat: {
474 auto bit_width = insn.word(2);
475 return (bit_width + 31) / 32;
476 }
477 case spv::OpTypeInt: {
478 auto bit_width = insn.word(2);
479 return (bit_width + 31) / 32;
480 }
481 case spv::OpConstant:
482 return GetComponentsConsumedByType(src, insn.word(1), false);
483 default:
484 return 0;
485 }
486}
487
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600488static unsigned GetLocationsConsumedByFormat(VkFormat format) {
Chris Forbes47567b72017-06-09 12:09:45 -0700489 switch (format) {
490 case VK_FORMAT_R64G64B64A64_SFLOAT:
491 case VK_FORMAT_R64G64B64A64_SINT:
492 case VK_FORMAT_R64G64B64A64_UINT:
493 case VK_FORMAT_R64G64B64_SFLOAT:
494 case VK_FORMAT_R64G64B64_SINT:
495 case VK_FORMAT_R64G64B64_UINT:
496 return 2;
497 default:
498 return 1;
499 }
500}
501
// Characterize a VkFormat's fundamental numeric class as a FORMAT_TYPE_* mask.
// Depth/stencil formats report both FLOAT and UINT (depth reads as float,
// stencil as uint); undefined reports 0.
static unsigned GetFormatType(VkFormat fmt) {
    if (FormatIsSInt(fmt)) return FORMAT_TYPE_SINT;
    if (FormatIsUInt(fmt)) return FORMAT_TYPE_UINT;
    if (FormatIsDepthAndStencil(fmt)) return FORMAT_TYPE_FLOAT | FORMAT_TYPE_UINT;
    if (fmt == VK_FORMAT_UNDEFINED) return 0;
    // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader.
    return FORMAT_TYPE_FLOAT;
}
510
511// characterizes a SPIR-V type appearing in an interface to a FF stage, for comparison to a VkFormat's characterization above.
Chris Forbes062f1222018-08-21 15:34:15 -0700512// also used for input attachments, as we statically know their format.
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600513static unsigned GetFundamentalType(shader_module const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700514 auto insn = src->get_def(type);
515 assert(insn != src->end());
516
517 switch (insn.opcode()) {
518 case spv::OpTypeInt:
519 return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
520 case spv::OpTypeFloat:
521 return FORMAT_TYPE_FLOAT;
522 case spv::OpTypeVector:
Chris Forbes47567b72017-06-09 12:09:45 -0700523 case spv::OpTypeMatrix:
Chris Forbes47567b72017-06-09 12:09:45 -0700524 case spv::OpTypeArray:
Chris Forbes062f1222018-08-21 15:34:15 -0700525 case spv::OpTypeRuntimeArray:
526 case spv::OpTypeImage:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600527 return GetFundamentalType(src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700528 case spv::OpTypePointer:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600529 return GetFundamentalType(src, insn.word(3));
Chris Forbes47567b72017-06-09 12:09:45 -0700530
531 default:
532 return 0;
533 }
534}
535
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600536static uint32_t GetShaderStageId(VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -0700537 uint32_t bit_pos = uint32_t(u_ffs(stage));
538 return bit_pos - 1;
539}
540
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600541static spirv_inst_iter GetStructType(shader_module const *src, spirv_inst_iter def, bool is_array_of_verts) {
Chris Forbes47567b72017-06-09 12:09:45 -0700542 while (true) {
543 if (def.opcode() == spv::OpTypePointer) {
544 def = src->get_def(def.word(3));
545 } else if (def.opcode() == spv::OpTypeArray && is_array_of_verts) {
546 def = src->get_def(def.word(2));
547 is_array_of_verts = false;
548 } else if (def.opcode() == spv::OpTypeStruct) {
549 return def;
550 } else {
551 return src->end();
552 }
553 }
554}
555
// If `type_id` (the type of variable `id`) refers to an interface block (a
// struct decorated Block, per `blocks`), emit one interface_var per
// (location, component) slot occupied by each located member into *out and
// return true. Returns false, leaving *out untouched, when the type is not an
// interface block.
static bool CollectInterfaceBlockMembers(shader_module const *src, std::map<location_t, interface_var> *out,
                                         std::unordered_map<unsigned, unsigned> const &blocks, bool is_array_of_verts, uint32_t id,
                                         uint32_t type_id, bool is_patch, int /*first_location*/) {
    // Walk down the type_id presented, trying to determine whether it's actually an interface block.
    auto type = GetStructType(src, src->get_def(type_id), is_array_of_verts && !is_patch);
    if (type == src->end() || blocks.find(type.word(1)) == blocks.end()) {
        // This isn't an interface block.
        return false;
    }

    std::unordered_map<unsigned, unsigned> member_components;
    std::unordered_map<unsigned, unsigned> member_relaxed_precision;
    std::unordered_map<unsigned, unsigned> member_patch;

    // Walk all the OpMemberDecorate for type's result id -- first pass, collect components.
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);

            if (insn.word(3) == spv::DecorationComponent) {
                unsigned component = insn.word(4);
                member_components[member_index] = component;
            }

            if (insn.word(3) == spv::DecorationRelaxedPrecision) {
                member_relaxed_precision[member_index] = 1;
            }

            if (insn.word(3) == spv::DecorationPatch) {
                member_patch[member_index] = 1;
            }
        }
    }

    // TODO: correctly handle location assignment from outside

    // Second pass -- produce the output, from Location decorations
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);
            unsigned member_type_id = type.word(2 + member_index);

            if (insn.word(3) == spv::DecorationLocation) {
                unsigned location = insn.word(4);
                unsigned num_locations = GetLocationsConsumedByType(src, member_type_id, false);
                auto component_it = member_components.find(member_index);
                unsigned component = component_it == member_components.end() ? 0 : component_it->second;
                bool is_relaxed_precision = member_relaxed_precision.find(member_index) != member_relaxed_precision.end();
                bool member_is_patch = is_patch || member_patch.count(member_index) > 0;

                // A member consuming N locations yields N interface_vars, one per location offset.
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    // TODO: member index in interface_var too?
                    v.type_id = member_type_id;
                    v.offset = offset;
                    v.is_patch = member_is_patch;
                    v.is_block_member = true;
                    v.is_relaxed_precision = is_relaxed_precision;
                    (*out)[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return true;
}
623
Ari Suonpaa696b3432019-03-11 14:02:57 +0200624static std::vector<uint32_t> FindEntrypointInterfaces(spirv_inst_iter entrypoint) {
625 std::vector<uint32_t> interfaces;
626 // Find the end of the entrypoint's name string. additional zero bytes follow the actual null terminator, to fill out the
627 // rest of the word - so we only need to look at the last byte in the word to determine which word contains the terminator.
628 uint32_t word = 3;
629 while (entrypoint.word(word) & 0xff000000u) {
630 ++word;
631 }
632 ++word;
633
634 for (; word < entrypoint.len(); word++) interfaces.push_back(entrypoint.word(word));
635
636 return interfaces;
637}
638
// Build a map of (location, component) -> interface_var for every non-builtin
// variable of storage class `sinterface` listed on `entrypoint`. Interface
// blocks are expanded member-by-member via CollectInterfaceBlockMembers;
// plain variables emit one entry per location they consume.
static std::map<location_t, interface_var> CollectInterfaceByLocation(shader_module const *src, spirv_inst_iter entrypoint,
                                                                      spv::StorageClass sinterface, bool is_array_of_verts) {
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;
    std::unordered_map<unsigned, unsigned> var_components;
    std::unordered_map<unsigned, unsigned> blocks;
    std::unordered_map<unsigned, unsigned> var_patch;
    std::unordered_map<unsigned, unsigned> var_relaxed_precision;

    // Gather all OpDecorate info keyed by decorated id, in one pass.
    for (auto insn : *src) {
        // We consider two interface models: SSO rendezvous-by-location, and builtins. Complain about anything that
        // fits neither model.
        if (insn.opcode() == spv::OpDecorate) {
            if (insn.word(2) == spv::DecorationLocation) {
                var_locations[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBuiltIn) {
                var_builtins[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationComponent) {
                var_components[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBlock) {
                blocks[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationPatch) {
                var_patch[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationRelaxedPrecision) {
                var_relaxed_precision[insn.word(1)] = 1;
            }
        }
    }

    // TODO: handle grouped decorations
    // TODO: handle index=1 dual source outputs from FS -- two vars will have the same location, and we DON'T want to clobber.

    std::map<location_t, interface_var> out;

    for (uint32_t word : FindEntrypointInterfaces(entrypoint)) {
        auto insn = src->get_def(word);
        assert(insn != src->end());
        assert(insn.opcode() == spv::OpVariable);

        // word(3) of OpVariable is its storage class; word(2) the result id, word(1) the type.
        if (insn.word(3) == static_cast<uint32_t>(sinterface)) {
            unsigned id = insn.word(2);
            unsigned type = insn.word(1);

            int location = ValueOrDefault(var_locations, id, static_cast<unsigned>(-1));
            int builtin = ValueOrDefault(var_builtins, id, static_cast<unsigned>(-1));
            unsigned component = ValueOrDefault(var_components, id, 0);  // Unspecified is OK, is 0
            bool is_patch = var_patch.find(id) != var_patch.end();
            bool is_relaxed_precision = var_relaxed_precision.find(id) != var_relaxed_precision.end();

            if (builtin != -1)
                continue;  // Builtins are handled elsewhere, not by location
            else if (!CollectInterfaceBlockMembers(src, &out, blocks, is_array_of_verts, id, type, is_patch, location)) {
                // A user-defined interface variable, with a location. Where a variable occupied multiple locations, emit
                // one result for each.
                unsigned num_locations = GetLocationsConsumedByType(src, type, is_array_of_verts && !is_patch);
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    v.type_id = type;
                    v.offset = offset;
                    v.is_patch = is_patch;
                    v.is_relaxed_precision = is_relaxed_precision;
                    out[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return out;
}
719
// Returns the list of BuiltIn decorations carried by the members of the entrypoint's IO block
// in the given storage class, ordered by struct member index. Members that carry no BuiltIn
// decoration are left as spv::BuiltInMax (the initial fill value below).
// NOTE(review): member indices are used directly as indices into builtinBlockMembers, so this
// assumes at most one builtin block matches the storage class — confirm for multi-block shaders.
static std::vector<uint32_t> CollectBuiltinBlockMembers(shader_module const *src, spirv_inst_iter entrypoint,
                                                        uint32_t storageClass) {
    std::vector<uint32_t> variables;              // Type ids of the entrypoint's interface variables in storageClass
    std::vector<uint32_t> builtinStructMembers;   // Struct type ids that have at least one BuiltIn member decoration
    std::vector<uint32_t> builtinDecorations;     // Ids decorated (directly or via Block membership) as built-in

    for (auto insn : *src) {
        switch (insn.opcode()) {
            // Find all built-in member decorations
            case spv::OpMemberDecorate:
                if (insn.word(3) == spv::DecorationBuiltIn) {
                    builtinStructMembers.push_back(insn.word(1));
                }
                break;
            // Find all built-in decorations
            case spv::OpDecorate:
                switch (insn.word(2)) {
                    case spv::DecorationBlock: {
                        uint32_t blockID = insn.word(1);
                        // Relies on OpMemberDecorate preceding the Block OpDecorate in the module's
                        // decoration section, so builtinStructMembers is already populated.
                        for (auto builtInBlockID : builtinStructMembers) {
                            // Check if one of the members of the block are built-in -> the block is built-in
                            if (blockID == builtInBlockID) {
                                builtinDecorations.push_back(blockID);
                                break;
                            }
                        }
                        break;
                    }
                    case spv::DecorationBuiltIn:
                        builtinDecorations.push_back(insn.word(1));
                        break;
                    default:
                        break;
                }
                break;
            default:
                break;
        }
    }

    // Find all interface variables belonging to the entrypoint and matching the storage class
    for (uint32_t id : FindEntrypointInterfaces(entrypoint)) {
        auto def = src->get_def(id);
        assert(def != src->end());
        assert(def.opcode() == spv::OpVariable);

        // OpVariable word(3) is the storage class; word(1) is the (pointer) result type id.
        if (def.word(3) == storageClass) variables.push_back(def.word(1));
    }

    // Find all members belonging to the builtin block selected
    std::vector<uint32_t> builtinBlockMembers;
    for (auto &var : variables) {
        // Dereference the pointer type: word(3) of OpTypePointer is the pointee type.
        auto def = src->get_def(src->get_def(var).word(3));

        // It could be an array of IO blocks. The element type should be the struct defining the block contents
        if (def.opcode() == spv::OpTypeArray) def = src->get_def(def.word(2));

        // Now find all members belonging to the struct defining the IO block
        if (def.opcode() == spv::OpTypeStruct) {
            for (auto builtInID : builtinDecorations) {
                if (builtInID == def.word(1)) {
                    // OpTypeStruct words 2..len-1 are the member type ids, so one slot per member.
                    for (int i = 2; i < (int)def.len(); i++)
                        builtinBlockMembers.push_back(spv::BuiltInMax);  // Start with undefined builtin for each struct member.
                    // These shouldn't be left after replacing.
                    for (auto insn : *src) {
                        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == builtInID &&
                            insn.word(3) == spv::DecorationBuiltIn) {
                            auto structIndex = insn.word(2);  // Member index within the struct
                            assert(structIndex < builtinBlockMembers.size());
                            builtinBlockMembers[structIndex] = insn.word(4);  // The BuiltIn enumerant
                        }
                    }
                }
            }
        }
    }

    return builtinBlockMembers;
}
799
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600800static std::vector<std::pair<uint32_t, interface_var>> CollectInterfaceByInputAttachmentIndex(
Chris Forbes47567b72017-06-09 12:09:45 -0700801 shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids) {
802 std::vector<std::pair<uint32_t, interface_var>> out;
803
804 for (auto insn : *src) {
805 if (insn.opcode() == spv::OpDecorate) {
806 if (insn.word(2) == spv::DecorationInputAttachmentIndex) {
807 auto attachment_index = insn.word(3);
808 auto id = insn.word(1);
809
810 if (accessible_ids.count(id)) {
811 auto def = src->get_def(id);
812 assert(def != src->end());
813
814 if (def.opcode() == spv::OpVariable && insn.word(3) == spv::StorageClassUniformConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600815 auto num_locations = GetLocationsConsumedByType(src, def.word(1), false);
Chris Forbes47567b72017-06-09 12:09:45 -0700816 for (unsigned int offset = 0; offset < num_locations; offset++) {
817 interface_var v = {};
818 v.id = id;
819 v.type_id = def.word(1);
820 v.offset = offset;
821 out.emplace_back(attachment_index + offset, v);
822 }
823 }
824 }
825 }
826 }
827 }
828
829 return out;
830}
831
// Determines whether a descriptor of the given SPIR-V type can be written by the shader:
// either a storage image (sampled==2, excluding subpass-data inputs) or a storage buffer
// (StorageBuffer class, or legacy BufferBlock-decorated Uniform) with at least one member
// not decorated NonWritable. All other types are treated as read-only.
static bool IsWritableDescriptorType(shader_module const *module, uint32_t type_id, bool is_storage_buffer) {
    auto type = module->get_def(type_id);

    // Strip off any array or pointer levels to reach the underlying image/struct type.
    while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
        if (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypeRuntimeArray) {
            type = module->get_def(type.word(2));  // Element type
        } else {
            type = module->get_def(type.word(3));  // Pointee type
        }
    }

    switch (type.opcode()) {
        case spv::OpTypeImage: {
            auto dim = type.word(3);
            auto sampled = type.word(7);
            // sampled == 2 means "used without a sampler" (i.e. a storage image). Subpass
            // inputs are read-only even though they are also unsampled.
            return sampled == 2 && dim != spv::DimSubpassData;
        }

        case spv::OpTypeStruct: {
            std::unordered_set<unsigned> nonwritable_members;
            for (auto insn : *module) {
                if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
                    if (insn.word(2) == spv::DecorationBufferBlock) {
                        // Legacy storage block in the Uniform storage class
                        // has its struct type decorated with BufferBlock.
                        is_storage_buffer = true;
                    }
                } else if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1) &&
                           insn.word(3) == spv::DecorationNonWritable) {
                    nonwritable_members.insert(insn.word(2));
                }
            }

            // A buffer is writable if it's either flavor of storage buffer, and has any member not decorated
            // as nonwritable. (type.len() - 2 is the struct's member count: OpTypeStruct words 2..len-1.)
            return is_storage_buffer && nonwritable_members.size() != type.len() - 2;
        }
    }

    return false;
}
874
// Collects all accessible descriptor-backed variables (Uniform, UniformConstant, StorageBuffer
// storage classes) keyed by (set, binding). Also reports, via *has_writable_descriptor, whether
// any of them can be written by the shader (see IsWritableDescriptorType).
static std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterfaceByDescriptorSlot(
    debug_report_data const *report_data, shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids,
    bool *has_writable_descriptor) {
    std::unordered_map<unsigned, unsigned> var_sets;         // variable id -> descriptor set
    std::unordered_map<unsigned, unsigned> var_bindings;     // variable id -> binding
    std::unordered_map<unsigned, unsigned> var_nonwritable;  // variable id -> decorated NonWritable

    for (auto insn : *src) {
        // All variables in the Uniform or UniformConstant storage classes are required to be decorated with both
        // DecorationDescriptorSet and DecorationBinding.
        if (insn.opcode() == spv::OpDecorate) {
            if (insn.word(2) == spv::DecorationDescriptorSet) {
                var_sets[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBinding) {
                var_bindings[insn.word(1)] = insn.word(3);
            }

            // Note: do toplevel DecorationNonWritable out here; it applies to
            // the OpVariable rather than the type.
            if (insn.word(2) == spv::DecorationNonWritable) {
                var_nonwritable[insn.word(1)] = 1;
            }
        }
    }

    std::vector<std::pair<descriptor_slot_t, interface_var>> out;

    for (auto id : accessible_ids) {
        auto insn = src->get_def(id);
        assert(insn != src->end());

        // OpVariable: word(1)=result type, word(2)=result id, word(3)=storage class.
        if (insn.opcode() == spv::OpVariable &&
            (insn.word(3) == spv::StorageClassUniform || insn.word(3) == spv::StorageClassUniformConstant ||
             insn.word(3) == spv::StorageClassStorageBuffer)) {
            // Missing decorations default to set 0 / binding 0.
            unsigned set = ValueOrDefault(var_sets, insn.word(2), 0);
            unsigned binding = ValueOrDefault(var_bindings, insn.word(2), 0);

            interface_var v = {};
            v.id = insn.word(2);
            v.type_id = insn.word(1);
            out.emplace_back(std::make_pair(set, binding), v);

            // Writable unless the variable itself is NonWritable, or its type is read-only.
            if (var_nonwritable.find(id) == var_nonwritable.end() &&
                IsWritableDescriptorType(src, insn.word(1), insn.word(3) == spv::StorageClassStorageBuffer)) {
                *has_writable_descriptor = true;
            }
        }
    }

    return out;
}
928
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600929static bool ValidateViConsistency(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi) {
Chris Forbes47567b72017-06-09 12:09:45 -0700930 // Walk the binding descriptions, which describe the step rate and stride of each vertex buffer. Each binding should
931 // be specified only once.
932 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
933 bool skip = false;
934
935 for (unsigned i = 0; i < vi->vertexBindingDescriptionCount; i++) {
936 auto desc = &vi->pVertexBindingDescriptions[i];
937 auto &binding = bindings[desc->binding];
938 if (binding) {
Dave Houlton78d09922018-05-17 15:48:45 -0600939 // TODO: "VUID-VkGraphicsPipelineCreateInfo-pStages-00742" perhaps?
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -0600940 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -0600941 kVUID_Core_Shader_InconsistentVi, "Duplicate vertex input binding descriptions for binding %d",
Chris Forbes47567b72017-06-09 12:09:45 -0700942 desc->binding);
943 } else {
944 binding = desc;
945 }
946 }
947
948 return skip;
949}
950
// Cross-checks the pipeline's vertex input attributes against the vertex shader's input
// interface. Walks the two sorted-by-location sequences in lockstep and reports:
//  - attributes with no consuming shader input (performance warning),
//  - shader inputs with no providing attribute (error),
//  - fundamental type mismatches between attribute format and input type (error).
static bool ValidateViAgainstVsInputs(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi,
                                      shader_module const *vs, spirv_inst_iter entrypoint) {
    bool skip = false;

    auto inputs = CollectInterfaceByLocation(vs, entrypoint, spv::StorageClassInput, false);

    // Build index by location
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->vertexAttributeDescriptionCount; i++) {
            // A single attribute may occupy several locations depending on its format.
            auto num_locations = GetLocationsConsumedByFormat(vi->pVertexAttributeDescriptions[i].format);
            for (auto j = 0u; j < num_locations; j++) {
                attribs[vi->pVertexAttributeDescriptions[i].location + j] = &vi->pVertexAttributeDescriptions[i];
            }
        }
    }

    auto it_a = attribs.begin();  // attributes, ordered by location
    auto it_b = inputs.begin();   // shader inputs, ordered by (location, component)
    bool used = false;            // whether the current attribute matched at least one input

    // Merge-walk both ordered sequences; advance whichever side is behind.
    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first.first;

        if (!a_at_end && (b_at_end || a_first < b_first)) {
            // Attribute location with no shader input at that location. Only warn if it was
            // never matched (it may already have matched an earlier component).
            if (!used &&
                log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                        "Vertex attribute at location %d not consumed by vertex shader", a_first)) {
                skip = true;
            }
            used = false;
            it_a++;
        } else if (!b_at_end && (a_at_end || b_first < a_first)) {
            // Shader input with no attribute feeding it.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                            "Vertex shader consumes input at location %d but not provided", b_first);
            it_b++;
        } else {
            unsigned attrib_type = GetFormatType(it_a->second->format);
            unsigned input_type = GetFundamentalType(vs, it_b->second.type_id);

            // Type checking
            if (!(attrib_type & input_type)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Attribute type of `%s` at location %d does not match vertex shader input type of `%s`",
                                string_VkFormat(it_a->second->format), a_first, DescribeType(vs, it_b->second.type_id).c_str());
            }

            // OK! Advance only the input side: several inputs (different components) may share
            // this attribute's location.
            used = true;
            it_b++;
        }
    }

    return skip;
}
1012
// Cross-checks the fragment shader's output interface against the subpass's color attachments.
// Reports outputs with no attachment, attachments not written (when their write mask is nonzero),
// type mismatches, and a missing alpha component at location 0 when alpha-to-coverage is enabled.
static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_data, shader_module const *fs,
                                               spirv_inst_iter entrypoint, PIPELINE_STATE const *pipeline, uint32_t subpass_index) {
    auto rpci = pipeline->rp_state->createInfo.ptr();

    // Map color attachment slot -> format, skipping unused/undefined attachments.
    std::map<uint32_t, VkFormat> color_attachments;
    auto subpass = rpci->pSubpasses[subpass_index];
    for (auto i = 0u; i < subpass.colorAttachmentCount; ++i) {
        uint32_t attachment = subpass.pColorAttachments[i].attachment;
        if (attachment == VK_ATTACHMENT_UNUSED) continue;
        if (rpci->pAttachments[attachment].format != VK_FORMAT_UNDEFINED) {
            color_attachments[i] = rpci->pAttachments[attachment].format;
        }
    }

    bool skip = false;

    // TODO: dual source blend index (spv::DecIndex, zero if not provided)

    auto outputs = CollectInterfaceByLocation(fs, entrypoint, spv::StorageClassOutput, false);

    auto it_a = outputs.begin();           // shader outputs, ordered by (location, component)
    auto it_b = color_attachments.begin(); // attachments, ordered by slot
    bool used = false;                     // whether the current attachment has been written
    bool alphaToCoverageEnabled = pipeline->graphicsPipelineCI.pMultisampleState != NULL &&
                                  pipeline->graphicsPipelineCI.pMultisampleState->alphaToCoverageEnable == VK_TRUE;
    bool locationZeroHasAlpha = false;

    // Walk attachment list and outputs together

    while ((outputs.size() > 0 && it_a != outputs.end()) || (color_attachments.size() > 0 && it_b != color_attachments.end())) {
        bool a_at_end = outputs.size() == 0 || it_a == outputs.end();
        bool b_at_end = color_attachments.size() == 0 || it_b == color_attachments.end();

        // A 4-component output at location 0 supplies the alpha used by alpha-to-coverage.
        if (!a_at_end && it_a->first.first == 0 && fs->get_def(it_a->second.type_id) != fs->end() &&
            GetComponentsConsumedByType(fs, it_a->second.type_id, false) == 4)
            locationZeroHasAlpha = true;

        if (!a_at_end && (b_at_end || it_a->first.first < it_b->first)) {
            // Output with no matching attachment. Location 0 is exempt when alpha-to-coverage
            // is enabled, since it may exist solely to provide alpha.
            if (!alphaToCoverageEnabled || it_a->first.first != 0) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                                "fragment shader writes to output location %d with no matching attachment", it_a->first.first);
            }
            it_a++;
        } else if (!b_at_end && (a_at_end || it_a->first.first > it_b->first)) {
            // Only complain if there are unmasked channels for this attachment. If the writemask is 0, it's acceptable for the
            // shader to not produce a matching output.
            if (!used) {
                if (pipeline->attachments[it_b->first].colorWriteMask != 0) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                                    "Attachment %d not written by fragment shader; undefined values will be written to attachment",
                                    it_b->first);
                }
            }
            used = false;
            it_b++;
        } else {
            unsigned output_type = GetFundamentalType(fs, it_a->second.type_id);
            unsigned att_type = GetFormatType(it_b->second);

            // Type checking
            if (!(output_type & att_type)) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                    "Attachment %d of type `%s` does not match fragment shader output type of `%s`; resulting values are undefined",
                    it_b->first, string_VkFormat(it_b->second), DescribeType(fs, it_a->second.type_id).c_str());
            }

            // OK! Advance only the output side: several outputs (different components) may
            // target the same attachment slot, so keep the attachment until the location moves on.
            it_a++;
            used = true;
        }
    }

    if (alphaToCoverageEnabled && !locationZeroHasAlpha) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_NoAlphaAtLocation0WithAlphaToCoverage,
                        "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
    }

    return skip;
}
1097
// For PointSize analysis we need to know if the variable decorated with the PointSize built-in was actually written to.
// This function examines instructions in the static call tree for a write to this variable.
//
// builtin_instr is the OpDecorate/OpMemberDecorate instruction carrying the PointSize decoration;
// entrypoint is the OpEntryPoint instruction whose call tree is searched.
static bool IsPointSizeWritten(shader_module const *src, spirv_inst_iter builtin_instr, spirv_inst_iter entrypoint) {
    auto type = builtin_instr.opcode();
    uint32_t target_id = builtin_instr.word(1);  // Initially: the decorated id (struct type or variable)
    bool init_complete = false;

    if (type == spv::OpMemberDecorate) {
        // Built-in is part of a structure -- examine instructions up to first function body to get initial IDs.
        // Map decorated struct type -> output pointer type -> the OpVariable's result id.
        auto insn = entrypoint;
        while (!init_complete && (insn.opcode() != spv::OpFunction)) {
            switch (insn.opcode()) {
                case spv::OpTypePointer:
                    // Pointer-to-Output whose pointee is our struct type.
                    if ((insn.word(3) == target_id) && (insn.word(2) == spv::StorageClassOutput)) {
                        target_id = insn.word(1);
                    }
                    break;
                case spv::OpVariable:
                    // Variable of that pointer type; its result id is what stores address.
                    if (insn.word(1) == target_id) {
                        target_id = insn.word(2);
                        init_complete = true;
                    }
                    break;
            }
            insn++;
        }
    }

    // No output variable of the decorated struct type exists: it cannot have been written.
    if (!init_complete && (type == spv::OpMemberDecorate)) return false;

    bool found_write = false;
    std::unordered_set<uint32_t> worklist;
    worklist.insert(entrypoint.word(2));  // entrypoint's function id

    // Follow instructions in call graph looking for writes to target
    while (!worklist.empty() && !found_write) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            continue;
        }

        if (insn.opcode() == spv::OpFunction) {
            // Scan body of function looking for other function calls or items in our ID chain
            while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                switch (insn.opcode()) {
                    case spv::OpAccessChain:
                        if (insn.word(3) == target_id) {
                            if (type == spv::OpMemberDecorate) {
                                // For a struct member, only the access chain selecting the decorated
                                // member index continues the chain.
                                auto value = GetConstantValue(src, insn.word(4));
                                if (value == builtin_instr.word(2)) {
                                    target_id = insn.word(2);
                                }
                            } else {
                                target_id = insn.word(2);
                            }
                        }
                        break;
                    case spv::OpStore:
                        if (insn.word(1) == target_id) {
                            found_write = true;
                        }
                        break;
                    case spv::OpFunctionCall:
                        worklist.insert(insn.word(3));  // Called function id
                        break;
                }
            }
        }
    }
    return found_write;
}
1173
// For some analyses, we need to know about all ids referenced by the static call tree of a particular entrypoint. This is
// important for identifying the set of shader resources actually used by an entrypoint, for example.
// Note: we only explore parts of the image which might actually contain ids we care about for the above analyses.
// - NOT the shader input/output interfaces.
//
// TODO: The set of interesting opcodes here was determined by eyeballing the SPIRV spec. It might be worth
// converting parts of this to be generated from the machine-readable spec instead.
static std::unordered_set<uint32_t> MarkAccessibleIds(shader_module const *src, spirv_inst_iter entrypoint) {
    std::unordered_set<uint32_t> ids;       // Result: all ids reachable from the entrypoint
    std::unordered_set<uint32_t> worklist;  // Ids still to be visited
    worklist.insert(entrypoint.word(2));    // Start at the entrypoint's function id

    while (!worklist.empty()) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            // ID is something we didn't collect in BuildDefIndex. that's OK -- we'll stumble across all kinds of things here
            // that we may not care about.
            continue;
        }

        // Try to add to the output set
        if (!ids.insert(id).second) {
            continue;  // If we already saw this id, we don't want to walk it again.
        }

        switch (insn.opcode()) {
            case spv::OpFunction:
                // Scan whole body of the function, enlisting anything interesting
                while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                    switch (insn.opcode()) {
                        // Loads and read-modify-write atomics: the pointer operand is word 3.
                        case spv::OpLoad:
                        case spv::OpAtomicLoad:
                        case spv::OpAtomicExchange:
                        case spv::OpAtomicCompareExchange:
                        case spv::OpAtomicCompareExchangeWeak:
                        case spv::OpAtomicIIncrement:
                        case spv::OpAtomicIDecrement:
                        case spv::OpAtomicIAdd:
                        case spv::OpAtomicISub:
                        case spv::OpAtomicSMin:
                        case spv::OpAtomicUMin:
                        case spv::OpAtomicSMax:
                        case spv::OpAtomicUMax:
                        case spv::OpAtomicAnd:
                        case spv::OpAtomicOr:
                        case spv::OpAtomicXor:
                            worklist.insert(insn.word(3));  // ptr
                            break;
                        // Stores have the pointer operand first (word 1) -- no result id.
                        case spv::OpStore:
                        case spv::OpAtomicStore:
                            worklist.insert(insn.word(1));  // ptr
                            break;
                        case spv::OpAccessChain:
                        case spv::OpInBoundsAccessChain:
                            worklist.insert(insn.word(3));  // base ptr
                            break;
                        // Image/sampler operations: the image or sampled image operand is word 3.
                        case spv::OpSampledImage:
                        case spv::OpImageSampleImplicitLod:
                        case spv::OpImageSampleExplicitLod:
                        case spv::OpImageSampleDrefImplicitLod:
                        case spv::OpImageSampleDrefExplicitLod:
                        case spv::OpImageSampleProjImplicitLod:
                        case spv::OpImageSampleProjExplicitLod:
                        case spv::OpImageSampleProjDrefImplicitLod:
                        case spv::OpImageSampleProjDrefExplicitLod:
                        case spv::OpImageFetch:
                        case spv::OpImageGather:
                        case spv::OpImageDrefGather:
                        case spv::OpImageRead:
                        case spv::OpImage:
                        case spv::OpImageQueryFormat:
                        case spv::OpImageQueryOrder:
                        case spv::OpImageQuerySizeLod:
                        case spv::OpImageQuerySize:
                        case spv::OpImageQueryLod:
                        case spv::OpImageQueryLevels:
                        case spv::OpImageQuerySamples:
                        case spv::OpImageSparseSampleImplicitLod:
                        case spv::OpImageSparseSampleExplicitLod:
                        case spv::OpImageSparseSampleDrefImplicitLod:
                        case spv::OpImageSparseSampleDrefExplicitLod:
                        case spv::OpImageSparseSampleProjImplicitLod:
                        case spv::OpImageSparseSampleProjExplicitLod:
                        case spv::OpImageSparseSampleProjDrefImplicitLod:
                        case spv::OpImageSparseSampleProjDrefExplicitLod:
                        case spv::OpImageSparseFetch:
                        case spv::OpImageSparseGather:
                        case spv::OpImageSparseDrefGather:
                        case spv::OpImageTexelPointer:
                            worklist.insert(insn.word(3));  // Image or sampled image
                            break;
                        case spv::OpImageWrite:
                            worklist.insert(insn.word(1));  // Image -- different operand order to above
                            break;
                        case spv::OpFunctionCall:
                            for (uint32_t i = 3; i < insn.len(); i++) {
                                worklist.insert(insn.word(i));  // fn itself, and all args
                            }
                            break;

                        case spv::OpExtInst:
                            for (uint32_t i = 5; i < insn.len(); i++) {
                                worklist.insert(insn.word(i));  // Operands to ext inst
                            }
                            break;
                    }
                }
                break;
        }
    }

    return ids;
}
1291
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001292static bool ValidatePushConstantBlockAgainstPipeline(debug_report_data const *report_data,
1293 std::vector<VkPushConstantRange> const *push_constant_ranges,
1294 shader_module const *src, spirv_inst_iter type, VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001295 bool skip = false;
1296
1297 // Strip off ptrs etc
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001298 type = GetStructType(src, type, false);
Chris Forbes47567b72017-06-09 12:09:45 -07001299 assert(type != src->end());
1300
1301 // Validate directly off the offsets. this isn't quite correct for arrays and matrices, but is a good first step.
1302 // TODO: arrays, matrices, weird sizes
1303 for (auto insn : *src) {
1304 if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
1305 if (insn.word(3) == spv::DecorationOffset) {
1306 unsigned offset = insn.word(4);
1307 auto size = 4; // Bytes; TODO: calculate this based on the type
1308
1309 bool found_range = false;
1310 for (auto const &range : *push_constant_ranges) {
1311 if (range.offset <= offset && range.offset + range.size >= offset + size) {
1312 found_range = true;
1313
1314 if ((range.stageFlags & stage) == 0) {
Dave Houltona9df0ce2018-02-07 10:51:23 -07001315 skip |=
1316 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001317 kVUID_Core_Shader_PushConstantNotAccessibleFromStage,
Dave Houltona9df0ce2018-02-07 10:51:23 -07001318 "Push constant range covering variable starting at offset %u not accessible from stage %s",
1319 offset, string_VkShaderStageFlagBits(stage));
Chris Forbes47567b72017-06-09 12:09:45 -07001320 }
1321
1322 break;
1323 }
1324 }
1325
1326 if (!found_range) {
1327 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001328 kVUID_Core_Shader_PushConstantOutOfRange,
Dave Houltona9df0ce2018-02-07 10:51:23 -07001329 "Push constant range covering variable starting at offset %u not declared in layout", offset);
Chris Forbes47567b72017-06-09 12:09:45 -07001330 }
1331 }
1332 }
1333 }
1334
1335 return skip;
1336}
1337
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001338static bool ValidatePushConstantUsage(debug_report_data const *report_data,
1339 std::vector<VkPushConstantRange> const *push_constant_ranges, shader_module const *src,
1340 std::unordered_set<uint32_t> accessible_ids, VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001341 bool skip = false;
1342
1343 for (auto id : accessible_ids) {
1344 auto def_insn = src->get_def(id);
1345 if (def_insn.opcode() == spv::OpVariable && def_insn.word(3) == spv::StorageClassPushConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001346 skip |= ValidatePushConstantBlockAgainstPipeline(report_data, push_constant_ranges, src, src->get_def(def_insn.word(1)),
1347 stage);
Chris Forbes47567b72017-06-09 12:09:45 -07001348 }
1349 }
1350
1351 return skip;
1352}
1353
1354// Validate that data for each specialization entry is fully contained within the buffer.
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001355static bool ValidateSpecializationOffsets(debug_report_data const *report_data, VkPipelineShaderStageCreateInfo const *info) {
Chris Forbes47567b72017-06-09 12:09:45 -07001356 bool skip = false;
1357
1358 VkSpecializationInfo const *spec = info->pSpecializationInfo;
1359
1360 if (spec) {
1361 for (auto i = 0u; i < spec->mapEntryCount; i++) {
Dave Houlton78d09922018-05-17 15:48:45 -06001362 // TODO: This is a good place for "VUID-VkSpecializationInfo-offset-00773".
Chris Forbes47567b72017-06-09 12:09:45 -07001363 if (spec->pMapEntries[i].offset + spec->pMapEntries[i].size > spec->dataSize) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001364 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
Dave Houlton78d09922018-05-17 15:48:45 -06001365 "VUID-VkSpecializationInfo-pMapEntries-00774",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001366 "Specialization entry %u (for constant id %u) references memory outside provided specialization "
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001367 "data (bytes %u.." PRINTF_SIZE_T_SPECIFIER "; " PRINTF_SIZE_T_SPECIFIER " bytes provided)..",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001368 i, spec->pMapEntries[i].constantID, spec->pMapEntries[i].offset,
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001369 spec->pMapEntries[i].offset + spec->pMapEntries[i].size - 1, spec->dataSize);
Chris Forbes47567b72017-06-09 12:09:45 -07001370 }
1371 }
1372 }
1373
1374 return skip;
1375}
1376
Jeff Bolz38b3ce72018-09-19 12:53:38 -05001377// TODO (jbolz): Can this return a const reference?
Jeff Bolze54ae892018-09-08 12:16:29 -05001378static std::set<uint32_t> TypeToDescriptorTypeSet(shader_module const *module, uint32_t type_id, unsigned &descriptor_count) {
Chris Forbes47567b72017-06-09 12:09:45 -07001379 auto type = module->get_def(type_id);
Chris Forbes9f89d752018-03-07 12:57:48 -08001380 bool is_storage_buffer = false;
Chris Forbes47567b72017-06-09 12:09:45 -07001381 descriptor_count = 1;
Jeff Bolze54ae892018-09-08 12:16:29 -05001382 std::set<uint32_t> ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001383
1384 // Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
Jeff Bolzfdf96072018-04-10 14:32:18 -05001385 while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
1386 if (type.opcode() == spv::OpTypeRuntimeArray) {
1387 descriptor_count = 0;
1388 type = module->get_def(type.word(2));
1389 } else if (type.opcode() == spv::OpTypeArray) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001390 descriptor_count *= GetConstantValue(module, type.word(3));
Chris Forbes47567b72017-06-09 12:09:45 -07001391 type = module->get_def(type.word(2));
1392 } else {
Chris Forbes9f89d752018-03-07 12:57:48 -08001393 if (type.word(2) == spv::StorageClassStorageBuffer) {
1394 is_storage_buffer = true;
1395 }
Chris Forbes47567b72017-06-09 12:09:45 -07001396 type = module->get_def(type.word(3));
1397 }
1398 }
1399
1400 switch (type.opcode()) {
1401 case spv::OpTypeStruct: {
1402 for (auto insn : *module) {
1403 if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
1404 if (insn.word(2) == spv::DecorationBlock) {
Chris Forbes9f89d752018-03-07 12:57:48 -08001405 if (is_storage_buffer) {
Jeff Bolze54ae892018-09-08 12:16:29 -05001406 ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
1407 ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
1408 return ret;
Chris Forbes9f89d752018-03-07 12:57:48 -08001409 } else {
Jeff Bolze54ae892018-09-08 12:16:29 -05001410 ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
1411 ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
1412 ret.insert(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
1413 return ret;
Chris Forbes9f89d752018-03-07 12:57:48 -08001414 }
Chris Forbes47567b72017-06-09 12:09:45 -07001415 } else if (insn.word(2) == spv::DecorationBufferBlock) {
Jeff Bolze54ae892018-09-08 12:16:29 -05001416 ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
1417 ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
1418 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001419 }
1420 }
1421 }
1422
1423 // Invalid
Jeff Bolze54ae892018-09-08 12:16:29 -05001424 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001425 }
1426
1427 case spv::OpTypeSampler:
Jeff Bolze54ae892018-09-08 12:16:29 -05001428 ret.insert(VK_DESCRIPTOR_TYPE_SAMPLER);
1429 ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
1430 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001431
Chris Forbes73c00bf2018-06-22 16:28:06 -07001432 case spv::OpTypeSampledImage: {
1433 // Slight relaxation for some GLSL historical madness: samplerBuffer doesn't really have a sampler, and a texel
1434 // buffer descriptor doesn't really provide one. Allow this slight mismatch.
1435 auto image_type = module->get_def(type.word(2));
1436 auto dim = image_type.word(3);
1437 auto sampled = image_type.word(7);
1438 if (dim == spv::DimBuffer && sampled == 1) {
Jeff Bolze54ae892018-09-08 12:16:29 -05001439 ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
1440 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001441 }
Chris Forbes73c00bf2018-06-22 16:28:06 -07001442 }
Jeff Bolze54ae892018-09-08 12:16:29 -05001443 ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
1444 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001445
1446 case spv::OpTypeImage: {
1447 // Many descriptor types backing image types-- depends on dimension and whether the image will be used with a sampler.
1448 // SPIRV for Vulkan requires that sampled be 1 or 2 -- leaving the decision to runtime is unacceptable.
1449 auto dim = type.word(3);
1450 auto sampled = type.word(7);
1451
1452 if (dim == spv::DimSubpassData) {
Jeff Bolze54ae892018-09-08 12:16:29 -05001453 ret.insert(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
1454 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001455 } else if (dim == spv::DimBuffer) {
1456 if (sampled == 1) {
Jeff Bolze54ae892018-09-08 12:16:29 -05001457 ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
1458 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001459 } else {
Jeff Bolze54ae892018-09-08 12:16:29 -05001460 ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
1461 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001462 }
1463 } else if (sampled == 1) {
Jeff Bolze54ae892018-09-08 12:16:29 -05001464 ret.insert(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
1465 ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
1466 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001467 } else {
Jeff Bolze54ae892018-09-08 12:16:29 -05001468 ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
1469 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001470 }
1471 }
Shannon McPherson0fa28232018-11-01 11:59:02 -06001472 case spv::OpTypeAccelerationStructureNV:
Eric Werness30127fd2018-10-31 21:01:03 -07001473 ret.insert(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
Jeff Bolz105d6492018-09-29 15:46:44 -05001474 return ret;
Chris Forbes47567b72017-06-09 12:09:45 -07001475
1476 // We shouldn't really see any other junk types -- but if we do, they're a mismatch.
1477 default:
Jeff Bolze54ae892018-09-08 12:16:29 -05001478 return ret; // Matches nothing
Chris Forbes47567b72017-06-09 12:09:45 -07001479 }
1480}
1481
Jeff Bolze54ae892018-09-08 12:16:29 -05001482static std::string string_descriptorTypes(const std::set<uint32_t> &descriptor_types) {
Chris Forbes73c00bf2018-06-22 16:28:06 -07001483 std::stringstream ss;
Jeff Bolze54ae892018-09-08 12:16:29 -05001484 for (auto it = descriptor_types.begin(); it != descriptor_types.end(); ++it) {
1485 if (ss.tellp()) ss << ", ";
1486 ss << string_VkDescriptorType(VkDescriptorType(*it));
Chris Forbes73c00bf2018-06-22 16:28:06 -07001487 }
1488 return ss.str();
1489}
1490
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001491static bool RequireFeature(debug_report_data const *report_data, VkBool32 feature, char const *feature_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001492 if (!feature) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001493 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001494 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires %s but is not enabled on the device", feature_name)) {
Chris Forbes47567b72017-06-09 12:09:45 -07001495 return true;
1496 }
1497 }
1498
1499 return false;
1500}
1501
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001502static bool RequireExtension(debug_report_data const *report_data, bool extension, char const *extension_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001503 if (!extension) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001504 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001505 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires extension %s but is not enabled on the device",
Chris Forbes47567b72017-06-09 12:09:45 -07001506 extension_name)) {
1507 return true;
1508 }
1509 }
1510
1511 return false;
1512}
1513
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07001514bool CoreChecks::ValidateShaderCapabilities(shader_module const *src, VkShaderStageFlagBits stage, bool has_writable_descriptor) {
Chris Forbes47567b72017-06-09 12:09:45 -07001515 bool skip = false;
1516
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001517 struct FeaturePointer {
1518 // Callable object to test if this feature is enabled in the given aggregate feature struct
1519 const std::function<VkBool32(const DeviceFeatures &)> IsEnabled;
1520
1521 // Test if feature pointer is populated
1522 explicit operator bool() const { return static_cast<bool>(IsEnabled); }
1523
1524 // Default and nullptr constructor to create an empty FeaturePointer
1525 FeaturePointer() : IsEnabled(nullptr) {}
1526 FeaturePointer(std::nullptr_t ptr) : IsEnabled(nullptr) {}
1527
1528 // Constructors to populate FeaturePointer based on given pointer to member
1529 FeaturePointer(VkBool32 VkPhysicalDeviceFeatures::*ptr)
1530 : IsEnabled([=](const DeviceFeatures &features) { return features.core.*ptr; }) {}
1531 FeaturePointer(VkBool32 VkPhysicalDeviceDescriptorIndexingFeaturesEXT::*ptr)
1532 : IsEnabled([=](const DeviceFeatures &features) { return features.descriptor_indexing.*ptr; }) {}
1533 FeaturePointer(VkBool32 VkPhysicalDevice8BitStorageFeaturesKHR::*ptr)
1534 : IsEnabled([=](const DeviceFeatures &features) { return features.eight_bit_storage.*ptr; }) {}
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001535 FeaturePointer(VkBool32 VkPhysicalDeviceTransformFeedbackFeaturesEXT::*ptr)
1536 : IsEnabled([=](const DeviceFeatures &features) { return features.transform_feedback_features.*ptr; }) {}
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001537 FeaturePointer(VkBool32 VkPhysicalDeviceFloat16Int8FeaturesKHR::*ptr)
1538 : IsEnabled([=](const DeviceFeatures &features) { return features.float16_int8.*ptr; }) {}
Tobias Hector6a0ece72018-12-10 12:24:05 +00001539 FeaturePointer(VkBool32 VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::*ptr)
1540 : IsEnabled([=](const DeviceFeatures &features) { return features.scalar_block_layout_features.*ptr; }) {}
Jeff Bolze4356752019-03-07 11:23:46 -06001541 FeaturePointer(VkBool32 VkPhysicalDeviceCooperativeMatrixFeaturesNV::*ptr)
1542 : IsEnabled([=](const DeviceFeatures &features) { return features.cooperative_matrix_features.*ptr; }) {}
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00001543 FeaturePointer(VkBool32 VkPhysicalDeviceFloatControlsPropertiesKHR::*ptr)
1544 : IsEnabled([=](const DeviceFeatures &features) { return features.float_controls.*ptr; }) {}
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001545 };
1546
Chris Forbes47567b72017-06-09 12:09:45 -07001547 struct CapabilityInfo {
1548 char const *name;
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001549 FeaturePointer feature;
1550 bool DeviceExtensions::*extension;
Chris Forbes47567b72017-06-09 12:09:45 -07001551 };
1552
Chris Forbes47567b72017-06-09 12:09:45 -07001553 // clang-format off
Dave Houltoneb10ea82017-12-22 12:21:50 -07001554 static const std::unordered_multimap<uint32_t, CapabilityInfo> capabilities = {
Chris Forbes47567b72017-06-09 12:09:45 -07001555 // Capabilities always supported by a Vulkan 1.0 implementation -- no
1556 // feature bits.
1557 {spv::CapabilityMatrix, {nullptr}},
1558 {spv::CapabilityShader, {nullptr}},
1559 {spv::CapabilityInputAttachment, {nullptr}},
1560 {spv::CapabilitySampled1D, {nullptr}},
1561 {spv::CapabilityImage1D, {nullptr}},
1562 {spv::CapabilitySampledBuffer, {nullptr}},
1563 {spv::CapabilityImageQuery, {nullptr}},
1564 {spv::CapabilityDerivativeControl, {nullptr}},
1565
1566 // Capabilities that are optionally supported, but require a feature to
1567 // be enabled on the device
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001568 {spv::CapabilityGeometry, {"VkPhysicalDeviceFeatures::geometryShader", &VkPhysicalDeviceFeatures::geometryShader}},
1569 {spv::CapabilityTessellation, {"VkPhysicalDeviceFeatures::tessellationShader", &VkPhysicalDeviceFeatures::tessellationShader}},
1570 {spv::CapabilityFloat64, {"VkPhysicalDeviceFeatures::shaderFloat64", &VkPhysicalDeviceFeatures::shaderFloat64}},
1571 {spv::CapabilityInt64, {"VkPhysicalDeviceFeatures::shaderInt64", &VkPhysicalDeviceFeatures::shaderInt64}},
1572 {spv::CapabilityTessellationPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1573 {spv::CapabilityGeometryPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1574 {spv::CapabilityImageGatherExtended, {"VkPhysicalDeviceFeatures::shaderImageGatherExtended", &VkPhysicalDeviceFeatures::shaderImageGatherExtended}},
1575 {spv::CapabilityStorageImageMultisample, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
1576 {spv::CapabilityUniformBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing}},
1577 {spv::CapabilitySampledImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing}},
1578 {spv::CapabilityStorageBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1579 {spv::CapabilityStorageImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1580 {spv::CapabilityClipDistance, {"VkPhysicalDeviceFeatures::shaderClipDistance", &VkPhysicalDeviceFeatures::shaderClipDistance}},
1581 {spv::CapabilityCullDistance, {"VkPhysicalDeviceFeatures::shaderCullDistance", &VkPhysicalDeviceFeatures::shaderCullDistance}},
1582 {spv::CapabilityImageCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1583 {spv::CapabilitySampleRateShading, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1584 {spv::CapabilitySparseResidency, {"VkPhysicalDeviceFeatures::shaderResourceResidency", &VkPhysicalDeviceFeatures::shaderResourceResidency}},
1585 {spv::CapabilityMinLod, {"VkPhysicalDeviceFeatures::shaderResourceMinLod", &VkPhysicalDeviceFeatures::shaderResourceMinLod}},
1586 {spv::CapabilitySampledCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1587 {spv::CapabilityImageMSArray, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
1588 {spv::CapabilityStorageImageExtendedFormats, {"VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats", &VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats}},
1589 {spv::CapabilityInterpolationFunction, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1590 {spv::CapabilityStorageImageReadWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat}},
1591 {spv::CapabilityStorageImageWriteWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat}},
1592 {spv::CapabilityMultiViewport, {"VkPhysicalDeviceFeatures::multiViewport", &VkPhysicalDeviceFeatures::multiViewport}},
Jeff Bolzfdf96072018-04-10 14:32:18 -05001593
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001594 {spv::CapabilityShaderNonUniformEXT, {VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_descriptor_indexing}},
1595 {spv::CapabilityRuntimeDescriptorArrayEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray}},
1596 {spv::CapabilityInputAttachmentArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing}},
1597 {spv::CapabilityUniformTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing}},
1598 {spv::CapabilityStorageTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing}},
1599 {spv::CapabilityUniformBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing}},
1600 {spv::CapabilitySampledImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing}},
1601 {spv::CapabilityStorageBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing}},
1602 {spv::CapabilityStorageImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing}},
1603 {spv::CapabilityInputAttachmentArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing}},
1604 {spv::CapabilityUniformTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing}},
1605 {spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT , {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing}},
Chris Forbes47567b72017-06-09 12:09:45 -07001606
1607 // Capabilities that require an extension
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001608 {spv::CapabilityDrawParameters, {VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_draw_parameters}},
1609 {spv::CapabilityGeometryShaderPassthroughNV, {VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_geometry_shader_passthrough}},
1610 {spv::CapabilitySampleMaskOverrideCoverageNV, {VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_sample_mask_override_coverage}},
1611 {spv::CapabilityShaderViewportIndexLayerEXT, {VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_viewport_index_layer}},
1612 {spv::CapabilityShaderViewportIndexLayerNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1613 {spv::CapabilityShaderViewportMaskNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1614 {spv::CapabilitySubgroupBallotKHR, {VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_ballot }},
1615 {spv::CapabilitySubgroupVoteKHR, {VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_vote }},
aqnuep7033c702018-09-11 18:03:29 +02001616 {spv::CapabilityInt64Atomics, {VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_atomic_int64 }},
Alexander Galazin3bd8e342018-06-14 15:49:07 +02001617
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001618 {spv::CapabilityStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1619 {spv::CapabilityUniformAndStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1620 {spv::CapabilityStoragePushConstant8 , {"VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8", &VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8, &DeviceExtensions::vk_khr_8bit_storage}},
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001621
1622 {spv::CapabilityTransformFeedback , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback, &DeviceExtensions::vk_ext_transform_feedback}},
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001623 {spv::CapabilityGeometryStreams , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams, &DeviceExtensions::vk_ext_transform_feedback}},
1624
1625 {spv::CapabilityFloat16 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16, &DeviceExtensions::vk_khr_shader_float16_int8}},
1626 {spv::CapabilityInt8 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8, &DeviceExtensions::vk_khr_shader_float16_int8}},
Jeff Bolze4356752019-03-07 11:23:46 -06001627
1628 {spv::CapabilityCooperativeMatrixNV, {"VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix", &VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix, &DeviceExtensions::vk_nv_cooperative_matrix}},
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00001629
1630 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1631 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1632 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1633 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1634 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1635 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1636 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1637 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1638 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1639 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1640 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1641 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1642 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1643 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1644 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
Chris Forbes47567b72017-06-09 12:09:45 -07001645 };
1646 // clang-format on
1647
1648 for (auto insn : *src) {
1649 if (insn.opcode() == spv::OpCapability) {
Dave Houltoneb10ea82017-12-22 12:21:50 -07001650 size_t n = capabilities.count(insn.word(1));
1651 if (1 == n) { // key occurs exactly once
1652 auto it = capabilities.find(insn.word(1));
1653 if (it != capabilities.end()) {
1654 if (it->second.feature) {
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001655 skip |= RequireFeature(report_data, it->second.feature.IsEnabled(enabled_features), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001656 }
1657 if (it->second.extension) {
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06001658 skip |= RequireExtension(report_data, device_extensions.*(it->second.extension), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001659 }
Chris Forbes47567b72017-06-09 12:09:45 -07001660 }
Dave Houltoneb10ea82017-12-22 12:21:50 -07001661 } else if (1 < n) { // key occurs multiple times, at least one must be enabled
1662 bool needs_feature = false, has_feature = false;
1663 bool needs_ext = false, has_ext = false;
1664 std::string feature_names = "(one of) [ ";
1665 std::string extension_names = feature_names;
1666 auto caps = capabilities.equal_range(insn.word(1));
1667 for (auto it = caps.first; it != caps.second; ++it) {
1668 if (it->second.feature) {
1669 needs_feature = true;
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001670 has_feature = has_feature || it->second.feature.IsEnabled(enabled_features);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001671 feature_names += it->second.name;
1672 feature_names += " ";
1673 }
1674 if (it->second.extension) {
1675 needs_ext = true;
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06001676 has_ext = has_ext || device_extensions.*(it->second.extension);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001677 extension_names += it->second.name;
1678 extension_names += " ";
1679 }
1680 }
1681 if (needs_feature) {
1682 feature_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001683 skip |= RequireFeature(report_data, has_feature, feature_names.c_str());
Dave Houltoneb10ea82017-12-22 12:21:50 -07001684 }
1685 if (needs_ext) {
1686 extension_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001687 skip |= RequireExtension(report_data, has_ext, extension_names.c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07001688 }
1689 }
1690 }
1691 }
1692
Chris Forbes349b3132018-03-07 11:38:08 -08001693 if (has_writable_descriptor) {
1694 switch (stage) {
1695 case VK_SHADER_STAGE_COMPUTE_BIT:
Jeff Bolz148d94e2018-12-13 21:25:56 -06001696 case VK_SHADER_STAGE_RAYGEN_BIT_NV:
1697 case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
1698 case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
1699 case VK_SHADER_STAGE_MISS_BIT_NV:
1700 case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
1701 case VK_SHADER_STAGE_CALLABLE_BIT_NV:
1702 case VK_SHADER_STAGE_TASK_BIT_NV:
1703 case VK_SHADER_STAGE_MESH_BIT_NV:
Chris Forbes349b3132018-03-07 11:38:08 -08001704 /* No feature requirements for writes and atomics from compute
Jeff Bolz148d94e2018-12-13 21:25:56 -06001705 * raytracing, or mesh stages */
Chris Forbes349b3132018-03-07 11:38:08 -08001706 break;
1707 case VK_SHADER_STAGE_FRAGMENT_BIT:
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001708 skip |= RequireFeature(report_data, enabled_features.core.fragmentStoresAndAtomics, "fragmentStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001709 break;
1710 default:
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001711 skip |= RequireFeature(report_data, enabled_features.core.vertexPipelineStoresAndAtomics,
1712 "vertexPipelineStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001713 break;
1714 }
1715 }
1716
Chris Forbes47567b72017-06-09 12:09:45 -07001717 return skip;
1718}
1719
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001720static bool VariableIsBuiltIn(shader_module const *src, const uint32_t ID, std::vector<uint32_t> const &builtInBlockIDs,
1721 std::vector<uint32_t> const &builtInIDs) {
1722 auto insn = src->get_def(ID);
1723
1724 switch (insn.opcode()) {
1725 case spv::OpVariable: {
1726 // First check if the variable is a "pure" built-in type, e.g. gl_ViewportIndex
1727 uint32_t ID = insn.word(2);
1728 for (auto builtInID : builtInIDs) {
1729 if (ID == builtInID) {
1730 return true;
1731 }
1732 }
1733
Ari Suonpaa89c60822019-03-25 14:13:02 +02001734 return VariableIsBuiltIn(src, insn.word(1), builtInBlockIDs, builtInIDs);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001735 }
1736 case spv::OpTypePointer:
Ari Suonpaa89c60822019-03-25 14:13:02 +02001737 return VariableIsBuiltIn(src, insn.word(3), builtInBlockIDs, builtInIDs);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001738 case spv::OpTypeArray:
Ari Suonpaa89c60822019-03-25 14:13:02 +02001739 return VariableIsBuiltIn(src, insn.word(2), builtInBlockIDs, builtInIDs);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001740 case spv::OpTypeStruct: {
1741 uint32_t ID = insn.word(1); // We only need to check the first member as either all will be, or none will be built-in
1742 for (auto builtInBlockID : builtInBlockIDs) {
1743 if (ID == builtInBlockID) {
1744 return true;
1745 }
1746 }
1747 return false;
1748 }
1749 default:
1750 return false;
1751 }
1752
1753 return false;
1754}
1755
// Validate that the number of input/output interface components consumed by this shader stage
// stays within the corresponding VkPhysicalDeviceLimits for that stage. Variables that are
// built-ins (or whose type is a block containing built-in members, e.g. gl_PerVertex) are
// excluded from the component counts. Returns true if a limit is exceeded (an error is logged).
bool CoreChecks::ValidateShaderStageInputOutputLimits(shader_module const *src, VkPipelineShaderStageCreateInfo const *pStage,
                                                      PIPELINE_STATE *pipeline) {
    // Compute and the ALL/ALL_GRAPHICS pseudo-stages have no per-stage interface limits checked here.
    if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT || pStage->stage == VK_SHADER_STAGE_ALL_GRAPHICS ||
        pStage->stage == VK_SHADER_STAGE_ALL) {
        return false;
    }

    bool skip = false;
    auto const &limits = phys_dev_props.limits;

    // IDs of struct types that have at least one BuiltIn-decorated member.
    std::vector<uint32_t> builtInBlockIDs;
    // IDs decorated Block (whose members are built-in) or directly decorated BuiltIn.
    std::vector<uint32_t> builtInIDs;
    // Minimal record of an interface (Input/Output storage class) variable.
    struct Variable {
        uint32_t baseTypePtrID;  // result-type ID of the OpVariable (a pointer type)
        uint32_t ID;             // result ID of the OpVariable
        uint32_t storageClass;   // spv::StorageClassInput or spv::StorageClassOutput
    };
    std::vector<Variable> variables;

    // Single pass over the module: collect built-in decorations and interface variables.
    // NOTE: decorations appear before OpVariable/type definitions in a valid SPIR-V module,
    // so builtInBlockIDs is fully populated before it is consulted below.
    for (auto insn : *src) {
        switch (insn.opcode()) {
            // Find all built-in member decorations
            case spv::OpMemberDecorate:
                if (insn.word(3) == spv::DecorationBuiltIn) {
                    builtInBlockIDs.push_back(insn.word(1));
                }
                break;
            // Find all built-in decorations
            case spv::OpDecorate:
                switch (insn.word(2)) {
                    case spv::DecorationBlock: {
                        uint32_t blockID = insn.word(1);
                        for (auto builtInBlockID : builtInBlockIDs) {
                            // Check if one of the members of the block are built-in -> the block is built-in
                            if (blockID == builtInBlockID) {
                                builtInIDs.push_back(blockID);
                                break;
                            }
                        }
                        break;
                    }
                    case spv::DecorationBuiltIn:
                        builtInIDs.push_back(insn.word(1));
                        break;
                    default:
                        break;
                }
                break;
            // Find all input and output variables
            case spv::OpVariable: {
                Variable var = {};
                var.storageClass = insn.word(3);
                if (var.storageClass == spv::StorageClassInput || var.storageClass == spv::StorageClassOutput) {
                    var.baseTypePtrID = insn.word(1);
                    var.ID = insn.word(2);
                    variables.push_back(var);
                }
                break;
            }
            default:
                break;
        }
    }

    // Tally components consumed by non-built-in input and output variables.
    uint32_t numCompIn = 0, numCompOut = 0;
    for (auto &var : variables) {
        // Check the variable's ID
        if (VariableIsBuiltIn(src, var.ID, builtInBlockIDs, builtInIDs)) {
            continue;
        }
        // Check the variable's type's ID - e.g. gl_PerVertex is made of basic types, not built-in types
        // (word(3) of the pointer type is the pointee type ID)
        if (VariableIsBuiltIn(src, src->get_def(var.baseTypePtrID).word(3), builtInBlockIDs, builtInIDs)) {
            continue;
        }

        if (var.storageClass == spv::StorageClassInput) {
            numCompIn += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
        } else {  // var.storageClass == spv::StorageClassOutput
            numCompOut += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
        }
    }

    // Compare the tallies against the device limit that applies to this stage's interface(s).
    switch (pStage->stage) {
        case VK_SHADER_STAGE_VERTEX_BIT:
            if (numCompOut > limits.maxVertexOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Vertex shader exceeds "
                                "VkPhysicalDeviceLimits::maxVertexOutputComponents of %u "
                                "components by %u components",
                                limits.maxVertexOutputComponents, numCompOut - limits.maxVertexOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
            if (numCompIn > limits.maxTessellationControlPerVertexInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationControlPerVertexInputComponents,
                                numCompIn - limits.maxTessellationControlPerVertexInputComponents);
            }
            if (numCompOut > limits.maxTessellationControlPerVertexOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationControlPerVertexOutputComponents,
                                numCompOut - limits.maxTessellationControlPerVertexOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
            if (numCompIn > limits.maxTessellationEvaluationInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationEvaluationInputComponents,
                                numCompIn - limits.maxTessellationEvaluationInputComponents);
            }
            if (numCompOut > limits.maxTessellationEvaluationOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationEvaluationOutputComponents,
                                numCompOut - limits.maxTessellationEvaluationOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_GEOMETRY_BIT:
            if (numCompIn > limits.maxGeometryInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
                                "VkPhysicalDeviceLimits::maxGeometryInputComponents of %u "
                                "components by %u components",
                                limits.maxGeometryInputComponents, numCompIn - limits.maxGeometryInputComponents);
            }
            if (numCompOut > limits.maxGeometryOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
                                "VkPhysicalDeviceLimits::maxGeometryOutputComponents of %u "
                                "components by %u components",
                                limits.maxGeometryOutputComponents, numCompOut - limits.maxGeometryOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_FRAGMENT_BIT:
            if (numCompIn > limits.maxFragmentInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Fragment shader exceeds "
                                "VkPhysicalDeviceLimits::maxFragmentInputComponents of %u "
                                "components by %u components",
                                limits.maxFragmentInputComponents, numCompIn - limits.maxFragmentInputComponents);
            }
            break;

        // Ray tracing and mesh/task stages have no input/output component limits checked here.
        case VK_SHADER_STAGE_RAYGEN_BIT_NV:
        case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
        case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
        case VK_SHADER_STAGE_MISS_BIT_NV:
        case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
        case VK_SHADER_STAGE_CALLABLE_BIT_NV:
        case VK_SHADER_STAGE_TASK_BIT_NV:
        case VK_SHADER_STAGE_MESH_BIT_NV:
            break;

        default:
            assert(false);  // This should never happen
    }
    return skip;
}
1937
Jeff Bolze4356752019-03-07 11:23:46 -06001938// copy the specialization constant value into buf, if it is present
1939void GetSpecConstantValue(VkPipelineShaderStageCreateInfo const *pStage, uint32_t spec_id, void *buf) {
1940 VkSpecializationInfo const *spec = pStage->pSpecializationInfo;
1941
1942 if (spec && spec_id < spec->mapEntryCount) {
1943 memcpy(buf, (uint8_t *)spec->pData + spec->pMapEntries[spec_id].offset, spec->pMapEntries[spec_id].size);
1944 }
1945}
1946
1947// Fill in value with the constant or specialization constant value, if available.
1948// Returns true if the value has been accurately filled out.
1949static bool GetIntConstantValue(spirv_inst_iter insn, shader_module const *src, VkPipelineShaderStageCreateInfo const *pStage,
1950 const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id, uint32_t *value) {
1951 auto type_id = src->get_def(insn.word(1));
1952 if (type_id.opcode() != spv::OpTypeInt || type_id.word(2) != 32) {
1953 return false;
1954 }
1955 switch (insn.opcode()) {
1956 case spv::OpSpecConstant:
1957 *value = insn.word(3);
1958 GetSpecConstantValue(pStage, id_to_spec_id.at(insn.word(2)), value);
1959 return true;
1960 case spv::OpConstant:
1961 *value = insn.word(3);
1962 return true;
1963 default:
1964 return false;
1965 }
1966}
1967
1968// Map SPIR-V type to VK_COMPONENT_TYPE enum
1969VkComponentTypeNV GetComponentType(spirv_inst_iter insn, shader_module const *src) {
1970 switch (insn.opcode()) {
1971 case spv::OpTypeInt:
1972 switch (insn.word(2)) {
1973 case 8:
1974 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT8_NV : VK_COMPONENT_TYPE_UINT8_NV;
1975 case 16:
1976 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT16_NV : VK_COMPONENT_TYPE_UINT16_NV;
1977 case 32:
1978 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT32_NV : VK_COMPONENT_TYPE_UINT32_NV;
1979 case 64:
1980 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT64_NV : VK_COMPONENT_TYPE_UINT64_NV;
1981 default:
1982 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
1983 }
1984 case spv::OpTypeFloat:
1985 switch (insn.word(2)) {
1986 case 16:
1987 return VK_COMPONENT_TYPE_FLOAT16_NV;
1988 case 32:
1989 return VK_COMPONENT_TYPE_FLOAT32_NV;
1990 case 64:
1991 return VK_COMPONENT_TYPE_FLOAT64_NV;
1992 default:
1993 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
1994 }
1995 default:
1996 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
1997 }
1998}
1999
// Validate SPV_NV_cooperative_matrix behavior that can't be statically validated
// in SPIRV-Tools (e.g. due to specialization constant usage).
// Checks that cooperative matrix types/OpCooperativeMatrixMulAddNV operands match one of the
// device's advertised VkCooperativeMatrixPropertiesNV entries, and that the capability is only
// used in supported stages. Returns true if a validation error was logged.
bool CoreChecks::ValidateCooperativeMatrix(shader_module const *src, VkPipelineShaderStageCreateInfo const *pStage,
                                           PIPELINE_STATE *pipeline) {
    bool skip = false;

    // Map SPIR-V result ID to specialization constant id (SpecId decoration value)
    std::unordered_map<uint32_t, uint32_t> id_to_spec_id;
    // Map SPIR-V result ID to the ID of its type.
    std::unordered_map<uint32_t, uint32_t> id_to_type_id;

    // Resolved parameters of an OpTypeCooperativeMatrixNV; all_constant is false when any of
    // scope/rows/cols could not be resolved to a (spec) constant value.
    struct CoopMatType {
        uint32_t scope, rows, cols;
        VkComponentTypeNV component_type;
        bool all_constant;

        CoopMatType() : scope(0), rows(0), cols(0), component_type(VK_COMPONENT_TYPE_MAX_ENUM_NV), all_constant(false) {}

        // Resolve the operands of the OpTypeCooperativeMatrixNV with result id 'id'.
        void Init(uint32_t id, shader_module const *src, VkPipelineShaderStageCreateInfo const *pStage,
                  const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id) {
            spirv_inst_iter insn = src->get_def(id);
            uint32_t component_type_id = insn.word(2);
            uint32_t scope_id = insn.word(3);
            uint32_t rows_id = insn.word(4);
            uint32_t cols_id = insn.word(5);
            auto component_type_iter = src->get_def(component_type_id);
            auto scope_iter = src->get_def(scope_id);
            auto rows_iter = src->get_def(rows_id);
            auto cols_iter = src->get_def(cols_id);

            all_constant = true;
            if (!GetIntConstantValue(scope_iter, src, pStage, id_to_spec_id, &scope)) {
                all_constant = false;
            }
            if (!GetIntConstantValue(rows_iter, src, pStage, id_to_spec_id, &rows)) {
                all_constant = false;
            }
            if (!GetIntConstantValue(cols_iter, src, pStage, id_to_spec_id, &cols)) {
                all_constant = false;
            }
            component_type = GetComponentType(component_type_iter, src);
        }
    };

    bool seen_coopmat_capability = false;

    for (auto insn : *src) {
        // Whitelist instructions whose result can be a cooperative matrix type, and
        // keep track of their types. It would be nice if SPIRV-Headers generated code
        // to identify which instructions have a result type and result id. Lacking that,
        // this whitelist is based on the set of instructions that
        // SPV_NV_cooperative_matrix says can be used with cooperative matrix types.
        switch (insn.opcode()) {
            case spv::OpLoad:
            case spv::OpCooperativeMatrixLoadNV:
            case spv::OpCooperativeMatrixMulAddNV:
            case spv::OpSNegate:
            case spv::OpFNegate:
            case spv::OpIAdd:
            case spv::OpFAdd:
            case spv::OpISub:
            case spv::OpFSub:
            case spv::OpFDiv:
            case spv::OpSDiv:
            case spv::OpUDiv:
            case spv::OpMatrixTimesScalar:
            case spv::OpConstantComposite:
            case spv::OpCompositeConstruct:
            case spv::OpConvertFToU:
            case spv::OpConvertFToS:
            case spv::OpConvertSToF:
            case spv::OpConvertUToF:
            case spv::OpUConvert:
            case spv::OpSConvert:
            case spv::OpFConvert:
                // word(1) is the result type id, word(2) the result id for all whitelisted opcodes.
                id_to_type_id[insn.word(2)] = insn.word(1);
                break;
            default:
                break;
        }

        switch (insn.opcode()) {
            case spv::OpDecorate:
                if (insn.word(2) == spv::DecorationSpecId) {
                    id_to_spec_id[insn.word(1)] = insn.word(3);
                }
                break;
            case spv::OpCapability:
                if (insn.word(1) == spv::CapabilityCooperativeMatrixNV) {
                    seen_coopmat_capability = true;

                    // The capability may only be declared in stages the device advertises support for.
                    if (!(pStage->stage & phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages)) {
                        skip |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                    HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixSupportedStages,
                                    "OpTypeCooperativeMatrixNV used in shader stage not in cooperativeMatrixSupportedStages (= %u)",
                                    phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages);
                    }
                }
                break;
            case spv::OpMemoryModel:
                // If the capability isn't enabled, don't bother with the rest of this function.
                // OpMemoryModel is the first required instruction after all OpCapability instructions.
                if (!seen_coopmat_capability) {
                    return skip;
                }
                break;
            case spv::OpTypeCooperativeMatrixNV: {
                CoopMatType M;
                M.Init(insn.word(1), src, pStage, id_to_spec_id);

                // Only validate when all parameters are compile-time (spec) constants; otherwise
                // the values are not knowable here.
                if (M.all_constant) {
                    // Validate that the type parameters are all supported for one of the
                    // operands of a cooperative matrix property.
                    bool valid = false;
                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
                        // A operand: MSize x KSize
                        if (cooperative_matrix_properties[i].AType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].KSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // B operand: KSize x NSize
                        if (cooperative_matrix_properties[i].BType == M.component_type &&
                            cooperative_matrix_properties[i].KSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // C operand: MSize x NSize
                        if (cooperative_matrix_properties[i].CType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // D (result) operand: MSize x NSize
                        if (cooperative_matrix_properties[i].DType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                    }
                    if (!valid) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixType,
                                        "OpTypeCooperativeMatrixNV (result id = %u) operands don't match a supported matrix type",
                                        insn.word(1));
                    }
                }
                break;
            }
            case spv::OpCooperativeMatrixMulAddNV: {
                CoopMatType A, B, C, D;
                // words 2..5 are: result (D), then operands A, B, C; all must have a recorded type.
                if (id_to_type_id.find(insn.word(2)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(3)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(4)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(5)) == id_to_type_id.end()) {
                    // Couldn't find the type of the matrix - the whitelist above is missing an opcode.
                    assert(!"Couldn't find type of matrix");
                    break;
                }
                D.Init(id_to_type_id[insn.word(2)], src, pStage, id_to_spec_id);
                A.Init(id_to_type_id[insn.word(3)], src, pStage, id_to_spec_id);
                B.Init(id_to_type_id[insn.word(4)], src, pStage, id_to_spec_id);
                C.Init(id_to_type_id[insn.word(5)], src, pStage, id_to_spec_id);

                if (A.all_constant && B.all_constant && C.all_constant && D.all_constant) {
                    // Validate that the type parameters are all supported for the same
                    // cooperative matrix property.
                    bool valid = false;
                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
                        if (cooperative_matrix_properties[i].AType == A.component_type &&
                            cooperative_matrix_properties[i].MSize == A.rows && cooperative_matrix_properties[i].KSize == A.cols &&
                            cooperative_matrix_properties[i].scope == A.scope &&

                            cooperative_matrix_properties[i].BType == B.component_type &&
                            cooperative_matrix_properties[i].KSize == B.rows && cooperative_matrix_properties[i].NSize == B.cols &&
                            cooperative_matrix_properties[i].scope == B.scope &&

                            cooperative_matrix_properties[i].CType == C.component_type &&
                            cooperative_matrix_properties[i].MSize == C.rows && cooperative_matrix_properties[i].NSize == C.cols &&
                            cooperative_matrix_properties[i].scope == C.scope &&

                            cooperative_matrix_properties[i].DType == D.component_type &&
                            cooperative_matrix_properties[i].MSize == D.rows && cooperative_matrix_properties[i].NSize == D.cols &&
                            cooperative_matrix_properties[i].scope == D.scope) {
                            valid = true;
                            break;
                        }
                    }
                    if (!valid) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixMulAdd,
                                        "OpCooperativeMatrixMulAddNV (result id = %u) operands don't match a supported matrix "
                                        "VkCooperativeMatrixPropertiesNV",
                                        insn.word(2));
                    }
                }
                break;
            }
            default:
                break;
        }
    }

    return skip;
}
2205
// Validate the float-controls (VK_KHR_shader_float_controls) execution modes declared by the given
// entry point against the features enabled on the device. Returns true if a validation error was
// logged.
bool CoreChecks::ValidateExecutionModes(shader_module const *src, spirv_inst_iter entrypoint) {
    auto entrypoint_id = entrypoint.word(2);

    // The first denorm execution mode encountered, along with its bit width.
    // Used to check if SeparateDenormSettings is respected.
    std::pair<spv::ExecutionMode, uint32_t> first_denorm_execution_mode = std::make_pair(spv::ExecutionModeMax, 0);

    // The first rounding mode encountered, along with its bit width.
    // Used to check if SeparateRoundingModeSettings is respected.
    std::pair<spv::ExecutionMode, uint32_t> first_rounding_mode = std::make_pair(spv::ExecutionModeMax, 0);

    bool skip = false;

    // Only OpExecutionMode instructions that name this entry point are considered.
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
            auto mode = insn.word(2);
            switch (mode) {
                case spv::ExecutionModeSignedZeroInfNanPreserve: {
                    // word(3) of these float-control modes is the floating-point bit width they apply to.
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat64)) {
                        skip |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    kVUID_Core_Shader_FeatureNotEnabled,
                                    "Shader requires SignedZeroInfNanPreserve for bit width %d but it is not enabled on the device",
                                    bit_width);
                    }
                    break;
                }

                case spv::ExecutionModeDenormPreserve: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormPreserveFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderDenormPreserveFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderDenormPreserveFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires DenormPreserve for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
                        // Register the first denorm execution mode found
                        first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width &&
                               !enabled_features.float_controls.separateDenormSettings) {
                        // A different denorm mode on a different bit width requires the
                        // separateDenormSettings feature.
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate denorm execution modes for different bit widths but "
                                        "SeparateDenormSettings is not enabled on the device");
                    }
                    break;
                }

                case spv::ExecutionModeDenormFlushToZero: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires DenormFlushToZero for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
                        // Register the first denorm execution mode found
                        first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width &&
                               !enabled_features.float_controls.separateDenormSettings) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate denorm execution modes for different bit widths but "
                                        "SeparateDenormSettings is not enabled on the device");
                    }
                    break;
                }

                case spv::ExecutionModeRoundingModeRTE: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTEFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTEFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTEFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires RoundingModeRTE for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    if (first_rounding_mode.first == spv::ExecutionModeMax) {
                        // Register the first rounding mode found
                        first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width &&
                               !enabled_features.float_controls.separateRoundingModeSettings) {
                        // A different rounding mode on a different bit width requires the
                        // separateRoundingModeSettings feature.
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate rounding modes for different bit widths but "
                                        "SeparateRoundingModeSettings is not enabled on the device");
                    }
                    break;
                }

                case spv::ExecutionModeRoundingModeRTZ: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTZFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTZFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTZFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires RoundingModeRTZ for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    if (first_rounding_mode.first == spv::ExecutionModeMax) {
                        // Register the first rounding mode found
                        first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width &&
                               !enabled_features.float_controls.separateRoundingModeSettings) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate rounding modes for different bit widths but "
                                        "SeparateRoundingModeSettings is not enabled on the device");
                    }
                    break;
                }
            }
        }
    }

    return skip;
}
2338
Jeff Bolze4356752019-03-07 11:23:46 -06002339static uint32_t DescriptorTypeToReqs(shader_module const *module, uint32_t type_id) {
Chris Forbes47567b72017-06-09 12:09:45 -07002340 auto type = module->get_def(type_id);
2341
2342 while (true) {
2343 switch (type.opcode()) {
2344 case spv::OpTypeArray:
Chris Forbes062f1222018-08-21 15:34:15 -07002345 case spv::OpTypeRuntimeArray:
Chris Forbes47567b72017-06-09 12:09:45 -07002346 case spv::OpTypeSampledImage:
2347 type = module->get_def(type.word(2));
2348 break;
2349 case spv::OpTypePointer:
2350 type = module->get_def(type.word(3));
2351 break;
2352 case spv::OpTypeImage: {
2353 auto dim = type.word(3);
2354 auto arrayed = type.word(5);
2355 auto msaa = type.word(6);
2356
Chris Forbes74ba2232018-08-27 15:19:27 -07002357 uint32_t bits = 0;
2358 switch (GetFundamentalType(module, type.word(2))) {
2359 case FORMAT_TYPE_FLOAT:
2360 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT;
2361 break;
2362 case FORMAT_TYPE_UINT:
2363 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
2364 break;
2365 case FORMAT_TYPE_SINT:
2366 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
2367 break;
2368 default:
2369 break;
2370 }
2371
Chris Forbes47567b72017-06-09 12:09:45 -07002372 switch (dim) {
2373 case spv::Dim1D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002374 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_1D;
2375 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002376 case spv::Dim2D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002377 bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
2378 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_2D;
2379 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002380 case spv::Dim3D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002381 bits |= DESCRIPTOR_REQ_VIEW_TYPE_3D;
2382 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002383 case spv::DimCube:
Chris Forbes74ba2232018-08-27 15:19:27 -07002384 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_CUBE;
2385 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002386 case spv::DimSubpassData:
Chris Forbes74ba2232018-08-27 15:19:27 -07002387 bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
2388 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002389 default: // buffer, etc.
Chris Forbes74ba2232018-08-27 15:19:27 -07002390 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002391 }
2392 }
2393 default:
2394 return 0;
2395 }
2396 }
2397}
2398
2399// For given pipelineLayout verify that the set_layout_node at slot.first
2400// has the requested binding at slot.second and return ptr to that binding
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002401static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_NODE const *pipelineLayout,
2402 descriptor_slot_t slot) {
Chris Forbes47567b72017-06-09 12:09:45 -07002403 if (!pipelineLayout) return nullptr;
2404
2405 if (slot.first >= pipelineLayout->set_layouts.size()) return nullptr;
2406
2407 return pipelineLayout->set_layouts[slot.first]->GetDescriptorSetLayoutBindingPtrFromBinding(slot.second);
2408}
2409
Locke1ec6d952019-04-02 11:57:21 -06002410static bool FindLocalSize(shader_module const *src, uint32_t &local_size_x, uint32_t &local_size_y, uint32_t &local_size_z) {
2411 for (auto insn : *src) {
2412 if (insn.opcode() == spv::OpEntryPoint) {
2413 auto executionModel = insn.word(1);
2414 auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
2415 if (entrypointStageBits == VK_SHADER_STAGE_COMPUTE_BIT) {
2416 auto entrypoint_id = insn.word(2);
2417 for (auto insn1 : *src) {
2418 if (insn1.opcode() == spv::OpExecutionMode && insn1.word(1) == entrypoint_id &&
2419 insn1.word(2) == spv::ExecutionModeLocalSize) {
2420 local_size_x = insn1.word(3);
2421 local_size_y = insn1.word(4);
2422 local_size_z = insn1.word(5);
2423 return true;
2424 }
2425 }
2426 }
2427 }
2428 }
2429 return false;
2430}
2431
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002432static void ProcessExecutionModes(shader_module const *src, spirv_inst_iter entrypoint, PIPELINE_STATE *pipeline) {
Jeff Bolz105d6492018-09-29 15:46:44 -05002433 auto entrypoint_id = entrypoint.word(2);
Chris Forbes0771b672018-03-22 21:13:46 -07002434 bool is_point_mode = false;
2435
2436 for (auto insn : *src) {
2437 if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
2438 switch (insn.word(2)) {
2439 case spv::ExecutionModePointMode:
2440 // In tessellation shaders, PointMode is separate and trumps the tessellation topology.
2441 is_point_mode = true;
2442 break;
2443
2444 case spv::ExecutionModeOutputPoints:
2445 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
2446 break;
2447
2448 case spv::ExecutionModeIsolines:
2449 case spv::ExecutionModeOutputLineStrip:
2450 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
2451 break;
2452
2453 case spv::ExecutionModeTriangles:
2454 case spv::ExecutionModeQuads:
2455 case spv::ExecutionModeOutputTriangleStrip:
2456 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
2457 break;
2458 }
2459 }
2460 }
2461
2462 if (is_point_mode) pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
2463}
2464
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002465// If PointList topology is specified in the pipeline, verify that a shader geometry stage writes PointSize
2466// o If there is only a vertex shader : gl_PointSize must be written when using points
2467// o If there is a geometry or tessellation shader:
2468// - If shaderTessellationAndGeometryPointSize feature is enabled:
2469// * gl_PointSize must be written in the final geometry stage
2470// - If shaderTessellationAndGeometryPointSize feature is disabled:
2471// * gl_PointSize must NOT be written and a default of 1.0 is assumed
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002472bool CoreChecks::ValidatePointListShaderState(const PIPELINE_STATE *pipeline, shader_module const *src, spirv_inst_iter entrypoint,
2473 VkShaderStageFlagBits stage) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002474 if (pipeline->topology_at_rasterizer != VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
2475 return false;
2476 }
2477
2478 bool pointsize_written = false;
2479 bool skip = false;
2480
2481 // Search for PointSize built-in decorations
2482 std::vector<uint32_t> pointsize_builtin_offsets;
2483 spirv_inst_iter insn = entrypoint;
2484 while (!pointsize_written && (insn.opcode() != spv::OpFunction)) {
2485 if (insn.opcode() == spv::OpMemberDecorate) {
2486 if (insn.word(3) == spv::DecorationBuiltIn) {
2487 if (insn.word(4) == spv::BuiltInPointSize) {
2488 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
2489 }
2490 }
2491 } else if (insn.opcode() == spv::OpDecorate) {
2492 if (insn.word(2) == spv::DecorationBuiltIn) {
2493 if (insn.word(3) == spv::BuiltInPointSize) {
2494 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
2495 }
2496 }
2497 }
2498
2499 insn++;
2500 }
2501
2502 if ((stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || stage == VK_SHADER_STAGE_GEOMETRY_BIT) &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002503 !enabled_features.core.shaderTessellationAndGeometryPointSize) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002504 if (pointsize_written) {
Mark Lobodzinski93a1fa72019-04-19 12:12:25 -06002505 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002506 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_PointSizeBuiltInOverSpecified,
2507 "Pipeline topology is set to POINT_LIST and geometry or tessellation shaders write PointSize which "
2508 "is prohibited when the shaderTessellationAndGeometryPointSize feature is not enabled.");
2509 }
2510 } else if (!pointsize_written) {
2511 skip |=
Mark Lobodzinski93a1fa72019-04-19 12:12:25 -06002512 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002513 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_MissingPointSizeBuiltIn,
2514 "Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader corresponding to %s.",
2515 string_VkShaderStageFlagBits(stage));
2516 }
2517 return skip;
2518}
2519
// Validate a single pipeline shader stage: entrypoint presence, capabilities/limits,
// execution modes, specialization offsets, push-constant usage, descriptor usage against
// the pipeline layout, and (for fragment stages) input-attachment usage against the subpass.
// Side effects: publishes the stage's shader_module and entrypoint iterator through
// `out_module`/`out_entrypoint`, records execution-mode topology on `pipeline` via
// ProcessExecutionModes, and accumulates the stage's descriptor requirements into
// `pipeline->active_slots`. Returns true when any validation error was logged.
bool CoreChecks::ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
                                             shader_module const **out_module, spirv_inst_iter *out_entrypoint,
                                             bool check_point_size) {
    bool skip = false;
    // NOTE(review): `module` is dereferenced immediately below without a null check —
    // presumably GetShaderModuleState never returns null for a module recorded at create
    // time; confirm against the state-tracker contract.
    auto module = *out_module = GetShaderModuleState(pStage->module);

    // Nothing useful can be checked against a module whose SPIR-V failed earlier validation.
    if (!module->has_valid_spirv) return false;

    // Find the entrypoint
    auto entrypoint = *out_entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
    if (entrypoint == module->end()) {
        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    "VUID-VkPipelineShaderStageCreateInfo-pName-00707", "No entrypoint found named `%s` for stage %s..",
                    pStage->pName, string_VkShaderStageFlagBits(pStage->stage))) {
            return true;  // no point continuing beyond here, any analysis is just going to be garbage.
        }
        // NOTE(review): when log_msg returns false (message filtered), execution falls
        // through with entrypoint == module->end(); verify downstream helpers tolerate that.
    }

    // Mark accessible ids
    auto accessible_ids = MarkAccessibleIds(module, entrypoint);
    ProcessExecutionModes(module, entrypoint, pipeline);

    // Validate descriptor set layout against what the entrypoint actually uses
    bool has_writable_descriptor = false;
    auto descriptor_uses = CollectInterfaceByDescriptorSlot(report_data, module, accessible_ids, &has_writable_descriptor);

    // Validate shader capabilities against enabled device features
    skip |= ValidateShaderCapabilities(module, pStage->stage, has_writable_descriptor);
    skip |= ValidateShaderStageInputOutputLimits(module, pStage, pipeline);
    skip |= ValidateExecutionModes(module, entrypoint);
    skip |= ValidateSpecializationOffsets(report_data, pStage);
    skip |= ValidatePushConstantUsage(report_data, pipeline->pipeline_layout.push_constant_ranges.get(), module, accessible_ids,
                                      pStage->stage);
    // PointSize rules only matter when rasterization is actually performed.
    if (check_point_size && !pipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
        skip |= ValidatePointListShaderState(pipeline, module, entrypoint, pStage->stage);
    }
    skip |= ValidateCooperativeMatrix(module, pStage, pipeline);

    // Validate descriptor use
    for (auto use : descriptor_uses) {
        // While validating shaders capture which slots are used by the pipeline
        auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));

        // Verify given pipelineLayout has requested setLayout with requested binding
        const auto &binding = GetDescriptorBinding(&pipeline->pipeline_layout, use.first);
        unsigned required_descriptor_count;
        std::set<uint32_t> descriptor_types = TypeToDescriptorTypeSet(module, use.second.type_id, required_descriptor_count);

        if (!binding) {
            // Shader references a set/binding the pipeline layout does not declare.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_MissingDescriptor,
                            "Shader uses descriptor slot %u.%u (expected `%s`) but not declared in pipeline layout",
                            use.first.first, use.first.second, string_descriptorTypes(descriptor_types).c_str());
        } else if (~binding->stageFlags & pStage->stage) {
            // Binding exists but its stageFlags exclude this stage.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
                            kVUID_Core_Shader_DescriptorNotAccessibleFromStage,
                            "Shader uses descriptor slot %u.%u but descriptor not accessible from stage %s", use.first.first,
                            use.first.second, string_VkShaderStageFlagBits(pStage->stage));
        } else if (descriptor_types.find(binding->descriptorType) == descriptor_types.end()) {
            // Binding's descriptor type is not among the types the shader variable accepts.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_DescriptorTypeMismatch,
                            "Type mismatch on descriptor slot %u.%u (expected `%s`) but descriptor of type %s", use.first.first,
                            use.first.second, string_descriptorTypes(descriptor_types).c_str(),
                            string_VkDescriptorType(binding->descriptorType));
        } else if (binding->descriptorCount < required_descriptor_count) {
            // Arrayed descriptor in the shader needs more elements than the binding provides.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_DescriptorTypeMismatch,
                            "Shader expects at least %u descriptors for binding %u.%u but only %u provided",
                            required_descriptor_count, use.first.first, use.first.second, binding->descriptorCount);
        }
    }

    // Validate use of input attachments against subpass structure
    if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
        auto input_attachment_uses = CollectInterfaceByInputAttachmentIndex(module, accessible_ids);

        auto rpci = pipeline->rp_state->createInfo.ptr();
        auto subpass = pipeline->graphicsPipelineCI.subpass;

        for (auto use : input_attachment_uses) {
            auto input_attachments = rpci->pSubpasses[subpass].pInputAttachments;
            // Out-of-range or absent entries are treated as VK_ATTACHMENT_UNUSED.
            auto index = (input_attachments && use.first < rpci->pSubpasses[subpass].inputAttachmentCount)
                             ? input_attachments[use.first].attachment
                             : VK_ATTACHMENT_UNUSED;

            if (index == VK_ATTACHMENT_UNUSED) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                kVUID_Core_Shader_MissingInputAttachment,
                                "Shader consumes input attachment index %d but not provided in subpass", use.first);
            } else if (!(GetFormatType(rpci->pAttachments[index].format) & GetFundamentalType(module, use.second.type_id))) {
                // Attachment's numeric format class must overlap the shader variable's fundamental type.
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_InputAttachmentTypeMismatch,
                            "Subpass input attachment %u format of %s does not match type used in shader `%s`", use.first,
                            string_VkFormat(rpci->pAttachments[index].format), DescribeType(module, use.second.type_id).c_str());
            }
        }
    }
    // Compute stages additionally get their declared workgroup size checked against device limits.
    if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT) {
        skip |= ValidateComputeWorkGroupSizes(module);
    }
    return skip;
}
2624
// Cross-stage interface validation: walk the producer stage's outputs and the consumer
// stage's inputs (both keyed and sorted by location.component) in lockstep, reporting
// outputs that are never consumed (performance warning), inputs that are never produced,
// and type/patch/precision mismatches on matching locations. For non-fragment consumers,
// also compares the built-in interface blocks member-by-member. Returns true when any
// message was logged.
static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data, shader_module const *producer,
                                           spirv_inst_iter producer_entrypoint, shader_stage_attributes const *producer_stage,
                                           shader_module const *consumer, spirv_inst_iter consumer_entrypoint,
                                           shader_stage_attributes const *consumer_stage) {
    bool skip = false;

    auto outputs =
        CollectInterfaceByLocation(producer, producer_entrypoint, spv::StorageClassOutput, producer_stage->arrayed_output);
    auto inputs = CollectInterfaceByLocation(consumer, consumer_entrypoint, spv::StorageClassInput, consumer_stage->arrayed_input);

    auto a_it = outputs.begin();  // 'a' = producer outputs
    auto b_it = inputs.begin();   // 'b' = consumer inputs

    // Maps sorted by key (location); walk them together to find mismatches
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        // Sentinel (0,0) keys are only used when the corresponding side is exhausted.
        auto a_first = a_at_end ? std::make_pair(0u, 0u) : a_it->first;
        auto b_first = b_at_end ? std::make_pair(0u, 0u) : b_it->first;

        if (b_at_end || ((!a_at_end) && (a_first < b_first))) {
            // Producer writes a location the consumer never reads — legal, but wasteful.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                            "%s writes to output location %u.%u which is not consumed by %s", producer_stage->name, a_first.first,
                            a_first.second, consumer_stage->name);
            a_it++;
        } else if (a_at_end || a_first > b_first) {
            // Consumer reads a location the producer never writes — an error.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(consumer->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                            "%s consumes input location %u.%u which is not written by %s", consumer_stage->name, b_first.first,
                            b_first.second, producer_stage->name);
            b_it++;
        } else {
            // subtleties of arrayed interfaces:
            // - if is_patch, then the member is not arrayed, even though the interface may be.
            // - if is_block_member, then the extra array level of an arrayed interface is not
            //   expressed in the member type -- it's expressed in the block type.
            if (!TypesMatch(producer, consumer, a_it->second.type_id, b_it->second.type_id,
                            producer_stage->arrayed_output && !a_it->second.is_patch && !a_it->second.is_block_member,
                            consumer_stage->arrayed_input && !b_it->second.is_patch && !b_it->second.is_block_member, true)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Type mismatch on location %u.%u: '%s' vs '%s'", a_first.first, a_first.second,
                                DescribeType(producer, a_it->second.type_id).c_str(),
                                DescribeType(consumer, b_it->second.type_id).c_str());
            }
            if (a_it->second.is_patch != b_it->second.is_patch) {
                // Per-patch vs per-vertex qualification must agree across the boundary.
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Decoration mismatch on location %u.%u: is per-%s in %s stage but per-%s in %s stage",
                                a_first.first, a_first.second, a_it->second.is_patch ? "patch" : "vertex", producer_stage->name,
                                b_it->second.is_patch ? "patch" : "vertex", consumer_stage->name);
            }
            if (a_it->second.is_relaxed_precision != b_it->second.is_relaxed_precision) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Decoration mismatch on location %u.%u: %s and %s stages differ in precision", a_first.first,
                                a_first.second, producer_stage->name, consumer_stage->name);
            }
            a_it++;
            b_it++;
        }
    }

    if (consumer_stage->stage != VK_SHADER_STAGE_FRAGMENT_BIT) {
        // Compare the built-in interface blocks (e.g. gl_PerVertex) member-by-member.
        auto builtins_producer = CollectBuiltinBlockMembers(producer, producer_entrypoint, spv::StorageClassOutput);
        auto builtins_consumer = CollectBuiltinBlockMembers(consumer, consumer_entrypoint, spv::StorageClassInput);

        if (!builtins_producer.empty() && !builtins_consumer.empty()) {
            if (builtins_producer.size() != builtins_consumer.size()) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                            "Number of elements inside builtin block differ between stages (%s %d vs %s %d).", producer_stage->name,
                            (int)builtins_producer.size(), consumer_stage->name, (int)builtins_consumer.size());
            } else {
                auto it_producer = builtins_producer.begin();
                auto it_consumer = builtins_consumer.begin();
                while (it_producer != builtins_producer.end() && it_consumer != builtins_consumer.end()) {
                    if (*it_producer != *it_consumer) {
                        // One mismatch is enough — report once and stop comparing.
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                        HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                        "Builtin variable inside block doesn't match between %s and %s.", producer_stage->name,
                                        consumer_stage->name);
                        break;
                    }
                    it_producer++;
                    it_consumer++;
                }
            }
        }
    }

    return skip;
}
2720
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002721static inline uint32_t DetermineFinalGeomStage(PIPELINE_STATE *pipeline, VkGraphicsPipelineCreateInfo *pCreateInfo) {
2722 uint32_t stage_mask = 0;
2723 if (pipeline->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
2724 for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
2725 stage_mask |= pCreateInfo->pStages[i].stage;
2726 }
2727 // Determine which shader in which PointSize should be written (the final geometry stage)
Jeff Bolz105d6492018-09-29 15:46:44 -05002728 if (stage_mask & VK_SHADER_STAGE_MESH_BIT_NV) {
2729 stage_mask = VK_SHADER_STAGE_MESH_BIT_NV;
2730 } else if (stage_mask & VK_SHADER_STAGE_GEOMETRY_BIT) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002731 stage_mask = VK_SHADER_STAGE_GEOMETRY_BIT;
2732 } else if (stage_mask & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
2733 stage_mask = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
2734 } else if (stage_mask & VK_SHADER_STAGE_VERTEX_BIT) {
2735 stage_mask = VK_SHADER_STAGE_VERTEX_BIT;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002736 }
2737 }
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002738 return stage_mask;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002739}
2740
// Validate that the shaders used by the given pipeline and store the active_slots
//  that are actually used by the pipeline into pPipeline->active_slots
// Runs per-stage validation first, then (only if every stage passed individually)
// cross-stage checks: vertex-input consistency, VI-vs-VS matching, each adjacent
// producer/consumer interface pair, and FS outputs vs the render pass.
bool CoreChecks::ValidateAndCapturePipelineShaderState(PIPELINE_STATE *pipeline) {
    auto pCreateInfo = pipeline->graphicsPipelineCI.ptr();
    int vertex_stage = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
    int fragment_stage = GetShaderStageId(VK_SHADER_STAGE_FRAGMENT_BIT);

    // Per-stage module/entrypoint slots, indexed by GetShaderStageId.
    shader_module const *shaders[32];
    memset(shaders, 0, sizeof(shaders));
    spirv_inst_iter entrypoints[32];
    // NOTE(review): memset over an array of spirv_inst_iter assumes the iterator is
    // trivially zero-initializable — confirm against spirv_inst_iter's definition.
    memset(entrypoints, 0, sizeof(entrypoints));
    bool skip = false;

    // Which single stage (if any) is responsible for writing PointSize.
    uint32_t pointlist_stage_mask = DetermineFinalGeomStage(pipeline, pCreateInfo);

    for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
        auto pStage = &pCreateInfo->pStages[i];
        auto stage_id = GetShaderStageId(pStage->stage);
        skip |= ValidatePipelineShaderStage(pStage, pipeline, &shaders[stage_id], &entrypoints[stage_id],
                                            (pointlist_stage_mask == pStage->stage));
    }

    // if the shader stages are no good individually, cross-stage validation is pointless.
    if (skip) return true;

    auto vi = pCreateInfo->pVertexInputState;

    if (vi) {
        skip |= ValidateViConsistency(report_data, vi);
    }

    if (shaders[vertex_stage] && shaders[vertex_stage]->has_valid_spirv) {
        skip |= ValidateViAgainstVsInputs(report_data, vi, shaders[vertex_stage], entrypoints[vertex_stage]);
    }

    int producer = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
    int consumer = GetShaderStageId(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);

    // Skip forward to the first stage actually present in the pipeline.
    while (!shaders[producer] && producer != fragment_stage) {
        producer++;
        consumer++;
    }

    // Validate each adjacent (present) producer/consumer pair; absent stages are skipped
    // and the producer only advances when a consumer is found.
    for (; producer != fragment_stage && consumer <= fragment_stage; consumer++) {
        assert(shaders[producer]);
        if (shaders[consumer]) {
            if (shaders[consumer]->has_valid_spirv && shaders[producer]->has_valid_spirv) {
                skip |= ValidateInterfaceBetweenStages(report_data, shaders[producer], entrypoints[producer],
                                                       &shader_stage_attribs[producer], shaders[consumer], entrypoints[consumer],
                                                       &shader_stage_attribs[consumer]);
            }

            producer = consumer;
        }
    }

    if (shaders[fragment_stage] && shaders[fragment_stage]->has_valid_spirv) {
        skip |= ValidateFsOutputsAgainstRenderPass(report_data, shaders[fragment_stage], entrypoints[fragment_stage], pipeline,
                                                   pCreateInfo->subpass);
    }

    return skip;
}
2804
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002805bool CoreChecks::ValidateComputePipeline(PIPELINE_STATE *pipeline) {
Chris Forbesa400a8a2017-07-20 13:10:24 -07002806 auto pCreateInfo = pipeline->computePipelineCI.ptr();
Chris Forbes47567b72017-06-09 12:09:45 -07002807
2808 shader_module const *module;
2809 spirv_inst_iter entrypoint;
2810
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002811 return ValidatePipelineShaderStage(&pCreateInfo->stage, pipeline, &module, &entrypoint, false);
Chris Forbes47567b72017-06-09 12:09:45 -07002812}
Chris Forbes4ae55b32017-06-09 14:42:56 -07002813
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002814bool CoreChecks::ValidateRayTracingPipelineNV(PIPELINE_STATE *pipeline) {
Jeff Bolzfbe51582018-09-13 10:01:35 -05002815 auto pCreateInfo = pipeline->raytracingPipelineCI.ptr();
2816
2817 shader_module const *module;
2818 spirv_inst_iter entrypoint;
2819
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002820 return ValidatePipelineShaderStage(pCreateInfo->pStages, pipeline, &module, &entrypoint, false);
Jeff Bolzfbe51582018-09-13 10:01:35 -05002821}
2822
// Key a shader module for the validation cache: XXH32 over the raw SPIR-V words, seed 0.
uint32_t ValidationCache::MakeShaderHash(VkShaderModuleCreateInfo const *smci) { return XXH32(smci->pCode, smci->codeSize, 0); }
Chris Forbes9a61e082017-07-24 15:35:29 -07002824
Dave Houltona9df0ce2018-02-07 10:51:23 -07002825static ValidationCache *GetValidationCacheInfo(VkShaderModuleCreateInfo const *pCreateInfo) {
John Zulauf25ea2432019-04-05 10:07:38 -06002826 const auto validation_cache_ci = lvl_find_in_chain<VkShaderModuleValidationCacheCreateInfoEXT>(pCreateInfo->pNext);
2827 if (validation_cache_ci) {
John Zulauf146ee802019-04-05 15:31:06 -06002828 return CastFromHandle<ValidationCache *>(validation_cache_ci->validationCache);
Chris Forbes9a61e082017-07-24 15:35:29 -07002829 }
Chris Forbes9a61e082017-07-24 15:35:29 -07002830 return nullptr;
2831}
2832
// Pre-validate vkCreateShaderModule: check codeSize alignment, then run the
// SPIRV-Tools validator (with relaxed/scalar block layout options reflecting the
// enabled extensions/features) over the module. Results are memoized in the
// optional validation cache chained onto pCreateInfo. Returns true when the call
// should be failed; false when validation is disabled or the module is acceptable.
bool CoreChecks::PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                   const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
    bool skip = false;
    spv_result_t spv_valid = SPV_SUCCESS;

    if (disabled.shader_validation) {
        return false;
    }

    // VK_NV_glsl_shader permits non-SPIR-V payloads, so the alignment rule is waived.
    auto have_glsl_shader = device_extensions.vk_nv_glsl_shader;

    if (!have_glsl_shader && (pCreateInfo->codeSize % 4)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkShaderModuleCreateInfo-pCode-01376",
                        "SPIR-V module not valid: Codesize must be a multiple of 4 but is " PRINTF_SIZE_T_SPECIFIER ".",
                        pCreateInfo->codeSize);
    } else {
        auto cache = GetValidationCacheInfo(pCreateInfo);
        uint32_t hash = 0;
        if (cache) {
            hash = ValidationCache::MakeShaderHash(pCreateInfo);
            // Cache hit means this exact module already passed validation — skip the work.
            if (cache->Contains(hash)) return false;
        }

        // Use SPIRV-Tools validator to try and catch any issues with the module itself
        spv_target_env spirv_environment = SPV_ENV_VULKAN_1_0;
        if (api_version >= VK_API_VERSION_1_1) {
            spirv_environment = SPV_ENV_VULKAN_1_1;
        }
        spv_context ctx = spvContextCreate(spirv_environment);
        spv_const_binary_t binary{pCreateInfo->pCode, pCreateInfo->codeSize / sizeof(uint32_t)};
        spv_diagnostic diag = nullptr;
        spv_validator_options options = spvValidatorOptionsCreate();
        // Mirror enabled layout-relaxation extensions/features into validator options.
        if (device_extensions.vk_khr_relaxed_block_layout) {
            spvValidatorOptionsSetRelaxBlockLayout(options, true);
        }
        if (device_extensions.vk_ext_scalar_block_layout &&
            enabled_features.scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
            spvValidatorOptionsSetScalarBlockLayout(options, true);
        }
        spv_valid = spvValidateWithOptions(ctx, options, &binary, &diag);
        if (spv_valid != SPV_SUCCESS) {
            // With VK_NV_glsl_shader, only complain if the payload claims to be SPIR-V
            // (starts with the SPIR-V magic number); GLSL payloads are allowed to fail.
            if (!have_glsl_shader || (pCreateInfo->pCode[0] == spv::MagicNumber)) {
                skip |=
                    log_msg(report_data, spv_valid == SPV_WARNING ? VK_DEBUG_REPORT_WARNING_BIT_EXT : VK_DEBUG_REPORT_ERROR_BIT_EXT,
                            VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_InconsistentSpirv,
                            "SPIR-V module not valid: %s", diag && diag->error ? diag->error : "(no error text)");
            }
        } else {
            if (cache) {
                cache->Insert(hash);
            }
        }

        // Release SPIRV-Tools C-API objects in all paths through this branch.
        spvValidatorOptionsDestroy(options);
        spvDiagnosticDestroy(diag);
        spvContextDestroy(ctx);
    }

    return skip;
}
2894
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -07002895void CoreChecks::PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
2896 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
2897 void *csm_state_data) {
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002898 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinskib02a4852019-04-19 12:35:30 -06002899 if (enabled.gpu_validation) {
Mark Lobodzinski586d10e2019-03-08 18:19:48 -07002900 GpuPreCallCreateShaderModule(pCreateInfo, pAllocator, pShaderModule, &csm_state->unique_shader_id,
Mark Lobodzinski01734072019-02-13 17:39:15 -07002901 &csm_state->instrumented_create_info, &csm_state->instrumented_pgm);
2902 }
2903}
2904
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -07002905void CoreChecks::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
2906 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
2907 VkResult result, void *csm_state_data) {
Mark Lobodzinski01734072019-02-13 17:39:15 -07002908 if (VK_SUCCESS != result) return;
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002909 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002910
Mark Lobodzinski544def72019-04-19 14:25:59 -06002911 spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002912 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
2913 std::unique_ptr<shader_module> new_shader_module(
2914 is_spirv ? new shader_module(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
2915 : new shader_module());
Mark Lobodzinski7767ad82019-03-09 13:35:25 -07002916 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002917}
Lockeaa8fdc02019-04-02 11:59:20 -06002918
2919bool CoreChecks::ValidateComputeWorkGroupSizes(const shader_module *shader) {
2920 bool skip = false;
2921 uint32_t local_size_x = 0;
2922 uint32_t local_size_y = 0;
2923 uint32_t local_size_z = 0;
2924 if (FindLocalSize(shader, local_size_x, local_size_y, local_size_z)) {
2925 if (local_size_x > phys_dev_props.limits.maxComputeWorkGroupSize[0]) {
2926 skip |=
2927 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2928 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
2929 "ShaderMdoule %s local_size_x (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[0] (%" PRIu32 ").",
2930 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
2931 phys_dev_props.limits.maxComputeWorkGroupSize[0]);
2932 }
2933 if (local_size_y > phys_dev_props.limits.maxComputeWorkGroupSize[1]) {
2934 skip |=
2935 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2936 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
2937 "ShaderMdoule %s local_size_y (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[1] (%" PRIu32 ").",
2938 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
2939 phys_dev_props.limits.maxComputeWorkGroupSize[1]);
2940 }
2941 if (local_size_z > phys_dev_props.limits.maxComputeWorkGroupSize[2]) {
2942 skip |=
2943 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2944 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
2945 "ShaderMdoule %s local_size_z (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[2] (%" PRIu32 ").",
2946 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
2947 phys_dev_props.limits.maxComputeWorkGroupSize[2]);
2948 }
2949
2950 uint32_t limit = phys_dev_props.limits.maxComputeWorkGroupInvocations;
2951 uint64_t invocations = local_size_x * local_size_y;
2952 // Prevent overflow.
2953 bool fail = false;
2954 if (invocations > UINT32_MAX || invocations > limit) {
2955 fail = true;
2956 }
2957 if (!fail) {
2958 invocations *= local_size_z;
2959 if (invocations > UINT32_MAX || invocations > limit) {
2960 fail = true;
2961 }
2962 }
2963 if (fail) {
2964 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2965 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupInvocations",
2966 "ShaderMdoule %s local_size (%" PRIu32 ", %" PRIu32 ", %" PRIu32
2967 ") exceeds device limit maxComputeWorkGroupInvocations (%" PRIu32 ").",
2968 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x, local_size_y, local_size_z,
2969 limit);
2970 }
2971 }
2972 return skip;
2973}