blob: edb3fa0e5ab61576bab838967cba2beabd8fdcb4 [file] [log] [blame]
Karl Schultz7b024b42018-08-30 16:18:18 -06001/* Copyright (c) 2015-2019 The Khronos Group Inc.
2 * Copyright (c) 2015-2019 Valve Corporation
3 * Copyright (c) 2015-2019 LunarG, Inc.
4 * Copyright (C) 2015-2019 Google Inc.
Chris Forbes47567b72017-06-09 12:09:45 -07005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Chris Forbes <chrisf@ijw.co.nz>
Dave Houlton51653902018-06-22 17:32:13 -060019 * Author: Dave Houlton <daveh@lunarg.com>
Chris Forbes47567b72017-06-09 12:09:45 -070020 */
21
22#include <cinttypes>
23#include <cassert>
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +020024#include <chrono>
Chris Forbes47567b72017-06-09 12:09:45 -070025#include <vector>
26#include <unordered_map>
27#include <string>
28#include <sstream>
29#include <SPIRV/spirv.hpp>
30#include "vk_loader_platform.h"
31#include "vk_enum_string_helper.h"
Chris Forbes47567b72017-06-09 12:09:45 -070032#include "vk_layer_data.h"
33#include "vk_layer_extension_utils.h"
34#include "vk_layer_utils.h"
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -070035#include "chassis.h"
Chris Forbes47567b72017-06-09 12:09:45 -070036#include "core_validation.h"
Chris Forbes47567b72017-06-09 12:09:45 -070037#include "shader_validation.h"
Chris Forbes4ae55b32017-06-09 14:42:56 -070038#include "spirv-tools/libspirv.h"
Chris Forbes9a61e082017-07-24 15:35:29 -070039#include "xxhash.h"
Chris Forbes47567b72017-06-09 12:09:45 -070040
// Classification flags for a format's fundamental numeric type, used to match
// pipeline/attachment formats against shader interface variable types.
// Values are bit flags so one format may match several shader-side types
// (e.g. depth+stencil matches both float and uint).
enum FORMAT_TYPE {
    FORMAT_TYPE_FLOAT = 1,  // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
    FORMAT_TYPE_SINT = 2,
    FORMAT_TYPE_UINT = 4,
};

// (location, component) pair identifying a slot in a shader stage interface.
typedef std::pair<unsigned, unsigned> location_t;
48
// One variable (or interface-block member) appearing in a shader stage
// interface; collected into maps keyed by (location, component).
struct interface_var {
    uint32_t id;       // SPIR-V result id of the OpVariable
    uint32_t type_id;  // SPIR-V type id of the variable or block member
    uint32_t offset;   // location offset, for types consuming multiple locations
    bool is_patch;
    bool is_block_member;
    bool is_relaxed_precision;
    // TODO: collect the name, too? Isn't required to be present.
};
58
// Static per-stage attributes consulted when validating interfaces between
// pipeline stages.
struct shader_stage_attributes {
    char const *const name;  // human-readable stage name for error messages
    bool arrayed_input;      // inputs are per-vertex arrays (tess ctrl/eval, geometry)
    bool arrayed_output;     // outputs are per-vertex arrays (tess ctrl)
    VkShaderStageFlags stage;
};

// Ordered to match graphics pipeline stage order.
static shader_stage_attributes shader_stage_attribs[] = {
    {"vertex shader", false, false, VK_SHADER_STAGE_VERTEX_BIT},
    {"tessellation control shader", true, true, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
    {"tessellation evaluation shader", true, false, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
    {"geometry shader", true, false, VK_SHADER_STAGE_GEOMETRY_BIT},
    {"fragment shader", false, false, VK_SHADER_STAGE_FRAGMENT_BIT},
};
73
74// SPIRV utility functions
Mark Lobodzinski3c59d972019-04-25 11:28:14 -060075void SHADER_MODULE_STATE::BuildDefIndex() {
Chris Forbes47567b72017-06-09 12:09:45 -070076 for (auto insn : *this) {
77 switch (insn.opcode()) {
78 // Types
79 case spv::OpTypeVoid:
80 case spv::OpTypeBool:
81 case spv::OpTypeInt:
82 case spv::OpTypeFloat:
83 case spv::OpTypeVector:
84 case spv::OpTypeMatrix:
85 case spv::OpTypeImage:
86 case spv::OpTypeSampler:
87 case spv::OpTypeSampledImage:
88 case spv::OpTypeArray:
89 case spv::OpTypeRuntimeArray:
90 case spv::OpTypeStruct:
91 case spv::OpTypeOpaque:
92 case spv::OpTypePointer:
93 case spv::OpTypeFunction:
94 case spv::OpTypeEvent:
95 case spv::OpTypeDeviceEvent:
96 case spv::OpTypeReserveId:
97 case spv::OpTypeQueue:
98 case spv::OpTypePipe:
Shannon McPherson0fa28232018-11-01 11:59:02 -060099 case spv::OpTypeAccelerationStructureNV:
Jeff Bolze4356752019-03-07 11:23:46 -0600100 case spv::OpTypeCooperativeMatrixNV:
Chris Forbes47567b72017-06-09 12:09:45 -0700101 def_index[insn.word(1)] = insn.offset();
102 break;
103
104 // Fixed constants
105 case spv::OpConstantTrue:
106 case spv::OpConstantFalse:
107 case spv::OpConstant:
108 case spv::OpConstantComposite:
109 case spv::OpConstantSampler:
110 case spv::OpConstantNull:
111 def_index[insn.word(2)] = insn.offset();
112 break;
113
114 // Specialization constants
115 case spv::OpSpecConstantTrue:
116 case spv::OpSpecConstantFalse:
117 case spv::OpSpecConstant:
118 case spv::OpSpecConstantComposite:
119 case spv::OpSpecConstantOp:
120 def_index[insn.word(2)] = insn.offset();
121 break;
122
123 // Variables
124 case spv::OpVariable:
125 def_index[insn.word(2)] = insn.offset();
126 break;
127
128 // Functions
129 case spv::OpFunction:
130 def_index[insn.word(2)] = insn.offset();
131 break;
132
133 default:
134 // We don't care about any other defs for now.
135 break;
136 }
137 }
138}
139
Jeff Bolz105d6492018-09-29 15:46:44 -0500140unsigned ExecutionModelToShaderStageFlagBits(unsigned mode) {
141 switch (mode) {
142 case spv::ExecutionModelVertex:
143 return VK_SHADER_STAGE_VERTEX_BIT;
144 case spv::ExecutionModelTessellationControl:
145 return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
146 case spv::ExecutionModelTessellationEvaluation:
147 return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
148 case spv::ExecutionModelGeometry:
149 return VK_SHADER_STAGE_GEOMETRY_BIT;
150 case spv::ExecutionModelFragment:
151 return VK_SHADER_STAGE_FRAGMENT_BIT;
152 case spv::ExecutionModelGLCompute:
153 return VK_SHADER_STAGE_COMPUTE_BIT;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600154 case spv::ExecutionModelRayGenerationNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700155 return VK_SHADER_STAGE_RAYGEN_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600156 case spv::ExecutionModelAnyHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700157 return VK_SHADER_STAGE_ANY_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600158 case spv::ExecutionModelClosestHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700159 return VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600160 case spv::ExecutionModelMissNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700161 return VK_SHADER_STAGE_MISS_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600162 case spv::ExecutionModelIntersectionNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700163 return VK_SHADER_STAGE_INTERSECTION_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600164 case spv::ExecutionModelCallableNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700165 return VK_SHADER_STAGE_CALLABLE_BIT_NV;
Jeff Bolz105d6492018-09-29 15:46:44 -0500166 case spv::ExecutionModelTaskNV:
167 return VK_SHADER_STAGE_TASK_BIT_NV;
168 case spv::ExecutionModelMeshNV:
169 return VK_SHADER_STAGE_MESH_BIT_NV;
170 default:
171 return 0;
172 }
173}
174
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600175static spirv_inst_iter FindEntrypoint(SHADER_MODULE_STATE const *src, char const *name, VkShaderStageFlagBits stageBits) {
Chris Forbes47567b72017-06-09 12:09:45 -0700176 for (auto insn : *src) {
177 if (insn.opcode() == spv::OpEntryPoint) {
178 auto entrypointName = (char const *)&insn.word(3);
Jeff Bolz105d6492018-09-29 15:46:44 -0500179 auto executionModel = insn.word(1);
180 auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
Chris Forbes47567b72017-06-09 12:09:45 -0700181
182 if (!strcmp(entrypointName, name) && (entrypointStageBits & stageBits)) {
183 return insn;
184 }
185 }
186 }
187
188 return src->end();
189}
190
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600191static char const *StorageClassName(unsigned sc) {
Chris Forbes47567b72017-06-09 12:09:45 -0700192 switch (sc) {
193 case spv::StorageClassInput:
194 return "input";
195 case spv::StorageClassOutput:
196 return "output";
197 case spv::StorageClassUniformConstant:
198 return "const uniform";
199 case spv::StorageClassUniform:
200 return "uniform";
201 case spv::StorageClassWorkgroup:
202 return "workgroup local";
203 case spv::StorageClassCrossWorkgroup:
204 return "workgroup global";
205 case spv::StorageClassPrivate:
206 return "private global";
207 case spv::StorageClassFunction:
208 return "function";
209 case spv::StorageClassGeneric:
210 return "generic";
211 case spv::StorageClassAtomicCounter:
212 return "atomic counter";
213 case spv::StorageClassImage:
214 return "image";
215 case spv::StorageClassPushConstant:
216 return "push constant";
Chris Forbes9f89d752018-03-07 12:57:48 -0800217 case spv::StorageClassStorageBuffer:
218 return "storage buffer";
Chris Forbes47567b72017-06-09 12:09:45 -0700219 default:
220 return "unknown";
221 }
222}
223
224// Get the value of an integral constant
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600225unsigned GetConstantValue(SHADER_MODULE_STATE const *src, unsigned id) {
Chris Forbes47567b72017-06-09 12:09:45 -0700226 auto value = src->get_def(id);
227 assert(value != src->end());
228
229 if (value.opcode() != spv::OpConstant) {
230 // TODO: Either ensure that the specialization transform is already performed on a module we're
231 // considering here, OR -- specialize on the fly now.
232 return 1;
233 }
234
235 return value.word(3);
236}
237
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600238static void DescribeTypeInner(std::ostringstream &ss, SHADER_MODULE_STATE const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700239 auto insn = src->get_def(type);
240 assert(insn != src->end());
241
242 switch (insn.opcode()) {
243 case spv::OpTypeBool:
244 ss << "bool";
245 break;
246 case spv::OpTypeInt:
247 ss << (insn.word(3) ? 's' : 'u') << "int" << insn.word(2);
248 break;
249 case spv::OpTypeFloat:
250 ss << "float" << insn.word(2);
251 break;
252 case spv::OpTypeVector:
253 ss << "vec" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600254 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700255 break;
256 case spv::OpTypeMatrix:
257 ss << "mat" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600258 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700259 break;
260 case spv::OpTypeArray:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600261 ss << "arr[" << GetConstantValue(src, insn.word(3)) << "] of ";
262 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700263 break;
Chris Forbes062f1222018-08-21 15:34:15 -0700264 case spv::OpTypeRuntimeArray:
265 ss << "runtime arr[] of ";
266 DescribeTypeInner(ss, src, insn.word(2));
267 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700268 case spv::OpTypePointer:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600269 ss << "ptr to " << StorageClassName(insn.word(2)) << " ";
270 DescribeTypeInner(ss, src, insn.word(3));
Chris Forbes47567b72017-06-09 12:09:45 -0700271 break;
272 case spv::OpTypeStruct: {
273 ss << "struct of (";
274 for (unsigned i = 2; i < insn.len(); i++) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600275 DescribeTypeInner(ss, src, insn.word(i));
Chris Forbes47567b72017-06-09 12:09:45 -0700276 if (i == insn.len() - 1) {
277 ss << ")";
278 } else {
279 ss << ", ";
280 }
281 }
282 break;
283 }
284 case spv::OpTypeSampler:
285 ss << "sampler";
286 break;
287 case spv::OpTypeSampledImage:
288 ss << "sampler+";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600289 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700290 break;
291 case spv::OpTypeImage:
292 ss << "image(dim=" << insn.word(3) << ", sampled=" << insn.word(7) << ")";
293 break;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600294 case spv::OpTypeAccelerationStructureNV:
Jeff Bolz105d6492018-09-29 15:46:44 -0500295 ss << "accelerationStruture";
296 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700297 default:
298 ss << "oddtype";
299 break;
300 }
301}
302
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600303static std::string DescribeType(SHADER_MODULE_STATE const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700304 std::ostringstream ss;
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600305 DescribeTypeInner(ss, src, type);
Chris Forbes47567b72017-06-09 12:09:45 -0700306 return ss.str();
307}
308
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600309static bool IsNarrowNumericType(spirv_inst_iter type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700310 if (type.opcode() != spv::OpTypeInt && type.opcode() != spv::OpTypeFloat) return false;
311 return type.word(2) < 64;
312}
313
// Structural comparison of two types drawn from two different modules
// (producer `a`, consumer `b`), for stage-interface matching.
// a_arrayed/b_arrayed: the corresponding side has an implicit outer
// per-vertex array level that must be peeled before comparing.
// relaxed: permit the producer's vector to be wider than the consumer's
// narrow-numeric type (extra components are dropped), per relaxed interface
// matching rules.
static bool TypesMatch(SHADER_MODULE_STATE const *a, SHADER_MODULE_STATE const *b, unsigned a_type, unsigned b_type, bool a_arrayed,
                       bool b_arrayed, bool relaxed) {
    // Walk two type trees together, and complain about differences
    auto a_insn = a->get_def(a_type);
    auto b_insn = b->get_def(b_type);
    assert(a_insn != a->end());
    assert(b_insn != b->end());

    // Ignore runtime-sized arrays-- they cannot appear in these interfaces.

    // Peel the implicit per-vertex array level from either side before structural comparison.
    if (a_arrayed && a_insn.opcode() == spv::OpTypeArray) {
        return TypesMatch(a, b, a_insn.word(2), b_type, false, b_arrayed, relaxed);
    }

    if (b_arrayed && b_insn.opcode() == spv::OpTypeArray) {
        // We probably just found the extra level of arrayness in b_type: compare the type inside it to a_type
        return TypesMatch(a, b, a_type, b_insn.word(2), a_arrayed, false, relaxed);
    }

    // Relaxed matching: a producer vector may feed a narrower consumer scalar/short-vector;
    // compare the vector's element type against b directly.
    if (a_insn.opcode() == spv::OpTypeVector && relaxed && IsNarrowNumericType(b_insn)) {
        return TypesMatch(a, b, a_insn.word(2), b_type, a_arrayed, b_arrayed, false);
    }

    if (a_insn.opcode() != b_insn.opcode()) {
        return false;
    }

    if (a_insn.opcode() == spv::OpTypePointer) {
        // Match on pointee type. storage class is expected to differ
        return TypesMatch(a, b, a_insn.word(3), b_insn.word(3), a_arrayed, b_arrayed, relaxed);
    }

    if (a_arrayed || b_arrayed) {
        // If we havent resolved array-of-verts by here, we're not going to.
        return false;
    }

    switch (a_insn.opcode()) {
        case spv::OpTypeBool:
            return true;
        case spv::OpTypeInt:
            // Match on width, signedness
            return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeFloat:
            // Match on width
            return a_insn.word(2) == b_insn.word(2);
        case spv::OpTypeVector:
            // Match on element type, count.
            if (!TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false)) return false;
            if (relaxed && IsNarrowNumericType(a->get_def(a_insn.word(2)))) {
                // Relaxed: producer may provide more components than the consumer reads.
                return a_insn.word(3) >= b_insn.word(3);
            } else {
                return a_insn.word(3) == b_insn.word(3);
            }
        case spv::OpTypeMatrix:
            // Match on element type, count.
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeArray:
            // Match on element type, count. these all have the same layout. we don't get here if b_arrayed. This differs from
            // vector & matrix types in that the array size is the id of a constant instruction, * not a literal within OpTypeArray
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   GetConstantValue(a, a_insn.word(3)) == GetConstantValue(b, b_insn.word(3));
        case spv::OpTypeStruct:
            // Match on all element types
            {
                if (a_insn.len() != b_insn.len()) {
                    return false;  // Structs cannot match if member counts differ
                }

                for (unsigned i = 2; i < a_insn.len(); i++) {
                    if (!TypesMatch(a, b, a_insn.word(i), b_insn.word(i), a_arrayed, b_arrayed, false)) {
                        return false;
                    }
                }

                return true;
            }
        default:
            // Remaining types are CLisms, or may not appear in the interfaces we are interested in. Just claim no match.
            return false;
    }
}
397
// Look `id` up in `map`, returning `def` when the key is absent.
static unsigned ValueOrDefault(std::unordered_map<unsigned, unsigned> const &map, unsigned id, unsigned def) {
    const auto found = map.find(id);
    return (found != map.end()) ? found->second : def;
}
405
// Returns the number of interface locations the type with id `type` consumes.
// strip_array_level: treat the outermost array level as a per-vertex array
// (one location per element type, not per array element).
static unsigned GetLocationsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
    auto insn = src->get_def(type);
    assert(insn != src->end());

    switch (insn.opcode()) {
        case spv::OpTypePointer:
            // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
            // pointers around.
            return GetLocationsConsumedByType(src, insn.word(3), strip_array_level);
        case spv::OpTypeArray:
            if (strip_array_level) {
                return GetLocationsConsumedByType(src, insn.word(2), false);
            } else {
                // Total = array length (id of a constant, word 3) * locations per element.
                return GetConstantValue(src, insn.word(3)) * GetLocationsConsumedByType(src, insn.word(2), false);
            }
        case spv::OpTypeMatrix:
            // Num locations is the dimension * element size
            return insn.word(3) * GetLocationsConsumedByType(src, insn.word(2), false);
        case spv::OpTypeVector: {
            auto scalar_type = src->get_def(insn.word(2));
            auto bit_width =
                (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;

            // Locations are 128-bit wide; 3- and 4-component vectors of 64 bit types require two.
            return (bit_width * insn.word(3) + 127) / 128;
        }
        default:
            // Everything else is just 1.
            return 1;

            // TODO: extend to handle 64bit scalar types, whose vectors may need multiple locations.
    }
}
439
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600440static unsigned GetComponentsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +0200441 auto insn = src->get_def(type);
442 assert(insn != src->end());
443
444 switch (insn.opcode()) {
445 case spv::OpTypePointer:
446 // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
447 // pointers around.
448 return GetComponentsConsumedByType(src, insn.word(3), strip_array_level);
449 case spv::OpTypeStruct: {
450 uint32_t sum = 0;
451 for (uint32_t i = 2; i < insn.len(); i++) { // i=2 to skip word(0) and word(1)=ID of struct
452 sum += GetComponentsConsumedByType(src, insn.word(i), false);
453 }
454 return sum;
455 }
456 case spv::OpTypeArray: {
457 uint32_t sum = 0;
458 for (uint32_t i = 2; i < insn.len(); i++) {
459 sum += GetComponentsConsumedByType(src, insn.word(i), false);
460 }
461 return sum;
462 }
463 case spv::OpTypeMatrix:
464 // Num locations is the dimension * element size
465 return insn.word(3) * GetComponentsConsumedByType(src, insn.word(2), false);
466 case spv::OpTypeVector: {
467 auto scalar_type = src->get_def(insn.word(2));
468 auto bit_width =
469 (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
470 // One component is 32-bit
471 return (bit_width * insn.word(3) + 31) / 32;
472 }
473 case spv::OpTypeFloat: {
474 auto bit_width = insn.word(2);
475 return (bit_width + 31) / 32;
476 }
477 case spv::OpTypeInt: {
478 auto bit_width = insn.word(2);
479 return (bit_width + 31) / 32;
480 }
481 case spv::OpConstant:
482 return GetComponentsConsumedByType(src, insn.word(1), false);
483 default:
484 return 0;
485 }
486}
487
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600488static unsigned GetLocationsConsumedByFormat(VkFormat format) {
Chris Forbes47567b72017-06-09 12:09:45 -0700489 switch (format) {
490 case VK_FORMAT_R64G64B64A64_SFLOAT:
491 case VK_FORMAT_R64G64B64A64_SINT:
492 case VK_FORMAT_R64G64B64A64_UINT:
493 case VK_FORMAT_R64G64B64_SFLOAT:
494 case VK_FORMAT_R64G64B64_SINT:
495 case VK_FORMAT_R64G64B64_UINT:
496 return 2;
497 default:
498 return 1;
499 }
500}
501
// Characterizes a VkFormat as FORMAT_TYPE_* flags for comparison against the
// shader-side characterization from GetFundamentalType. Depth+stencil formats
// report both float and uint since they may be read either way; check order
// matters because the FormatIs* predicates overlap.
static unsigned GetFormatType(VkFormat fmt) {
    if (FormatIsSInt(fmt)) return FORMAT_TYPE_SINT;
    if (FormatIsUInt(fmt)) return FORMAT_TYPE_UINT;
    if (FormatIsDepthAndStencil(fmt)) return FORMAT_TYPE_FLOAT | FORMAT_TYPE_UINT;
    if (fmt == VK_FORMAT_UNDEFINED) return 0;
    // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader.
    return FORMAT_TYPE_FLOAT;
}
510
// characterizes a SPIR-V type appearing in an interface to a FF stage, for comparison to a VkFormat's characterization above.
// also used for input attachments, as we statically know their format.
// Returns a FORMAT_TYPE_* value, or 0 for types with no numeric fundamental type.
static unsigned GetFundamentalType(SHADER_MODULE_STATE const *src, unsigned type) {
    auto insn = src->get_def(type);
    assert(insn != src->end());

    switch (insn.opcode()) {
        case spv::OpTypeInt:
            // word(3) = signedness
            return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
        case spv::OpTypeFloat:
            return FORMAT_TYPE_FLOAT;
        // Composite and image types: classify by the component/sampled type (word 2).
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeImage:
            return GetFundamentalType(src, insn.word(2));
        case spv::OpTypePointer:
            // Pointee type is word 3.
            return GetFundamentalType(src, insn.word(3));

        default:
            return 0;
    }
}
535
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600536static uint32_t GetShaderStageId(VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -0700537 uint32_t bit_pos = uint32_t(u_ffs(stage));
538 return bit_pos - 1;
539}
540
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600541static spirv_inst_iter GetStructType(SHADER_MODULE_STATE const *src, spirv_inst_iter def, bool is_array_of_verts) {
Chris Forbes47567b72017-06-09 12:09:45 -0700542 while (true) {
543 if (def.opcode() == spv::OpTypePointer) {
544 def = src->get_def(def.word(3));
545 } else if (def.opcode() == spv::OpTypeArray && is_array_of_verts) {
546 def = src->get_def(def.word(2));
547 is_array_of_verts = false;
548 } else if (def.opcode() == spv::OpTypeStruct) {
549 return def;
550 } else {
551 return src->end();
552 }
553 }
554}
555
// If the variable `id` (of type `type_id`) is an interface block (a struct
// decorated Block, per `blocks`), emit one interface_var per member-location
// into `out`, keyed by (location, component), and return true. Returns false
// when the variable is not an interface block (caller handles it as a plain
// variable instead).
static bool CollectInterfaceBlockMembers(SHADER_MODULE_STATE const *src, std::map<location_t, interface_var> *out,
                                         std::unordered_map<unsigned, unsigned> const &blocks, bool is_array_of_verts, uint32_t id,
                                         uint32_t type_id, bool is_patch, int /*first_location*/) {
    // Walk down the type_id presented, trying to determine whether it's actually an interface block.
    auto type = GetStructType(src, src->get_def(type_id), is_array_of_verts && !is_patch);
    if (type == src->end() || blocks.find(type.word(1)) == blocks.end()) {
        // This isn't an interface block.
        return false;
    }

    std::unordered_map<unsigned, unsigned> member_components;
    std::unordered_map<unsigned, unsigned> member_relaxed_precision;
    std::unordered_map<unsigned, unsigned> member_patch;

    // Walk all the OpMemberDecorate for type's result id -- first pass, collect components.
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);

            if (insn.word(3) == spv::DecorationComponent) {
                unsigned component = insn.word(4);
                member_components[member_index] = component;
            }

            if (insn.word(3) == spv::DecorationRelaxedPrecision) {
                member_relaxed_precision[member_index] = 1;
            }

            if (insn.word(3) == spv::DecorationPatch) {
                member_patch[member_index] = 1;
            }
        }
    }

    // TODO: correctly handle location assignment from outside

    // Second pass -- produce the output, from Location decorations
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);
            unsigned member_type_id = type.word(2 + member_index);

            if (insn.word(3) == spv::DecorationLocation) {
                unsigned location = insn.word(4);
                unsigned num_locations = GetLocationsConsumedByType(src, member_type_id, false);
                auto component_it = member_components.find(member_index);
                unsigned component = component_it == member_components.end() ? 0 : component_it->second;
                bool is_relaxed_precision = member_relaxed_precision.find(member_index) != member_relaxed_precision.end();
                bool member_is_patch = is_patch || member_patch.count(member_index) > 0;

                // Emit one entry for each location the member covers.
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    // TODO: member index in interface_var too?
                    v.type_id = member_type_id;
                    v.offset = offset;
                    v.is_patch = member_is_patch;
                    v.is_block_member = true;
                    v.is_relaxed_precision = is_relaxed_precision;
                    (*out)[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return true;
}
623
Ari Suonpaa696b3432019-03-11 14:02:57 +0200624static std::vector<uint32_t> FindEntrypointInterfaces(spirv_inst_iter entrypoint) {
625 std::vector<uint32_t> interfaces;
626 // Find the end of the entrypoint's name string. additional zero bytes follow the actual null terminator, to fill out the
627 // rest of the word - so we only need to look at the last byte in the word to determine which word contains the terminator.
628 uint32_t word = 3;
629 while (entrypoint.word(word) & 0xff000000u) {
630 ++word;
631 }
632 ++word;
633
634 for (; word < entrypoint.len(); word++) interfaces.push_back(entrypoint.word(word));
635
636 return interfaces;
637}
638
// Collect all user-defined (non-builtin) interface variables of the entrypoint
// in the given storage class, keyed by (location, component). Interface blocks
// are expanded into one entry per member-location; plain variables consuming
// several locations get one entry per location.
static std::map<location_t, interface_var> CollectInterfaceByLocation(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
                                                                      spv::StorageClass sinterface, bool is_array_of_verts) {
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;
    std::unordered_map<unsigned, unsigned> var_components;
    std::unordered_map<unsigned, unsigned> blocks;
    std::unordered_map<unsigned, unsigned> var_patch;
    std::unordered_map<unsigned, unsigned> var_relaxed_precision;

    // First pass: gather all OpDecorate decorations by target id.
    for (auto insn : *src) {
        // We consider two interface models: SSO rendezvous-by-location, and builtins. Complain about anything that
        // fits neither model.
        if (insn.opcode() == spv::OpDecorate) {
            if (insn.word(2) == spv::DecorationLocation) {
                var_locations[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBuiltIn) {
                var_builtins[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationComponent) {
                var_components[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBlock) {
                blocks[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationPatch) {
                var_patch[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationRelaxedPrecision) {
                var_relaxed_precision[insn.word(1)] = 1;
            }
        }
    }

    // TODO: handle grouped decorations
    // TODO: handle index=1 dual source outputs from FS -- two vars will have the same location, and we DON'T want to clobber.

    std::map<location_t, interface_var> out;

    // Second pass: walk the entrypoint's interface list and emit entries.
    for (uint32_t word : FindEntrypointInterfaces(entrypoint)) {
        auto insn = src->get_def(word);
        assert(insn != src->end());
        assert(insn.opcode() == spv::OpVariable);

        if (insn.word(3) == static_cast<uint32_t>(sinterface)) {
            unsigned id = insn.word(2);
            unsigned type = insn.word(1);

            // -1 sentinel (as unsigned, stored into int) marks "no decoration present".
            int location = ValueOrDefault(var_locations, id, static_cast<unsigned>(-1));
            int builtin = ValueOrDefault(var_builtins, id, static_cast<unsigned>(-1));
            unsigned component = ValueOrDefault(var_components, id, 0);  // Unspecified is OK, is 0
            bool is_patch = var_patch.find(id) != var_patch.end();
            bool is_relaxed_precision = var_relaxed_precision.find(id) != var_relaxed_precision.end();

            if (builtin != -1)
                continue;  // builtins are matched by the implementation, not by location
            else if (!CollectInterfaceBlockMembers(src, &out, blocks, is_array_of_verts, id, type, is_patch, location)) {
                // A user-defined interface variable, with a location. Where a variable occupied multiple locations, emit
                // one result for each.
                unsigned num_locations = GetLocationsConsumedByType(src, type, is_array_of_verts && !is_patch);
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    v.type_id = type;
                    v.offset = offset;
                    v.is_patch = is_patch;
                    v.is_relaxed_precision = is_relaxed_precision;
                    out[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return out;
}
719
// Returns the BuiltIn decoration (e.g. spv::BuiltInPosition) for each member of the
// entrypoint's built-in interface block in the given storage class, indexed by struct
// member. Members that carry no BuiltIn decoration are reported as spv::BuiltInMax.
// NOTE(review): if more than one matching built-in block variable exists, members are
// appended for each in turn -- assumes at most one such block per storage class; confirm.
static std::vector<uint32_t> CollectBuiltinBlockMembers(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
                                                        uint32_t storageClass) {
    std::vector<uint32_t> variables;             // Pointer-type ids of candidate interface variables
    std::vector<uint32_t> builtinStructMembers;  // Struct ids that have at least one BuiltIn member decoration
    std::vector<uint32_t> builtinDecorations;    // Ids decorated (directly or via members) as built-in

    // Pass 1: collect all built-in related decorations in the module.
    for (auto insn : *src) {
        switch (insn.opcode()) {
            // Find all built-in member decorations
            case spv::OpMemberDecorate:
                if (insn.word(3) == spv::DecorationBuiltIn) {
                    builtinStructMembers.push_back(insn.word(1));
                }
                break;
            // Find all built-in decorations
            case spv::OpDecorate:
                switch (insn.word(2)) {
                    case spv::DecorationBlock: {
                        // Relies on OpMemberDecorate preceding OpDecorate Block in the
                        // decoration section, per SPIR-V logical layout.
                        uint32_t blockID = insn.word(1);
                        for (auto builtInBlockID : builtinStructMembers) {
                            // Check if one of the members of the block are built-in -> the block is built-in
                            if (blockID == builtInBlockID) {
                                builtinDecorations.push_back(blockID);
                                break;
                            }
                        }
                        break;
                    }
                    case spv::DecorationBuiltIn:
                        builtinDecorations.push_back(insn.word(1));
                        break;
                    default:
                        break;
                }
                break;
            default:
                break;
        }
    }

    // Find all interface variables belonging to the entrypoint and matching the storage class
    for (uint32_t id : FindEntrypointInterfaces(entrypoint)) {
        auto def = src->get_def(id);
        assert(def != src->end());
        assert(def.opcode() == spv::OpVariable);

        // word(1) of OpVariable is its (pointer) result type; word(3) is the storage class.
        if (def.word(3) == storageClass) variables.push_back(def.word(1));
    }

    // Find all members belonging to the builtin block selected
    std::vector<uint32_t> builtinBlockMembers;
    for (auto &var : variables) {
        // Dereference the pointer type: word(3) of OpTypePointer is the pointee type.
        auto def = src->get_def(src->get_def(var).word(3));

        // It could be an array of IO blocks. The element type should be the struct defining the block contents
        if (def.opcode() == spv::OpTypeArray) def = src->get_def(def.word(2));

        // Now find all members belonging to the struct defining the IO block
        if (def.opcode() == spv::OpTypeStruct) {
            for (auto builtInID : builtinDecorations) {
                if (builtInID == def.word(1)) {
                    // OpTypeStruct has len()-2 member-type operands; seed one slot per member.
                    for (int i = 2; i < (int)def.len(); i++)
                        builtinBlockMembers.push_back(spv::BuiltInMax); // Start with undefined builtin for each struct member.
                    // These shouldn't be left after replacing.
                    for (auto insn : *src) {
                        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == builtInID &&
                            insn.word(3) == spv::DecorationBuiltIn) {
                            auto structIndex = insn.word(2);
                            assert(structIndex < builtinBlockMembers.size());
                            // word(4) is the BuiltIn enumerant for this member.
                            builtinBlockMembers[structIndex] = insn.word(4);
                        }
                    }
                }
            }
        }
    }

    return builtinBlockMembers;
}
799
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600800static std::vector<std::pair<uint32_t, interface_var>> CollectInterfaceByInputAttachmentIndex(
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600801 SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids) {
Chris Forbes47567b72017-06-09 12:09:45 -0700802 std::vector<std::pair<uint32_t, interface_var>> out;
803
804 for (auto insn : *src) {
805 if (insn.opcode() == spv::OpDecorate) {
806 if (insn.word(2) == spv::DecorationInputAttachmentIndex) {
807 auto attachment_index = insn.word(3);
808 auto id = insn.word(1);
809
810 if (accessible_ids.count(id)) {
811 auto def = src->get_def(id);
812 assert(def != src->end());
813
814 if (def.opcode() == spv::OpVariable && insn.word(3) == spv::StorageClassUniformConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600815 auto num_locations = GetLocationsConsumedByType(src, def.word(1), false);
Chris Forbes47567b72017-06-09 12:09:45 -0700816 for (unsigned int offset = 0; offset < num_locations; offset++) {
817 interface_var v = {};
818 v.id = id;
819 v.type_id = def.word(1);
820 v.offset = offset;
821 out.emplace_back(attachment_index + offset, v);
822 }
823 }
824 }
825 }
826 }
827 }
828
829 return out;
830}
831
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600832static bool IsWritableDescriptorType(SHADER_MODULE_STATE const *module, uint32_t type_id, bool is_storage_buffer) {
Chris Forbes8af24522018-03-07 11:37:45 -0800833 auto type = module->get_def(type_id);
834
835 // Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
Chris Forbes062f1222018-08-21 15:34:15 -0700836 while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
837 if (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypeRuntimeArray) {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700838 type = module->get_def(type.word(2)); // Element type
Chris Forbes8af24522018-03-07 11:37:45 -0800839 } else {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700840 type = module->get_def(type.word(3)); // Pointee type
Chris Forbes8af24522018-03-07 11:37:45 -0800841 }
842 }
843
844 switch (type.opcode()) {
845 case spv::OpTypeImage: {
846 auto dim = type.word(3);
847 auto sampled = type.word(7);
848 return sampled == 2 && dim != spv::DimSubpassData;
849 }
850
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700851 case spv::OpTypeStruct: {
852 std::unordered_set<unsigned> nonwritable_members;
Chris Forbes8af24522018-03-07 11:37:45 -0800853 for (auto insn : *module) {
854 if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
855 if (insn.word(2) == spv::DecorationBufferBlock) {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700856 // Legacy storage block in the Uniform storage class
857 // has its struct type decorated with BufferBlock.
858 is_storage_buffer = true;
Chris Forbes8af24522018-03-07 11:37:45 -0800859 }
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700860 } else if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1) &&
861 insn.word(3) == spv::DecorationNonWritable) {
862 nonwritable_members.insert(insn.word(2));
Chris Forbes8af24522018-03-07 11:37:45 -0800863 }
864 }
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700865
866 // A buffer is writable if it's either flavor of storage buffer, and has any member not decorated
867 // as nonwritable.
868 return is_storage_buffer && nonwritable_members.size() != type.len() - 2;
869 }
Chris Forbes8af24522018-03-07 11:37:45 -0800870 }
871
872 return false;
873}
874
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600875static std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterfaceByDescriptorSlot(
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600876 debug_report_data const *report_data, SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids,
Chris Forbes8af24522018-03-07 11:37:45 -0800877 bool *has_writable_descriptor) {
Chris Forbes47567b72017-06-09 12:09:45 -0700878 std::unordered_map<unsigned, unsigned> var_sets;
879 std::unordered_map<unsigned, unsigned> var_bindings;
Chris Forbes8af24522018-03-07 11:37:45 -0800880 std::unordered_map<unsigned, unsigned> var_nonwritable;
Chris Forbes47567b72017-06-09 12:09:45 -0700881
882 for (auto insn : *src) {
883 // All variables in the Uniform or UniformConstant storage classes are required to be decorated with both
884 // DecorationDescriptorSet and DecorationBinding.
885 if (insn.opcode() == spv::OpDecorate) {
886 if (insn.word(2) == spv::DecorationDescriptorSet) {
887 var_sets[insn.word(1)] = insn.word(3);
888 }
889
890 if (insn.word(2) == spv::DecorationBinding) {
891 var_bindings[insn.word(1)] = insn.word(3);
892 }
Chris Forbes8af24522018-03-07 11:37:45 -0800893
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700894 // Note: do toplevel DecorationNonWritable out here; it applies to
895 // the OpVariable rather than the type.
Chris Forbes8af24522018-03-07 11:37:45 -0800896 if (insn.word(2) == spv::DecorationNonWritable) {
897 var_nonwritable[insn.word(1)] = 1;
898 }
Chris Forbes47567b72017-06-09 12:09:45 -0700899 }
900 }
901
902 std::vector<std::pair<descriptor_slot_t, interface_var>> out;
903
904 for (auto id : accessible_ids) {
905 auto insn = src->get_def(id);
906 assert(insn != src->end());
907
908 if (insn.opcode() == spv::OpVariable &&
Chris Forbes9f89d752018-03-07 12:57:48 -0800909 (insn.word(3) == spv::StorageClassUniform || insn.word(3) == spv::StorageClassUniformConstant ||
910 insn.word(3) == spv::StorageClassStorageBuffer)) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600911 unsigned set = ValueOrDefault(var_sets, insn.word(2), 0);
912 unsigned binding = ValueOrDefault(var_bindings, insn.word(2), 0);
Chris Forbes47567b72017-06-09 12:09:45 -0700913
914 interface_var v = {};
915 v.id = insn.word(2);
916 v.type_id = insn.word(1);
917 out.emplace_back(std::make_pair(set, binding), v);
Chris Forbes8af24522018-03-07 11:37:45 -0800918
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700919 if (var_nonwritable.find(id) == var_nonwritable.end() &&
920 IsWritableDescriptorType(src, insn.word(1), insn.word(3) == spv::StorageClassStorageBuffer)) {
Chris Forbes8af24522018-03-07 11:37:45 -0800921 *has_writable_descriptor = true;
922 }
Chris Forbes47567b72017-06-09 12:09:45 -0700923 }
924 }
925
926 return out;
927}
928
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600929static bool ValidateViConsistency(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi) {
Chris Forbes47567b72017-06-09 12:09:45 -0700930 // Walk the binding descriptions, which describe the step rate and stride of each vertex buffer. Each binding should
931 // be specified only once.
932 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
933 bool skip = false;
934
935 for (unsigned i = 0; i < vi->vertexBindingDescriptionCount; i++) {
936 auto desc = &vi->pVertexBindingDescriptions[i];
937 auto &binding = bindings[desc->binding];
938 if (binding) {
Dave Houlton78d09922018-05-17 15:48:45 -0600939 // TODO: "VUID-VkGraphicsPipelineCreateInfo-pStages-00742" perhaps?
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -0600940 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -0600941 kVUID_Core_Shader_InconsistentVi, "Duplicate vertex input binding descriptions for binding %d",
Chris Forbes47567b72017-06-09 12:09:45 -0700942 desc->binding);
943 } else {
944 binding = desc;
945 }
946 }
947
948 return skip;
949}
950
// Cross-checks the pipeline's vertex input attribute descriptions against the vertex
// shader's declared input interface: warns on attributes the shader never reads,
// errors on shader inputs no attribute provides, and errors on fundamental-type
// mismatches between the two. Returns true if a triggered message requests skip.
static bool ValidateViAgainstVsInputs(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi,
                                      SHADER_MODULE_STATE const *vs, spirv_inst_iter entrypoint) {
    bool skip = false;

    auto inputs = CollectInterfaceByLocation(vs, entrypoint, spv::StorageClassInput, false);

    // Build index by location
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->vertexAttributeDescriptionCount; i++) {
            // A wide-format attribute may occupy several consecutive locations;
            // index each consumed location back to the same description.
            auto num_locations = GetLocationsConsumedByFormat(vi->pVertexAttributeDescriptions[i].format);
            for (auto j = 0u; j < num_locations; j++) {
                attribs[vi->pVertexAttributeDescriptions[i].location + j] = &vi->pVertexAttributeDescriptions[i];
            }
        }
    }

    // Merge-walk both location-sorted sequences simultaneously.
    auto it_a = attribs.begin();
    auto it_b = inputs.begin();
    // 'used' carries forward whether the previous location's attribute was consumed,
    // so an attribute spanning several locations isn't reported per-location.
    bool used = false;

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first.first;

        if (!a_at_end && (b_at_end || a_first < b_first)) {
            // Attribute with no matching shader input: performance warning only,
            // and only if it wasn't consumed at an earlier location.
            if (!used &&
                log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                        "Vertex attribute at location %d not consumed by vertex shader", a_first)) {
                skip = true;
            }
            used = false;
            it_a++;
        } else if (!b_at_end && (a_at_end || b_first < a_first)) {
            // Shader input with no attribute supplying it: undefined values -> error.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                            "Vertex shader consumes input at location %d but not provided", b_first);
            it_b++;
        } else {
            // Locations match: verify the attribute's fundamental type (int/uint/float)
            // is compatible with the shader input's type.
            unsigned attrib_type = GetFormatType(it_a->second->format);
            unsigned input_type = GetFundamentalType(vs, it_b->second.type_id);

            // Type checking
            if (!(attrib_type & input_type)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Attribute type of `%s` at location %d does not match vertex shader input type of `%s`",
                                string_VkFormat(it_a->second->format), a_first, DescribeType(vs, it_b->second.type_id).c_str());
            }

            // OK!
            used = true;
            it_b++;
        }
    }

    return skip;
}
1012
// Cross-checks the fragment shader's output interface against the subpass's color
// attachments: warns on outputs with no attachment, warns on unwritten attachments
// with a non-zero color write mask, warns on type mismatches, and errors when
// alpha-to-coverage is enabled but location 0 has no alpha component.
static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_data, SHADER_MODULE_STATE const *fs,
                                               spirv_inst_iter entrypoint, PIPELINE_STATE const *pipeline, uint32_t subpass_index) {
    auto rpci = pipeline->rp_state->createInfo.ptr();

    // Index this subpass's usable color attachments by color attachment slot,
    // skipping unused slots and undefined formats.
    std::map<uint32_t, VkFormat> color_attachments;
    auto subpass = rpci->pSubpasses[subpass_index];
    for (auto i = 0u; i < subpass.colorAttachmentCount; ++i) {
        uint32_t attachment = subpass.pColorAttachments[i].attachment;
        if (attachment == VK_ATTACHMENT_UNUSED) continue;
        if (rpci->pAttachments[attachment].format != VK_FORMAT_UNDEFINED) {
            color_attachments[i] = rpci->pAttachments[attachment].format;
        }
    }

    bool skip = false;

    // TODO: dual source blend index (spv::DecIndex, zero if not provided)

    auto outputs = CollectInterfaceByLocation(fs, entrypoint, spv::StorageClassOutput, false);

    auto it_a = outputs.begin();
    auto it_b = color_attachments.begin();
    // 'used' marks that the current attachment was matched by an output, so a
    // multi-slot output doesn't trigger a false "not written" on the next pass.
    bool used = false;
    bool alphaToCoverageEnabled = pipeline->graphicsPipelineCI.pMultisampleState != NULL &&
                                  pipeline->graphicsPipelineCI.pMultisampleState->alphaToCoverageEnable == VK_TRUE;
    bool locationZeroHasAlpha = false;

    // Walk attachment list and outputs together

    while ((outputs.size() > 0 && it_a != outputs.end()) || (color_attachments.size() > 0 && it_b != color_attachments.end())) {
        bool a_at_end = outputs.size() == 0 || it_a == outputs.end();
        bool b_at_end = color_attachments.size() == 0 || it_b == color_attachments.end();

        // Alpha-to-coverage needs a 4-component (i.e. has-alpha) output at location 0.
        if (!a_at_end && it_a->first.first == 0 && fs->get_def(it_a->second.type_id) != fs->end() &&
            GetComponentsConsumedByType(fs, it_a->second.type_id, false) == 4)
            locationZeroHasAlpha = true;

        if (!a_at_end && (b_at_end || it_a->first.first < it_b->first)) {
            // Shader output with no attachment. Location 0 is exempt when alpha-to-coverage
            // is on, since its alpha feeds coverage even without an attachment.
            if (!alphaToCoverageEnabled || it_a->first.first != 0) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                                "fragment shader writes to output location %d with no matching attachment", it_a->first.first);
            }
            it_a++;
        } else if (!b_at_end && (a_at_end || it_a->first.first > it_b->first)) {
            // Only complain if there are unmasked channels for this attachment. If the writemask is 0, it's acceptable for the
            // shader to not produce a matching output.
            if (!used) {
                if (pipeline->attachments[it_b->first].colorWriteMask != 0) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                                    "Attachment %d not written by fragment shader; undefined values will be written to attachment",
                                    it_b->first);
                }
            }
            used = false;
            it_b++;
        } else {
            // Locations match: verify the fundamental types are compatible.
            unsigned output_type = GetFundamentalType(fs, it_a->second.type_id);
            unsigned att_type = GetFormatType(it_b->second);

            // Type checking
            if (!(output_type & att_type)) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                    "Attachment %d of type `%s` does not match fragment shader output type of `%s`; resulting values are undefined",
                    it_b->first, string_VkFormat(it_b->second), DescribeType(fs, it_a->second.type_id).c_str());
            }

            // OK!
            // Deliberately only advance the output iterator here; the attachment is
            // consumed (flagged 'used') on the next iteration's second branch.
            it_a++;
            used = true;
        }
    }

    if (alphaToCoverageEnabled && !locationZeroHasAlpha) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_NoAlphaAtLocation0WithAlphaToCoverage,
                        "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
    }

    return skip;
}
1097
// For PointSize analysis we need to know if the variable decorated with the PointSize built-in was actually written to.
// This function examines instructions in the static call tree for a write to this variable.
// 'builtin_instr' is the OpDecorate/OpMemberDecorate instruction that carried the PointSize
// decoration; 'entrypoint' is the OpEntryPoint instruction whose call tree is searched.
static bool IsPointSizeWritten(SHADER_MODULE_STATE const *src, spirv_inst_iter builtin_instr, spirv_inst_iter entrypoint) {
    auto type = builtin_instr.opcode();
    // target_id starts as the decorated id (struct type for OpMemberDecorate, variable
    // for OpDecorate) and is progressively rewritten down the pointer/variable/access
    // chain toward the id an OpStore would actually write.
    uint32_t target_id = builtin_instr.word(1);
    bool init_complete = false;

    if (type == spv::OpMemberDecorate) {
        // Built-in is part of a structure -- examine instructions up to first function body to get initial IDs
        auto insn = entrypoint;
        while (!init_complete && (insn.opcode() != spv::OpFunction)) {
            switch (insn.opcode()) {
                case spv::OpTypePointer:
                    // Pointer-to-our-struct in the Output storage class: follow to the pointer type id.
                    if ((insn.word(3) == target_id) && (insn.word(2) == spv::StorageClassOutput)) {
                        target_id = insn.word(1);
                    }
                    break;
                case spv::OpVariable:
                    // Variable of that pointer type: its result id is what stores will target.
                    if (insn.word(1) == target_id) {
                        target_id = insn.word(2);
                        init_complete = true;
                    }
                    break;
            }
            insn++;
        }
    }

    // No output variable of the decorated struct type exists -> it can't be written.
    if (!init_complete && (type == spv::OpMemberDecorate)) return false;

    bool found_write = false;
    std::unordered_set<uint32_t> worklist;
    worklist.insert(entrypoint.word(2));  // Entrypoint's function id

    // Follow instructions in call graph looking for writes to target
    while (!worklist.empty() && !found_write) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            continue;
        }

        if (insn.opcode() == spv::OpFunction) {
            // Scan body of function looking for other function calls or items in our ID chain
            while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                switch (insn.opcode()) {
                    case spv::OpAccessChain:
                        if (insn.word(3) == target_id) {
                            if (type == spv::OpMemberDecorate) {
                                // For a struct member, the access chain must select the
                                // decorated member index before we track its result id.
                                auto value = GetConstantValue(src, insn.word(4));
                                if (value == builtin_instr.word(2)) {
                                    target_id = insn.word(2);
                                }
                            } else {
                                target_id = insn.word(2);
                            }
                        }
                        break;
                    case spv::OpStore:
                        // A store whose pointer operand is our tracked id is the write we seek.
                        if (insn.word(1) == target_id) {
                            found_write = true;
                        }
                        break;
                    case spv::OpFunctionCall:
                        worklist.insert(insn.word(3));
                        break;
                }
            }
        }
    }
    return found_write;
}
1173
// For some analyses, we need to know about all ids referenced by the static call tree of a particular entrypoint. This is
// important for identifying the set of shader resources actually used by an entrypoint, for example.
// Note: we only explore parts of the image which might actually contain ids we care about for the above analyses.
// - NOT the shader input/output interfaces.
//
// TODO: The set of interesting opcodes here was determined by eyeballing the SPIRV spec. It might be worth
// converting parts of this to be generated from the machine-readable spec instead.
static std::unordered_set<uint32_t> MarkAccessibleIds(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) {
    std::unordered_set<uint32_t> ids;       // Result: every id reachable from the entrypoint
    std::unordered_set<uint32_t> worklist;  // Ids still to be visited
    worklist.insert(entrypoint.word(2));    // Seed with the entrypoint's function id

    while (!worklist.empty()) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            // ID is something we didn't collect in BuildDefIndex. that's OK -- we'll stumble across all kinds of things here
            // that we may not care about.
            continue;
        }

        // Try to add to the output set
        if (!ids.insert(id).second) {
            continue; // If we already saw this id, we don't want to walk it again.
        }

        switch (insn.opcode()) {
            case spv::OpFunction:
                // Scan whole body of the function, enlisting anything interesting
                while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                    switch (insn.opcode()) {
                        // Loads and atomics: the pointer operand is word 3.
                        case spv::OpLoad:
                        case spv::OpAtomicLoad:
                        case spv::OpAtomicExchange:
                        case spv::OpAtomicCompareExchange:
                        case spv::OpAtomicCompareExchangeWeak:
                        case spv::OpAtomicIIncrement:
                        case spv::OpAtomicIDecrement:
                        case spv::OpAtomicIAdd:
                        case spv::OpAtomicISub:
                        case spv::OpAtomicSMin:
                        case spv::OpAtomicUMin:
                        case spv::OpAtomicSMax:
                        case spv::OpAtomicUMax:
                        case spv::OpAtomicAnd:
                        case spv::OpAtomicOr:
                        case spv::OpAtomicXor:
                            worklist.insert(insn.word(3)); // ptr
                            break;
                        // Stores: the pointer operand is word 1 (no result id).
                        case spv::OpStore:
                        case spv::OpAtomicStore:
                            worklist.insert(insn.word(1)); // ptr
                            break;
                        case spv::OpAccessChain:
                        case spv::OpInBoundsAccessChain:
                            worklist.insert(insn.word(3)); // base ptr
                            break;
                        // Image/sampler operations: the image or sampled image operand is word 3.
                        case spv::OpSampledImage:
                        case spv::OpImageSampleImplicitLod:
                        case spv::OpImageSampleExplicitLod:
                        case spv::OpImageSampleDrefImplicitLod:
                        case spv::OpImageSampleDrefExplicitLod:
                        case spv::OpImageSampleProjImplicitLod:
                        case spv::OpImageSampleProjExplicitLod:
                        case spv::OpImageSampleProjDrefImplicitLod:
                        case spv::OpImageSampleProjDrefExplicitLod:
                        case spv::OpImageFetch:
                        case spv::OpImageGather:
                        case spv::OpImageDrefGather:
                        case spv::OpImageRead:
                        case spv::OpImage:
                        case spv::OpImageQueryFormat:
                        case spv::OpImageQueryOrder:
                        case spv::OpImageQuerySizeLod:
                        case spv::OpImageQuerySize:
                        case spv::OpImageQueryLod:
                        case spv::OpImageQueryLevels:
                        case spv::OpImageQuerySamples:
                        case spv::OpImageSparseSampleImplicitLod:
                        case spv::OpImageSparseSampleExplicitLod:
                        case spv::OpImageSparseSampleDrefImplicitLod:
                        case spv::OpImageSparseSampleDrefExplicitLod:
                        case spv::OpImageSparseSampleProjImplicitLod:
                        case spv::OpImageSparseSampleProjExplicitLod:
                        case spv::OpImageSparseSampleProjDrefImplicitLod:
                        case spv::OpImageSparseSampleProjDrefExplicitLod:
                        case spv::OpImageSparseFetch:
                        case spv::OpImageSparseGather:
                        case spv::OpImageSparseDrefGather:
                        case spv::OpImageTexelPointer:
                            worklist.insert(insn.word(3)); // Image or sampled image
                            break;
                        case spv::OpImageWrite:
                            worklist.insert(insn.word(1)); // Image -- different operand order to above
                            break;
                        case spv::OpFunctionCall:
                            for (uint32_t i = 3; i < insn.len(); i++) {
                                worklist.insert(insn.word(i)); // fn itself, and all args
                            }
                            break;

                        case spv::OpExtInst:
                            for (uint32_t i = 5; i < insn.len(); i++) {
                                worklist.insert(insn.word(i)); // Operands to ext inst
                            }
                            break;
                    }
                }
                break;
        }
    }

    return ids;
}
1291
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001292static bool ValidatePushConstantBlockAgainstPipeline(debug_report_data const *report_data,
1293 std::vector<VkPushConstantRange> const *push_constant_ranges,
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001294 SHADER_MODULE_STATE const *src, spirv_inst_iter type,
1295 VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001296 bool skip = false;
1297
1298 // Strip off ptrs etc
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001299 type = GetStructType(src, type, false);
Chris Forbes47567b72017-06-09 12:09:45 -07001300 assert(type != src->end());
1301
1302 // Validate directly off the offsets. this isn't quite correct for arrays and matrices, but is a good first step.
1303 // TODO: arrays, matrices, weird sizes
1304 for (auto insn : *src) {
1305 if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
1306 if (insn.word(3) == spv::DecorationOffset) {
1307 unsigned offset = insn.word(4);
1308 auto size = 4; // Bytes; TODO: calculate this based on the type
1309
1310 bool found_range = false;
1311 for (auto const &range : *push_constant_ranges) {
1312 if (range.offset <= offset && range.offset + range.size >= offset + size) {
1313 found_range = true;
1314
1315 if ((range.stageFlags & stage) == 0) {
Dave Houltona9df0ce2018-02-07 10:51:23 -07001316 skip |=
1317 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001318 kVUID_Core_Shader_PushConstantNotAccessibleFromStage,
Dave Houltona9df0ce2018-02-07 10:51:23 -07001319 "Push constant range covering variable starting at offset %u not accessible from stage %s",
1320 offset, string_VkShaderStageFlagBits(stage));
Chris Forbes47567b72017-06-09 12:09:45 -07001321 }
1322
1323 break;
1324 }
1325 }
1326
1327 if (!found_range) {
1328 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001329 kVUID_Core_Shader_PushConstantOutOfRange,
Dave Houltona9df0ce2018-02-07 10:51:23 -07001330 "Push constant range covering variable starting at offset %u not declared in layout", offset);
Chris Forbes47567b72017-06-09 12:09:45 -07001331 }
1332 }
1333 }
1334 }
1335
1336 return skip;
1337}
1338
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001339static bool ValidatePushConstantUsage(debug_report_data const *report_data,
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001340 std::vector<VkPushConstantRange> const *push_constant_ranges, SHADER_MODULE_STATE const *src,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001341 std::unordered_set<uint32_t> accessible_ids, VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001342 bool skip = false;
1343
1344 for (auto id : accessible_ids) {
1345 auto def_insn = src->get_def(id);
1346 if (def_insn.opcode() == spv::OpVariable && def_insn.word(3) == spv::StorageClassPushConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001347 skip |= ValidatePushConstantBlockAgainstPipeline(report_data, push_constant_ranges, src, src->get_def(def_insn.word(1)),
1348 stage);
Chris Forbes47567b72017-06-09 12:09:45 -07001349 }
1350 }
1351
1352 return skip;
1353}
1354
1355// Validate that data for each specialization entry is fully contained within the buffer.
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001356static bool ValidateSpecializationOffsets(debug_report_data const *report_data, VkPipelineShaderStageCreateInfo const *info) {
Chris Forbes47567b72017-06-09 12:09:45 -07001357 bool skip = false;
1358
1359 VkSpecializationInfo const *spec = info->pSpecializationInfo;
1360
1361 if (spec) {
1362 for (auto i = 0u; i < spec->mapEntryCount; i++) {
Dave Houlton78d09922018-05-17 15:48:45 -06001363 // TODO: This is a good place for "VUID-VkSpecializationInfo-offset-00773".
Chris Forbes47567b72017-06-09 12:09:45 -07001364 if (spec->pMapEntries[i].offset + spec->pMapEntries[i].size > spec->dataSize) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001365 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
Dave Houlton78d09922018-05-17 15:48:45 -06001366 "VUID-VkSpecializationInfo-pMapEntries-00774",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001367 "Specialization entry %u (for constant id %u) references memory outside provided specialization "
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001368 "data (bytes %u.." PRINTF_SIZE_T_SPECIFIER "; " PRINTF_SIZE_T_SPECIFIER " bytes provided)..",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001369 i, spec->pMapEntries[i].constantID, spec->pMapEntries[i].offset,
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001370 spec->pMapEntries[i].offset + spec->pMapEntries[i].size - 1, spec->dataSize);
Chris Forbes47567b72017-06-09 12:09:45 -07001371 }
1372 }
1373 }
1374
1375 return skip;
1376}
1377
// TODO (jbolz): Can this return a const reference?
//
// Map a SPIR-V type id to the set of VkDescriptorType values that could legally back a
// resource variable of that type. Also computes, via the out-parameter descriptor_count,
// the number of descriptors consumed (product of array dimensions; 0 for a runtime array).
// An empty returned set means the type matches no descriptor type (invalid / junk type).
static std::set<uint32_t> TypeToDescriptorTypeSet(SHADER_MODULE_STATE const *module, uint32_t type_id, unsigned &descriptor_count) {
    auto type = module->get_def(type_id);
    bool is_storage_buffer = false;  // set if any stripped pointer uses StorageClassStorageBuffer
    descriptor_count = 1;
    std::set<uint32_t> ret;

    // Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
    while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
        if (type.opcode() == spv::OpTypeRuntimeArray) {
            // Unsized (runtime) array: descriptor count is determined at bind time, report 0 here.
            descriptor_count = 0;
            type = module->get_def(type.word(2));
        } else if (type.opcode() == spv::OpTypeArray) {
            // word(3) is the id of the array-length constant; word(2) is the element type.
            descriptor_count *= GetConstantValue(module, type.word(3));
            type = module->get_def(type.word(2));
        } else {
            // OpTypePointer: word(2) is the storage class, word(3) the pointee type.
            if (type.word(2) == spv::StorageClassStorageBuffer) {
                is_storage_buffer = true;
            }
            type = module->get_def(type.word(3));
        }
    }

    switch (type.opcode()) {
        case spv::OpTypeStruct: {
            // Scan the module's decorations to classify the struct as a (uniform) Block or
            // a (storage) BufferBlock; insn.word(1) is the decorated id.
            for (auto insn : *module) {
                if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
                    if (insn.word(2) == spv::DecorationBlock) {
                        if (is_storage_buffer) {
                            // Block decoration + StorageBuffer storage class => SSBO (SPIR-V 1.3+ style).
                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
                            return ret;
                        } else {
                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
                            ret.insert(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
                            return ret;
                        }
                    } else if (insn.word(2) == spv::DecorationBufferBlock) {
                        // Legacy SSBO spelling (pre-StorageBuffer storage class).
                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
                        return ret;
                    }
                }
            }

            // Invalid: struct with neither Block nor BufferBlock decoration matches nothing.
            return ret;
        }

        case spv::OpTypeSampler:
            ret.insert(VK_DESCRIPTOR_TYPE_SAMPLER);
            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
            return ret;

        case spv::OpTypeSampledImage: {
            // Slight relaxation for some GLSL historical madness: samplerBuffer doesn't really have a sampler, and a texel
            // buffer descriptor doesn't really provide one. Allow this slight mismatch.
            auto image_type = module->get_def(type.word(2));
            auto dim = image_type.word(3);
            auto sampled = image_type.word(7);
            if (dim == spv::DimBuffer && sampled == 1) {
                ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
                return ret;
            }
        }
            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
            return ret;

        case spv::OpTypeImage: {
            // Many descriptor types backing image types-- depends on dimension and whether the image will be used with a sampler.
            // SPIRV for Vulkan requires that sampled be 1 or 2 -- leaving the decision to runtime is unacceptable.
            auto dim = type.word(3);
            auto sampled = type.word(7);

            if (dim == spv::DimSubpassData) {
                ret.insert(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
                return ret;
            } else if (dim == spv::DimBuffer) {
                if (sampled == 1) {
                    ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
                    return ret;
                } else {
                    ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
                    return ret;
                }
            } else if (sampled == 1) {
                ret.insert(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
                ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
                return ret;
            } else {
                ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
                return ret;
            }
        }
        case spv::OpTypeAccelerationStructureNV:
            ret.insert(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
            return ret;

        // We shouldn't really see any other junk types -- but if we do, they're a mismatch.
        default:
            return ret;  // Matches nothing
    }
}
1482
Jeff Bolze54ae892018-09-08 12:16:29 -05001483static std::string string_descriptorTypes(const std::set<uint32_t> &descriptor_types) {
Chris Forbes73c00bf2018-06-22 16:28:06 -07001484 std::stringstream ss;
Jeff Bolze54ae892018-09-08 12:16:29 -05001485 for (auto it = descriptor_types.begin(); it != descriptor_types.end(); ++it) {
1486 if (ss.tellp()) ss << ", ";
1487 ss << string_VkDescriptorType(VkDescriptorType(*it));
Chris Forbes73c00bf2018-06-22 16:28:06 -07001488 }
1489 return ss.str();
1490}
1491
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001492static bool RequireFeature(debug_report_data const *report_data, VkBool32 feature, char const *feature_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001493 if (!feature) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001494 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001495 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires %s but is not enabled on the device", feature_name)) {
Chris Forbes47567b72017-06-09 12:09:45 -07001496 return true;
1497 }
1498 }
1499
1500 return false;
1501}
1502
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001503static bool RequireExtension(debug_report_data const *report_data, bool extension, char const *extension_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001504 if (!extension) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001505 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001506 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires extension %s but is not enabled on the device",
Chris Forbes47567b72017-06-09 12:09:45 -07001507 extension_name)) {
1508 return true;
1509 }
1510 }
1511
1512 return false;
1513}
1514
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001515bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkShaderStageFlagBits stage,
1516 bool has_writable_descriptor) {
Chris Forbes47567b72017-06-09 12:09:45 -07001517 bool skip = false;
1518
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001519 struct FeaturePointer {
1520 // Callable object to test if this feature is enabled in the given aggregate feature struct
1521 const std::function<VkBool32(const DeviceFeatures &)> IsEnabled;
1522
1523 // Test if feature pointer is populated
1524 explicit operator bool() const { return static_cast<bool>(IsEnabled); }
1525
1526 // Default and nullptr constructor to create an empty FeaturePointer
1527 FeaturePointer() : IsEnabled(nullptr) {}
1528 FeaturePointer(std::nullptr_t ptr) : IsEnabled(nullptr) {}
1529
1530 // Constructors to populate FeaturePointer based on given pointer to member
1531 FeaturePointer(VkBool32 VkPhysicalDeviceFeatures::*ptr)
1532 : IsEnabled([=](const DeviceFeatures &features) { return features.core.*ptr; }) {}
1533 FeaturePointer(VkBool32 VkPhysicalDeviceDescriptorIndexingFeaturesEXT::*ptr)
1534 : IsEnabled([=](const DeviceFeatures &features) { return features.descriptor_indexing.*ptr; }) {}
1535 FeaturePointer(VkBool32 VkPhysicalDevice8BitStorageFeaturesKHR::*ptr)
1536 : IsEnabled([=](const DeviceFeatures &features) { return features.eight_bit_storage.*ptr; }) {}
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001537 FeaturePointer(VkBool32 VkPhysicalDeviceTransformFeedbackFeaturesEXT::*ptr)
1538 : IsEnabled([=](const DeviceFeatures &features) { return features.transform_feedback_features.*ptr; }) {}
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001539 FeaturePointer(VkBool32 VkPhysicalDeviceFloat16Int8FeaturesKHR::*ptr)
1540 : IsEnabled([=](const DeviceFeatures &features) { return features.float16_int8.*ptr; }) {}
Tobias Hector6a0ece72018-12-10 12:24:05 +00001541 FeaturePointer(VkBool32 VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::*ptr)
1542 : IsEnabled([=](const DeviceFeatures &features) { return features.scalar_block_layout_features.*ptr; }) {}
Jeff Bolze4356752019-03-07 11:23:46 -06001543 FeaturePointer(VkBool32 VkPhysicalDeviceCooperativeMatrixFeaturesNV::*ptr)
1544 : IsEnabled([=](const DeviceFeatures &features) { return features.cooperative_matrix_features.*ptr; }) {}
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00001545 FeaturePointer(VkBool32 VkPhysicalDeviceFloatControlsPropertiesKHR::*ptr)
1546 : IsEnabled([=](const DeviceFeatures &features) { return features.float_controls.*ptr; }) {}
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001547 };
1548
Chris Forbes47567b72017-06-09 12:09:45 -07001549 struct CapabilityInfo {
1550 char const *name;
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001551 FeaturePointer feature;
1552 bool DeviceExtensions::*extension;
Chris Forbes47567b72017-06-09 12:09:45 -07001553 };
1554
Chris Forbes47567b72017-06-09 12:09:45 -07001555 // clang-format off
Dave Houltoneb10ea82017-12-22 12:21:50 -07001556 static const std::unordered_multimap<uint32_t, CapabilityInfo> capabilities = {
Chris Forbes47567b72017-06-09 12:09:45 -07001557 // Capabilities always supported by a Vulkan 1.0 implementation -- no
1558 // feature bits.
1559 {spv::CapabilityMatrix, {nullptr}},
1560 {spv::CapabilityShader, {nullptr}},
1561 {spv::CapabilityInputAttachment, {nullptr}},
1562 {spv::CapabilitySampled1D, {nullptr}},
1563 {spv::CapabilityImage1D, {nullptr}},
1564 {spv::CapabilitySampledBuffer, {nullptr}},
1565 {spv::CapabilityImageQuery, {nullptr}},
1566 {spv::CapabilityDerivativeControl, {nullptr}},
1567
1568 // Capabilities that are optionally supported, but require a feature to
1569 // be enabled on the device
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001570 {spv::CapabilityGeometry, {"VkPhysicalDeviceFeatures::geometryShader", &VkPhysicalDeviceFeatures::geometryShader}},
1571 {spv::CapabilityTessellation, {"VkPhysicalDeviceFeatures::tessellationShader", &VkPhysicalDeviceFeatures::tessellationShader}},
1572 {spv::CapabilityFloat64, {"VkPhysicalDeviceFeatures::shaderFloat64", &VkPhysicalDeviceFeatures::shaderFloat64}},
1573 {spv::CapabilityInt64, {"VkPhysicalDeviceFeatures::shaderInt64", &VkPhysicalDeviceFeatures::shaderInt64}},
1574 {spv::CapabilityTessellationPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1575 {spv::CapabilityGeometryPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1576 {spv::CapabilityImageGatherExtended, {"VkPhysicalDeviceFeatures::shaderImageGatherExtended", &VkPhysicalDeviceFeatures::shaderImageGatherExtended}},
1577 {spv::CapabilityStorageImageMultisample, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
1578 {spv::CapabilityUniformBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing}},
1579 {spv::CapabilitySampledImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing}},
1580 {spv::CapabilityStorageBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1581 {spv::CapabilityStorageImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1582 {spv::CapabilityClipDistance, {"VkPhysicalDeviceFeatures::shaderClipDistance", &VkPhysicalDeviceFeatures::shaderClipDistance}},
1583 {spv::CapabilityCullDistance, {"VkPhysicalDeviceFeatures::shaderCullDistance", &VkPhysicalDeviceFeatures::shaderCullDistance}},
1584 {spv::CapabilityImageCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1585 {spv::CapabilitySampleRateShading, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1586 {spv::CapabilitySparseResidency, {"VkPhysicalDeviceFeatures::shaderResourceResidency", &VkPhysicalDeviceFeatures::shaderResourceResidency}},
1587 {spv::CapabilityMinLod, {"VkPhysicalDeviceFeatures::shaderResourceMinLod", &VkPhysicalDeviceFeatures::shaderResourceMinLod}},
1588 {spv::CapabilitySampledCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1589 {spv::CapabilityImageMSArray, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
1590 {spv::CapabilityStorageImageExtendedFormats, {"VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats", &VkPhysicalDeviceFeatures::shaderStorageImageExtendedFormats}},
1591 {spv::CapabilityInterpolationFunction, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1592 {spv::CapabilityStorageImageReadWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat}},
1593 {spv::CapabilityStorageImageWriteWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat}},
1594 {spv::CapabilityMultiViewport, {"VkPhysicalDeviceFeatures::multiViewport", &VkPhysicalDeviceFeatures::multiViewport}},
Jeff Bolzfdf96072018-04-10 14:32:18 -05001595
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001596 {spv::CapabilityShaderNonUniformEXT, {VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_descriptor_indexing}},
1597 {spv::CapabilityRuntimeDescriptorArrayEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray}},
1598 {spv::CapabilityInputAttachmentArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing}},
1599 {spv::CapabilityUniformTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing}},
1600 {spv::CapabilityStorageTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing}},
1601 {spv::CapabilityUniformBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing}},
1602 {spv::CapabilitySampledImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing}},
1603 {spv::CapabilityStorageBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing}},
1604 {spv::CapabilityStorageImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing}},
1605 {spv::CapabilityInputAttachmentArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing}},
1606 {spv::CapabilityUniformTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing}},
1607 {spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT , {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing}},
Chris Forbes47567b72017-06-09 12:09:45 -07001608
1609 // Capabilities that require an extension
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001610 {spv::CapabilityDrawParameters, {VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_draw_parameters}},
1611 {spv::CapabilityGeometryShaderPassthroughNV, {VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_geometry_shader_passthrough}},
1612 {spv::CapabilitySampleMaskOverrideCoverageNV, {VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_sample_mask_override_coverage}},
1613 {spv::CapabilityShaderViewportIndexLayerEXT, {VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_viewport_index_layer}},
1614 {spv::CapabilityShaderViewportIndexLayerNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1615 {spv::CapabilityShaderViewportMaskNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1616 {spv::CapabilitySubgroupBallotKHR, {VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_ballot }},
1617 {spv::CapabilitySubgroupVoteKHR, {VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_vote }},
aqnuep7033c702018-09-11 18:03:29 +02001618 {spv::CapabilityInt64Atomics, {VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_atomic_int64 }},
Alexander Galazin3bd8e342018-06-14 15:49:07 +02001619
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001620 {spv::CapabilityStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1621 {spv::CapabilityUniformAndStorageBuffer8BitAccess , {"VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1622 {spv::CapabilityStoragePushConstant8 , {"VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8", &VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8, &DeviceExtensions::vk_khr_8bit_storage}},
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001623
1624 {spv::CapabilityTransformFeedback , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback, &DeviceExtensions::vk_ext_transform_feedback}},
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001625 {spv::CapabilityGeometryStreams , { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams, &DeviceExtensions::vk_ext_transform_feedback}},
1626
1627 {spv::CapabilityFloat16 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16, &DeviceExtensions::vk_khr_shader_float16_int8}},
1628 {spv::CapabilityInt8 , {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8, &DeviceExtensions::vk_khr_shader_float16_int8}},
Jeff Bolze4356752019-03-07 11:23:46 -06001629
1630 {spv::CapabilityCooperativeMatrixNV, {"VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix", &VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix, &DeviceExtensions::vk_nv_cooperative_matrix}},
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00001631
1632 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1633 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1634 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1635 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1636 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1637 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1638 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1639 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1640 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1641 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1642 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1643 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1644 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1645 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1646 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
Chris Forbes47567b72017-06-09 12:09:45 -07001647 };
1648 // clang-format on
1649
1650 for (auto insn : *src) {
1651 if (insn.opcode() == spv::OpCapability) {
Dave Houltoneb10ea82017-12-22 12:21:50 -07001652 size_t n = capabilities.count(insn.word(1));
1653 if (1 == n) { // key occurs exactly once
1654 auto it = capabilities.find(insn.word(1));
1655 if (it != capabilities.end()) {
1656 if (it->second.feature) {
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001657 skip |= RequireFeature(report_data, it->second.feature.IsEnabled(enabled_features), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001658 }
1659 if (it->second.extension) {
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06001660 skip |= RequireExtension(report_data, device_extensions.*(it->second.extension), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001661 }
Chris Forbes47567b72017-06-09 12:09:45 -07001662 }
Dave Houltoneb10ea82017-12-22 12:21:50 -07001663 } else if (1 < n) { // key occurs multiple times, at least one must be enabled
1664 bool needs_feature = false, has_feature = false;
1665 bool needs_ext = false, has_ext = false;
1666 std::string feature_names = "(one of) [ ";
1667 std::string extension_names = feature_names;
1668 auto caps = capabilities.equal_range(insn.word(1));
1669 for (auto it = caps.first; it != caps.second; ++it) {
1670 if (it->second.feature) {
1671 needs_feature = true;
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001672 has_feature = has_feature || it->second.feature.IsEnabled(enabled_features);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001673 feature_names += it->second.name;
1674 feature_names += " ";
1675 }
1676 if (it->second.extension) {
1677 needs_ext = true;
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06001678 has_ext = has_ext || device_extensions.*(it->second.extension);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001679 extension_names += it->second.name;
1680 extension_names += " ";
1681 }
1682 }
1683 if (needs_feature) {
1684 feature_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001685 skip |= RequireFeature(report_data, has_feature, feature_names.c_str());
Dave Houltoneb10ea82017-12-22 12:21:50 -07001686 }
1687 if (needs_ext) {
1688 extension_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001689 skip |= RequireExtension(report_data, has_ext, extension_names.c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07001690 }
1691 }
1692 }
1693 }
1694
Chris Forbes349b3132018-03-07 11:38:08 -08001695 if (has_writable_descriptor) {
1696 switch (stage) {
1697 case VK_SHADER_STAGE_COMPUTE_BIT:
Jeff Bolz148d94e2018-12-13 21:25:56 -06001698 case VK_SHADER_STAGE_RAYGEN_BIT_NV:
1699 case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
1700 case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
1701 case VK_SHADER_STAGE_MISS_BIT_NV:
1702 case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
1703 case VK_SHADER_STAGE_CALLABLE_BIT_NV:
1704 case VK_SHADER_STAGE_TASK_BIT_NV:
1705 case VK_SHADER_STAGE_MESH_BIT_NV:
Chris Forbes349b3132018-03-07 11:38:08 -08001706 /* No feature requirements for writes and atomics from compute
Jeff Bolz148d94e2018-12-13 21:25:56 -06001707 * raytracing, or mesh stages */
Chris Forbes349b3132018-03-07 11:38:08 -08001708 break;
1709 case VK_SHADER_STAGE_FRAGMENT_BIT:
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001710 skip |= RequireFeature(report_data, enabled_features.core.fragmentStoresAndAtomics, "fragmentStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001711 break;
1712 default:
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001713 skip |= RequireFeature(report_data, enabled_features.core.vertexPipelineStoresAndAtomics,
1714 "vertexPipelineStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001715 break;
1716 }
1717 }
1718
Chris Forbes47567b72017-06-09 12:09:45 -07001719 return skip;
1720}
1721
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001722static bool VariableIsBuiltIn(SHADER_MODULE_STATE const *src, const uint32_t ID, std::vector<uint32_t> const &builtInBlockIDs,
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001723 std::vector<uint32_t> const &builtInIDs) {
1724 auto insn = src->get_def(ID);
1725
1726 switch (insn.opcode()) {
1727 case spv::OpVariable: {
1728 // First check if the variable is a "pure" built-in type, e.g. gl_ViewportIndex
1729 uint32_t ID = insn.word(2);
1730 for (auto builtInID : builtInIDs) {
1731 if (ID == builtInID) {
1732 return true;
1733 }
1734 }
1735
Ari Suonpaa89c60822019-03-25 14:13:02 +02001736 return VariableIsBuiltIn(src, insn.word(1), builtInBlockIDs, builtInIDs);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001737 }
1738 case spv::OpTypePointer:
Ari Suonpaa89c60822019-03-25 14:13:02 +02001739 return VariableIsBuiltIn(src, insn.word(3), builtInBlockIDs, builtInIDs);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001740 case spv::OpTypeArray:
Ari Suonpaa89c60822019-03-25 14:13:02 +02001741 return VariableIsBuiltIn(src, insn.word(2), builtInBlockIDs, builtInIDs);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001742 case spv::OpTypeStruct: {
1743 uint32_t ID = insn.word(1); // We only need to check the first member as either all will be, or none will be built-in
1744 for (auto builtInBlockID : builtInBlockIDs) {
1745 if (ID == builtInBlockID) {
1746 return true;
1747 }
1748 }
1749 return false;
1750 }
1751 default:
1752 return false;
1753 }
1754
1755 return false;
1756}
1757
// Validate that a shader stage's user-defined input/output interface variables do not
// consume more components than the corresponding VkPhysicalDeviceLimits allow for that
// stage. Built-in variables and built-in blocks (e.g. gl_PerVertex) are excluded from
// the count, since the limits apply only to user-defined interface variables.
// Returns true if a validation error was logged.
bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
                                                      PIPELINE_STATE *pipeline) {
    // Compute has no input/output interface limits; ALL / ALL_GRAPHICS are aggregate
    // flag values rather than real stages, so they carry no limits either.
    if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT || pStage->stage == VK_SHADER_STAGE_ALL_GRAPHICS ||
        pStage->stage == VK_SHADER_STAGE_ALL) {
        return false;
    }

    bool skip = false;
    auto const &limits = phys_dev_props.limits;

    std::vector<uint32_t> builtInBlockIDs;  // Struct type IDs that have a BuiltIn member decoration
    std::vector<uint32_t> builtInIDs;       // IDs decorated BuiltIn, plus Block type IDs whose members are built-in
    struct Variable {
        uint32_t baseTypePtrID;  // Result-type (pointer type) ID of the OpVariable
        uint32_t ID;             // Result ID of the OpVariable
        uint32_t storageClass;   // spv::StorageClassInput or spv::StorageClassOutput
    };
    std::vector<Variable> variables;

    // Single pass over the module: collect built-in decorations and all Input/Output
    // variables. Decorations precede type/variable definitions in a valid module, so
    // the builtInBlockIDs list is complete by the time DecorationBlock is matched.
    for (auto insn : *src) {
        switch (insn.opcode()) {
            // Find all built-in member decorations
            case spv::OpMemberDecorate:
                if (insn.word(3) == spv::DecorationBuiltIn) {
                    builtInBlockIDs.push_back(insn.word(1));
                }
                break;
            // Find all built-in decorations
            case spv::OpDecorate:
                switch (insn.word(2)) {
                    case spv::DecorationBlock: {
                        uint32_t blockID = insn.word(1);
                        for (auto builtInBlockID : builtInBlockIDs) {
                            // Check if one of the members of the block are built-in -> the block is built-in
                            if (blockID == builtInBlockID) {
                                builtInIDs.push_back(blockID);
                                break;
                            }
                        }
                        break;
                    }
                    case spv::DecorationBuiltIn:
                        builtInIDs.push_back(insn.word(1));
                        break;
                    default:
                        break;
                }
                break;
            // Find all input and output variables
            case spv::OpVariable: {
                Variable var = {};
                var.storageClass = insn.word(3);
                if (var.storageClass == spv::StorageClassInput || var.storageClass == spv::StorageClassOutput) {
                    var.baseTypePtrID = insn.word(1);
                    var.ID = insn.word(2);
                    variables.push_back(var);
                }
                break;
            }
            default:
                break;
        }
    }

    // Sum the components consumed by each non-built-in interface variable.
    uint32_t numCompIn = 0, numCompOut = 0;
    for (auto &var : variables) {
        // Check the variable's ID
        if (VariableIsBuiltIn(src, var.ID, builtInBlockIDs, builtInIDs)) {
            continue;
        }
        // Check the variable's type's ID - e.g. gl_PerVertex is made of basic types, not built-in types
        if (VariableIsBuiltIn(src, src->get_def(var.baseTypePtrID).word(3), builtInBlockIDs, builtInIDs)) {
            continue;
        }

        if (var.storageClass == spv::StorageClassInput) {
            numCompIn += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
        } else { // var.storageClass == spv::StorageClassOutput
            numCompOut += GetComponentsConsumedByType(src, var.baseTypePtrID, false);
        }
    }

    // Compare the totals against the per-stage device limits. Each stage has its own
    // input and/or output component limit in VkPhysicalDeviceLimits.
    switch (pStage->stage) {
        case VK_SHADER_STAGE_VERTEX_BIT:
            if (numCompOut > limits.maxVertexOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Vertex shader exceeds "
                                "VkPhysicalDeviceLimits::maxVertexOutputComponents of %u "
                                "components by %u components",
                                limits.maxVertexOutputComponents, numCompOut - limits.maxVertexOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
            if (numCompIn > limits.maxTessellationControlPerVertexInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationControlPerVertexInputComponents,
                                numCompIn - limits.maxTessellationControlPerVertexInputComponents);
            }
            if (numCompOut > limits.maxTessellationControlPerVertexOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationControlPerVertexOutputComponents,
                                numCompOut - limits.maxTessellationControlPerVertexOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
            if (numCompIn > limits.maxTessellationEvaluationInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationEvaluationInputComponents,
                                numCompIn - limits.maxTessellationEvaluationInputComponents);
            }
            if (numCompOut > limits.maxTessellationEvaluationOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
                                "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents of %u "
                                "components by %u components",
                                limits.maxTessellationEvaluationOutputComponents,
                                numCompOut - limits.maxTessellationEvaluationOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_GEOMETRY_BIT:
            if (numCompIn > limits.maxGeometryInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
                                "VkPhysicalDeviceLimits::maxGeometryInputComponents of %u "
                                "components by %u components",
                                limits.maxGeometryInputComponents, numCompIn - limits.maxGeometryInputComponents);
            }
            if (numCompOut > limits.maxGeometryOutputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
                                "VkPhysicalDeviceLimits::maxGeometryOutputComponents of %u "
                                "components by %u components",
                                limits.maxGeometryOutputComponents, numCompOut - limits.maxGeometryOutputComponents);
            }
            break;

        case VK_SHADER_STAGE_FRAGMENT_BIT:
            if (numCompIn > limits.maxFragmentInputComponents) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
                                "Invalid Pipeline CreateInfo State: Fragment shader exceeds "
                                "VkPhysicalDeviceLimits::maxFragmentInputComponents of %u "
                                "components by %u components",
                                limits.maxFragmentInputComponents, numCompIn - limits.maxFragmentInputComponents);
            }
            break;

        // No per-stage input/output component limits are defined for the ray tracing
        // and mesh/task stages, so there is nothing to check.
        case VK_SHADER_STAGE_RAYGEN_BIT_NV:
        case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
        case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
        case VK_SHADER_STAGE_MISS_BIT_NV:
        case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
        case VK_SHADER_STAGE_CALLABLE_BIT_NV:
        case VK_SHADER_STAGE_TASK_BIT_NV:
        case VK_SHADER_STAGE_MESH_BIT_NV:
            break;

        default:
            assert(false);  // This should never happen
    }
    return skip;
}
1939
Jeff Bolze4356752019-03-07 11:23:46 -06001940// copy the specialization constant value into buf, if it is present
1941void GetSpecConstantValue(VkPipelineShaderStageCreateInfo const *pStage, uint32_t spec_id, void *buf) {
1942 VkSpecializationInfo const *spec = pStage->pSpecializationInfo;
1943
1944 if (spec && spec_id < spec->mapEntryCount) {
1945 memcpy(buf, (uint8_t *)spec->pData + spec->pMapEntries[spec_id].offset, spec->pMapEntries[spec_id].size);
1946 }
1947}
1948
1949// Fill in value with the constant or specialization constant value, if available.
1950// Returns true if the value has been accurately filled out.
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001951static bool GetIntConstantValue(spirv_inst_iter insn, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
Jeff Bolze4356752019-03-07 11:23:46 -06001952 const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id, uint32_t *value) {
1953 auto type_id = src->get_def(insn.word(1));
1954 if (type_id.opcode() != spv::OpTypeInt || type_id.word(2) != 32) {
1955 return false;
1956 }
1957 switch (insn.opcode()) {
1958 case spv::OpSpecConstant:
1959 *value = insn.word(3);
1960 GetSpecConstantValue(pStage, id_to_spec_id.at(insn.word(2)), value);
1961 return true;
1962 case spv::OpConstant:
1963 *value = insn.word(3);
1964 return true;
1965 default:
1966 return false;
1967 }
1968}
1969
1970// Map SPIR-V type to VK_COMPONENT_TYPE enum
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001971VkComponentTypeNV GetComponentType(spirv_inst_iter insn, SHADER_MODULE_STATE const *src) {
Jeff Bolze4356752019-03-07 11:23:46 -06001972 switch (insn.opcode()) {
1973 case spv::OpTypeInt:
1974 switch (insn.word(2)) {
1975 case 8:
1976 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT8_NV : VK_COMPONENT_TYPE_UINT8_NV;
1977 case 16:
1978 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT16_NV : VK_COMPONENT_TYPE_UINT16_NV;
1979 case 32:
1980 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT32_NV : VK_COMPONENT_TYPE_UINT32_NV;
1981 case 64:
1982 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT64_NV : VK_COMPONENT_TYPE_UINT64_NV;
1983 default:
1984 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
1985 }
1986 case spv::OpTypeFloat:
1987 switch (insn.word(2)) {
1988 case 16:
1989 return VK_COMPONENT_TYPE_FLOAT16_NV;
1990 case 32:
1991 return VK_COMPONENT_TYPE_FLOAT32_NV;
1992 case 64:
1993 return VK_COMPONENT_TYPE_FLOAT64_NV;
1994 default:
1995 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
1996 }
1997 default:
1998 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
1999 }
2000}
2001
// Validate SPV_NV_cooperative_matrix behavior that can't be statically validated
// in SPIRV-Tools (e.g. due to specialization constant usage).
// Checks that every OpTypeCooperativeMatrixNV (and every OpCooperativeMatrixMulAddNV
// operand combination) whose dimensions/scope are fully known at pipeline-creation
// time matches one of the VkCooperativeMatrixPropertiesNV the device advertises,
// and that the capability is only used in a supported shader stage.
// Returns true if a validation error was logged.
bool CoreChecks::ValidateCooperativeMatrix(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
                                           PIPELINE_STATE *pipeline) {
    bool skip = false;

    // Map SPIR-V result ID to specialization constant id (SpecId decoration value)
    std::unordered_map<uint32_t, uint32_t> id_to_spec_id;
    // Map SPIR-V result ID to the ID of its type.
    std::unordered_map<uint32_t, uint32_t> id_to_type_id;

    // Decoded form of an OpTypeCooperativeMatrixNV instruction.
    struct CoopMatType {
        uint32_t scope, rows, cols;
        VkComponentTypeNV component_type;
        // True only when scope, rows and cols could all be resolved to concrete
        // values (constants or specialization constants with known data).
        bool all_constant;

        CoopMatType() : scope(0), rows(0), cols(0), component_type(VK_COMPONENT_TYPE_MAX_ENUM_NV), all_constant(false) {}

        // Decode the OpTypeCooperativeMatrixNV with result id 'id'. Operands are:
        // component type (word 2), scope (word 3), rows (word 4), cols (word 5).
        void Init(uint32_t id, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
                  const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id) {
            spirv_inst_iter insn = src->get_def(id);
            uint32_t component_type_id = insn.word(2);
            uint32_t scope_id = insn.word(3);
            uint32_t rows_id = insn.word(4);
            uint32_t cols_id = insn.word(5);
            auto component_type_iter = src->get_def(component_type_id);
            auto scope_iter = src->get_def(scope_id);
            auto rows_iter = src->get_def(rows_id);
            auto cols_iter = src->get_def(cols_id);

            all_constant = true;
            if (!GetIntConstantValue(scope_iter, src, pStage, id_to_spec_id, &scope)) {
                all_constant = false;
            }
            if (!GetIntConstantValue(rows_iter, src, pStage, id_to_spec_id, &rows)) {
                all_constant = false;
            }
            if (!GetIntConstantValue(cols_iter, src, pStage, id_to_spec_id, &cols)) {
                all_constant = false;
            }
            component_type = GetComponentType(component_type_iter, src);
        }
    };

    bool seen_coopmat_capability = false;

    for (auto insn : *src) {
        // Whitelist instructions whose result can be a cooperative matrix type, and
        // keep track of their types. It would be nice if SPIRV-Headers generated code
        // to identify which instructions have a result type and result id. Lacking that,
        // this whitelist is based on the set of instructions that
        // SPV_NV_cooperative_matrix says can be used with cooperative matrix types.
        switch (insn.opcode()) {
            case spv::OpLoad:
            case spv::OpCooperativeMatrixLoadNV:
            case spv::OpCooperativeMatrixMulAddNV:
            case spv::OpSNegate:
            case spv::OpFNegate:
            case spv::OpIAdd:
            case spv::OpFAdd:
            case spv::OpISub:
            case spv::OpFSub:
            case spv::OpFDiv:
            case spv::OpSDiv:
            case spv::OpUDiv:
            case spv::OpMatrixTimesScalar:
            case spv::OpConstantComposite:
            case spv::OpCompositeConstruct:
            case spv::OpConvertFToU:
            case spv::OpConvertFToS:
            case spv::OpConvertSToF:
            case spv::OpConvertUToF:
            case spv::OpUConvert:
            case spv::OpSConvert:
            case spv::OpFConvert:
                // For these opcodes, word(1) is the result type and word(2) the result id.
                id_to_type_id[insn.word(2)] = insn.word(1);
                break;
            default:
                break;
        }

        switch (insn.opcode()) {
            case spv::OpDecorate:
                // Record SpecId decorations so spec constant values can be resolved.
                if (insn.word(2) == spv::DecorationSpecId) {
                    id_to_spec_id[insn.word(1)] = insn.word(3);
                }
                break;
            case spv::OpCapability:
                if (insn.word(1) == spv::CapabilityCooperativeMatrixNV) {
                    seen_coopmat_capability = true;

                    // The capability may only be used in stages the device advertises.
                    if (!(pStage->stage & phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages)) {
                        skip |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                    HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixSupportedStages,
                                    "OpTypeCooperativeMatrixNV used in shader stage not in cooperativeMatrixSupportedStages (= %u)",
                                    phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages);
                    }
                }
                break;
            case spv::OpMemoryModel:
                // If the capability isn't enabled, don't bother with the rest of this function.
                // OpMemoryModel is the first required instruction after all OpCapability instructions.
                if (!seen_coopmat_capability) {
                    return skip;
                }
                break;
            case spv::OpTypeCooperativeMatrixNV: {
                CoopMatType M;
                M.Init(insn.word(1), src, pStage, id_to_spec_id);

                // Only types with fully-resolved parameters can be checked here.
                if (M.all_constant) {
                    // Validate that the type parameters are all supported for one of the
                    // operands of a cooperative matrix property.
                    bool valid = false;
                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
                        // A operand: MSize x KSize.
                        if (cooperative_matrix_properties[i].AType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].KSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // B operand: KSize x NSize.
                        if (cooperative_matrix_properties[i].BType == M.component_type &&
                            cooperative_matrix_properties[i].KSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // C operand: MSize x NSize.
                        if (cooperative_matrix_properties[i].CType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // D (result) operand: MSize x NSize.
                        if (cooperative_matrix_properties[i].DType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                    }
                    if (!valid) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixType,
                                        "OpTypeCooperativeMatrixNV (result id = %u) operands don't match a supported matrix type",
                                        insn.word(1));
                    }
                }
                break;
            }
            case spv::OpCooperativeMatrixMulAddNV: {
                // Operands: result D (word 2), A (word 3), B (word 4), C (word 5).
                CoopMatType A, B, C, D;
                if (id_to_type_id.find(insn.word(2)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(3)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(4)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(5)) == id_to_type_id.end()) {
                    // Couldn't find type of matrix
                    assert(!"Couldn't find type of matrix");
                    break;
                }
                D.Init(id_to_type_id[insn.word(2)], src, pStage, id_to_spec_id);
                A.Init(id_to_type_id[insn.word(3)], src, pStage, id_to_spec_id);
                B.Init(id_to_type_id[insn.word(4)], src, pStage, id_to_spec_id);
                C.Init(id_to_type_id[insn.word(5)], src, pStage, id_to_spec_id);

                if (A.all_constant && B.all_constant && C.all_constant && D.all_constant) {
                    // Validate that the type parameters are all supported for the same
                    // cooperative matrix property.
                    bool valid = false;
                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
                        if (cooperative_matrix_properties[i].AType == A.component_type &&
                            cooperative_matrix_properties[i].MSize == A.rows && cooperative_matrix_properties[i].KSize == A.cols &&
                            cooperative_matrix_properties[i].scope == A.scope &&

                            cooperative_matrix_properties[i].BType == B.component_type &&
                            cooperative_matrix_properties[i].KSize == B.rows && cooperative_matrix_properties[i].NSize == B.cols &&
                            cooperative_matrix_properties[i].scope == B.scope &&

                            cooperative_matrix_properties[i].CType == C.component_type &&
                            cooperative_matrix_properties[i].MSize == C.rows && cooperative_matrix_properties[i].NSize == C.cols &&
                            cooperative_matrix_properties[i].scope == C.scope &&

                            cooperative_matrix_properties[i].DType == D.component_type &&
                            cooperative_matrix_properties[i].MSize == D.rows && cooperative_matrix_properties[i].NSize == D.cols &&
                            cooperative_matrix_properties[i].scope == D.scope) {
                            valid = true;
                            break;
                        }
                    }
                    if (!valid) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixMulAdd,
                                        "OpCooperativeMatrixMulAddNV (result id = %u) operands don't match a supported matrix "
                                        "VkCooperativeMatrixPropertiesNV",
                                        insn.word(2));
                    }
                }
                break;
            }
            default:
                break;
        }
    }

    return skip;
}
2207
// Validate the VK_KHR_shader_float_controls execution modes declared for the given
// entry point: each mode/bit-width combination must have its corresponding device
// feature enabled, and mixing different denorm (or rounding) modes across bit widths
// requires the Separate*Settings feature. Returns true if an error was logged.
bool CoreChecks::ValidateExecutionModes(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) {
    auto entrypoint_id = entrypoint.word(2);

    // The first denorm execution mode encountered, along with its bit width.
    // Used to check if SeparateDenormSettings is respected.
    std::pair<spv::ExecutionMode, uint32_t> first_denorm_execution_mode = std::make_pair(spv::ExecutionModeMax, 0);

    // The first rounding mode encountered, along with its bit width.
    // Used to check if SeparateRoundingModeSettings is respected.
    std::pair<spv::ExecutionMode, uint32_t> first_rounding_mode = std::make_pair(spv::ExecutionModeMax, 0);

    bool skip = false;

    // Scan every OpExecutionMode that targets this entry point; word(3) of each
    // float-controls mode is the floating-point bit width it applies to.
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
            auto mode = insn.word(2);
            switch (mode) {
                case spv::ExecutionModeSignedZeroInfNanPreserve: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat64)) {
                        skip |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    kVUID_Core_Shader_FeatureNotEnabled,
                                    "Shader requires SignedZeroInfNanPreserve for bit width %d but it is not enabled on the device",
                                    bit_width);
                    }
                    break;
                }

                case spv::ExecutionModeDenormPreserve: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormPreserveFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderDenormPreserveFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderDenormPreserveFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires DenormPreserve for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    // A *different* denorm mode on a *different* bit width is only legal
                    // with SeparateDenormSettings (same-width conflicts are statically
                    // invalid SPIR-V and caught elsewhere).
                    if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
                        // Register the first denorm execution mode found
                        first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width &&
                               !enabled_features.float_controls.separateDenormSettings) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate denorm execution modes for different bit widths but "
                                        "SeparateDenormSettings is not enabled on the device");
                    }
                    break;
                }

                case spv::ExecutionModeDenormFlushToZero: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires DenormFlushToZero for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    // Same separate-denorm-settings check as DenormPreserve above.
                    if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
                        // Register the first denorm execution mode found
                        first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width &&
                               !enabled_features.float_controls.separateDenormSettings) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate denorm execution modes for different bit widths but "
                                        "SeparateDenormSettings is not enabled on the device");
                    }
                    break;
                }

                case spv::ExecutionModeRoundingModeRTE: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTEFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTEFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTEFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires RoundingModeRTE for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    // Mixing RTE/RTZ across bit widths requires SeparateRoundingModeSettings.
                    if (first_rounding_mode.first == spv::ExecutionModeMax) {
                        // Register the first rounding mode found
                        first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width &&
                               !enabled_features.float_controls.separateRoundingModeSettings) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate rounding modes for different bit widths but "
                                        "SeparateRoundingModeSettings is not enabled on the device");
                    }
                    break;
                }

                case spv::ExecutionModeRoundingModeRTZ: {
                    auto bit_width = insn.word(3);
                    if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTZFloat16) ||
                        (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTZFloat32) ||
                        (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTZFloat64)) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader requires RoundingModeRTZ for bit width %d but it is not enabled on the device",
                                        bit_width);
                    }

                    // Mixing RTE/RTZ across bit widths requires SeparateRoundingModeSettings.
                    if (first_rounding_mode.first == spv::ExecutionModeMax) {
                        // Register the first rounding mode found
                        first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
                    } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width &&
                               !enabled_features.float_controls.separateRoundingModeSettings) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        kVUID_Core_Shader_FeatureNotEnabled,
                                        "Shader uses separate rounding modes for different bit widths but "
                                        "SeparateRoundingModeSettings is not enabled on the device");
                    }
                    break;
                }
            }
        }
    }

    return skip;
}
2340
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002341static uint32_t DescriptorTypeToReqs(SHADER_MODULE_STATE const *module, uint32_t type_id) {
Chris Forbes47567b72017-06-09 12:09:45 -07002342 auto type = module->get_def(type_id);
2343
2344 while (true) {
2345 switch (type.opcode()) {
2346 case spv::OpTypeArray:
Chris Forbes062f1222018-08-21 15:34:15 -07002347 case spv::OpTypeRuntimeArray:
Chris Forbes47567b72017-06-09 12:09:45 -07002348 case spv::OpTypeSampledImage:
2349 type = module->get_def(type.word(2));
2350 break;
2351 case spv::OpTypePointer:
2352 type = module->get_def(type.word(3));
2353 break;
2354 case spv::OpTypeImage: {
2355 auto dim = type.word(3);
2356 auto arrayed = type.word(5);
2357 auto msaa = type.word(6);
2358
Chris Forbes74ba2232018-08-27 15:19:27 -07002359 uint32_t bits = 0;
2360 switch (GetFundamentalType(module, type.word(2))) {
2361 case FORMAT_TYPE_FLOAT:
2362 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT;
2363 break;
2364 case FORMAT_TYPE_UINT:
2365 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
2366 break;
2367 case FORMAT_TYPE_SINT:
2368 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
2369 break;
2370 default:
2371 break;
2372 }
2373
Chris Forbes47567b72017-06-09 12:09:45 -07002374 switch (dim) {
2375 case spv::Dim1D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002376 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_1D;
2377 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002378 case spv::Dim2D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002379 bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
2380 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_2D;
2381 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002382 case spv::Dim3D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002383 bits |= DESCRIPTOR_REQ_VIEW_TYPE_3D;
2384 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002385 case spv::DimCube:
Chris Forbes74ba2232018-08-27 15:19:27 -07002386 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_CUBE;
2387 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002388 case spv::DimSubpassData:
Chris Forbes74ba2232018-08-27 15:19:27 -07002389 bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
2390 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002391 default: // buffer, etc.
Chris Forbes74ba2232018-08-27 15:19:27 -07002392 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002393 }
2394 }
2395 default:
2396 return 0;
2397 }
2398 }
2399}
2400
2401// For given pipelineLayout verify that the set_layout_node at slot.first
2402// has the requested binding at slot.second and return ptr to that binding
Mark Lobodzinskica6ebe32019-04-25 11:43:37 -06002403static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_STATE const *pipelineLayout,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002404 descriptor_slot_t slot) {
Chris Forbes47567b72017-06-09 12:09:45 -07002405 if (!pipelineLayout) return nullptr;
2406
2407 if (slot.first >= pipelineLayout->set_layouts.size()) return nullptr;
2408
2409 return pipelineLayout->set_layouts[slot.first]->GetDescriptorSetLayoutBindingPtrFromBinding(slot.second);
2410}
2411
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002412static bool FindLocalSize(SHADER_MODULE_STATE const *src, uint32_t &local_size_x, uint32_t &local_size_y, uint32_t &local_size_z) {
Locke1ec6d952019-04-02 11:57:21 -06002413 for (auto insn : *src) {
2414 if (insn.opcode() == spv::OpEntryPoint) {
2415 auto executionModel = insn.word(1);
2416 auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
2417 if (entrypointStageBits == VK_SHADER_STAGE_COMPUTE_BIT) {
2418 auto entrypoint_id = insn.word(2);
2419 for (auto insn1 : *src) {
2420 if (insn1.opcode() == spv::OpExecutionMode && insn1.word(1) == entrypoint_id &&
2421 insn1.word(2) == spv::ExecutionModeLocalSize) {
2422 local_size_x = insn1.word(3);
2423 local_size_y = insn1.word(4);
2424 local_size_z = insn1.word(5);
2425 return true;
2426 }
2427 }
2428 }
2429 }
2430 }
2431 return false;
2432}
2433
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002434static void ProcessExecutionModes(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint, PIPELINE_STATE *pipeline) {
Jeff Bolz105d6492018-09-29 15:46:44 -05002435 auto entrypoint_id = entrypoint.word(2);
Chris Forbes0771b672018-03-22 21:13:46 -07002436 bool is_point_mode = false;
2437
2438 for (auto insn : *src) {
2439 if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
2440 switch (insn.word(2)) {
2441 case spv::ExecutionModePointMode:
2442 // In tessellation shaders, PointMode is separate and trumps the tessellation topology.
2443 is_point_mode = true;
2444 break;
2445
2446 case spv::ExecutionModeOutputPoints:
2447 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
2448 break;
2449
2450 case spv::ExecutionModeIsolines:
2451 case spv::ExecutionModeOutputLineStrip:
2452 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
2453 break;
2454
2455 case spv::ExecutionModeTriangles:
2456 case spv::ExecutionModeQuads:
2457 case spv::ExecutionModeOutputTriangleStrip:
2458 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
2459 break;
2460 }
2461 }
2462 }
2463
2464 if (is_point_mode) pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
2465}
2466
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002467// If PointList topology is specified in the pipeline, verify that a shader geometry stage writes PointSize
2468// o If there is only a vertex shader : gl_PointSize must be written when using points
2469// o If there is a geometry or tessellation shader:
2470// - If shaderTessellationAndGeometryPointSize feature is enabled:
2471// * gl_PointSize must be written in the final geometry stage
2472// - If shaderTessellationAndGeometryPointSize feature is disabled:
2473// * gl_PointSize must NOT be written and a default of 1.0 is assumed
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002474bool CoreChecks::ValidatePointListShaderState(const PIPELINE_STATE *pipeline, SHADER_MODULE_STATE const *src,
2475 spirv_inst_iter entrypoint, VkShaderStageFlagBits stage) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002476 if (pipeline->topology_at_rasterizer != VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
2477 return false;
2478 }
2479
2480 bool pointsize_written = false;
2481 bool skip = false;
2482
2483 // Search for PointSize built-in decorations
2484 std::vector<uint32_t> pointsize_builtin_offsets;
2485 spirv_inst_iter insn = entrypoint;
2486 while (!pointsize_written && (insn.opcode() != spv::OpFunction)) {
2487 if (insn.opcode() == spv::OpMemberDecorate) {
2488 if (insn.word(3) == spv::DecorationBuiltIn) {
2489 if (insn.word(4) == spv::BuiltInPointSize) {
2490 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
2491 }
2492 }
2493 } else if (insn.opcode() == spv::OpDecorate) {
2494 if (insn.word(2) == spv::DecorationBuiltIn) {
2495 if (insn.word(3) == spv::BuiltInPointSize) {
2496 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
2497 }
2498 }
2499 }
2500
2501 insn++;
2502 }
2503
2504 if ((stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || stage == VK_SHADER_STAGE_GEOMETRY_BIT) &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002505 !enabled_features.core.shaderTessellationAndGeometryPointSize) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002506 if (pointsize_written) {
Mark Lobodzinski93a1fa72019-04-19 12:12:25 -06002507 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002508 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_PointSizeBuiltInOverSpecified,
2509 "Pipeline topology is set to POINT_LIST and geometry or tessellation shaders write PointSize which "
2510 "is prohibited when the shaderTessellationAndGeometryPointSize feature is not enabled.");
2511 }
2512 } else if (!pointsize_written) {
2513 skip |=
Mark Lobodzinski93a1fa72019-04-19 12:12:25 -06002514 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002515 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_MissingPointSizeBuiltIn,
2516 "Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader corresponding to %s.",
2517 string_VkShaderStageFlagBits(stage));
2518 }
2519 return skip;
2520}
2521
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002522bool CoreChecks::ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002523 SHADER_MODULE_STATE const **out_module, spirv_inst_iter *out_entrypoint,
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002524 bool check_point_size) {
Chris Forbes47567b72017-06-09 12:09:45 -07002525 bool skip = false;
Mark Lobodzinski9e9da292019-03-06 16:19:55 -07002526 auto module = *out_module = GetShaderModuleState(pStage->module);
Chris Forbes47567b72017-06-09 12:09:45 -07002527
2528 if (!module->has_valid_spirv) return false;
2529
2530 // Find the entrypoint
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002531 auto entrypoint = *out_entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
Chris Forbes47567b72017-06-09 12:09:45 -07002532 if (entrypoint == module->end()) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002533 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton78d09922018-05-17 15:48:45 -06002534 "VUID-VkPipelineShaderStageCreateInfo-pName-00707", "No entrypoint found named `%s` for stage %s..",
2535 pStage->pName, string_VkShaderStageFlagBits(pStage->stage))) {
Chris Forbes47567b72017-06-09 12:09:45 -07002536 return true; // no point continuing beyond here, any analysis is just going to be garbage.
2537 }
2538 }
2539
Chris Forbes47567b72017-06-09 12:09:45 -07002540 // Mark accessible ids
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002541 auto accessible_ids = MarkAccessibleIds(module, entrypoint);
2542 ProcessExecutionModes(module, entrypoint, pipeline);
Chris Forbes47567b72017-06-09 12:09:45 -07002543
2544 // Validate descriptor set layout against what the entrypoint actually uses
Chris Forbes8af24522018-03-07 11:37:45 -08002545 bool has_writable_descriptor = false;
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002546 auto descriptor_uses = CollectInterfaceByDescriptorSlot(report_data, module, accessible_ids, &has_writable_descriptor);
Chris Forbes47567b72017-06-09 12:09:45 -07002547
Chris Forbes349b3132018-03-07 11:38:08 -08002548 // Validate shader capabilities against enabled device features
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002549 skip |= ValidateShaderCapabilities(module, pStage->stage, has_writable_descriptor);
2550 skip |= ValidateShaderStageInputOutputLimits(module, pStage, pipeline);
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002551 skip |= ValidateExecutionModes(module, entrypoint);
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002552 skip |= ValidateSpecializationOffsets(report_data, pStage);
2553 skip |= ValidatePushConstantUsage(report_data, pipeline->pipeline_layout.push_constant_ranges.get(), module, accessible_ids,
2554 pStage->stage);
Jeff Bolze54ae892018-09-08 12:16:29 -05002555 if (check_point_size && !pipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002556 skip |= ValidatePointListShaderState(pipeline, module, entrypoint, pStage->stage);
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002557 }
Jeff Bolze4356752019-03-07 11:23:46 -06002558 skip |= ValidateCooperativeMatrix(module, pStage, pipeline);
Chris Forbes47567b72017-06-09 12:09:45 -07002559
2560 // Validate descriptor use
2561 for (auto use : descriptor_uses) {
2562 // While validating shaders capture which slots are used by the pipeline
2563 auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002564 reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
Chris Forbes47567b72017-06-09 12:09:45 -07002565
2566 // Verify given pipelineLayout has requested setLayout with requested binding
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002567 const auto &binding = GetDescriptorBinding(&pipeline->pipeline_layout, use.first);
Chris Forbes47567b72017-06-09 12:09:45 -07002568 unsigned required_descriptor_count;
Jeff Bolze54ae892018-09-08 12:16:29 -05002569 std::set<uint32_t> descriptor_types = TypeToDescriptorTypeSet(module, use.second.type_id, required_descriptor_count);
Chris Forbes47567b72017-06-09 12:09:45 -07002570
2571 if (!binding) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002572 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002573 kVUID_Core_Shader_MissingDescriptor,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002574 "Shader uses descriptor slot %u.%u (expected `%s`) but not declared in pipeline layout",
Jeff Bolze54ae892018-09-08 12:16:29 -05002575 use.first.first, use.first.second, string_descriptorTypes(descriptor_types).c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07002576 } else if (~binding->stageFlags & pStage->stage) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002577 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002578 kVUID_Core_Shader_DescriptorNotAccessibleFromStage,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002579 "Shader uses descriptor slot %u.%u but descriptor not accessible from stage %s", use.first.first,
2580 use.first.second, string_VkShaderStageFlagBits(pStage->stage));
Jeff Bolze54ae892018-09-08 12:16:29 -05002581 } else if (descriptor_types.find(binding->descriptorType) == descriptor_types.end()) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002582 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002583 kVUID_Core_Shader_DescriptorTypeMismatch,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002584 "Type mismatch on descriptor slot %u.%u (expected `%s`) but descriptor of type %s", use.first.first,
Jeff Bolze54ae892018-09-08 12:16:29 -05002585 use.first.second, string_descriptorTypes(descriptor_types).c_str(),
Chris Forbes47567b72017-06-09 12:09:45 -07002586 string_VkDescriptorType(binding->descriptorType));
2587 } else if (binding->descriptorCount < required_descriptor_count) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002588 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002589 kVUID_Core_Shader_DescriptorTypeMismatch,
Chris Forbes73c00bf2018-06-22 16:28:06 -07002590 "Shader expects at least %u descriptors for binding %u.%u but only %u provided",
2591 required_descriptor_count, use.first.first, use.first.second, binding->descriptorCount);
Chris Forbes47567b72017-06-09 12:09:45 -07002592 }
2593 }
2594
2595 // Validate use of input attachments against subpass structure
2596 if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002597 auto input_attachment_uses = CollectInterfaceByInputAttachmentIndex(module, accessible_ids);
Chris Forbes47567b72017-06-09 12:09:45 -07002598
Petr Krause91f7a12017-12-14 20:57:36 +01002599 auto rpci = pipeline->rp_state->createInfo.ptr();
Chris Forbes47567b72017-06-09 12:09:45 -07002600 auto subpass = pipeline->graphicsPipelineCI.subpass;
2601
2602 for (auto use : input_attachment_uses) {
2603 auto input_attachments = rpci->pSubpasses[subpass].pInputAttachments;
2604 auto index = (input_attachments && use.first < rpci->pSubpasses[subpass].inputAttachmentCount)
Dave Houltona9df0ce2018-02-07 10:51:23 -07002605 ? input_attachments[use.first].attachment
2606 : VK_ATTACHMENT_UNUSED;
Chris Forbes47567b72017-06-09 12:09:45 -07002607
2608 if (index == VK_ATTACHMENT_UNUSED) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002609 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002610 kVUID_Core_Shader_MissingInputAttachment,
Chris Forbes47567b72017-06-09 12:09:45 -07002611 "Shader consumes input attachment index %d but not provided in subpass", use.first);
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002612 } else if (!(GetFormatType(rpci->pAttachments[index].format) & GetFundamentalType(module, use.second.type_id))) {
Chris Forbes47567b72017-06-09 12:09:45 -07002613 skip |=
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06002614 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06002615 kVUID_Core_Shader_InputAttachmentTypeMismatch,
Chris Forbes47567b72017-06-09 12:09:45 -07002616 "Subpass input attachment %u format of %s does not match type used in shader `%s`", use.first,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002617 string_VkFormat(rpci->pAttachments[index].format), DescribeType(module, use.second.type_id).c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07002618 }
2619 }
2620 }
Lockeaa8fdc02019-04-02 11:59:20 -06002621 if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT) {
2622 skip |= ValidateComputeWorkGroupSizes(module);
2623 }
Chris Forbes47567b72017-06-09 12:09:45 -07002624 return skip;
2625}
2626
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002627static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data, SHADER_MODULE_STATE const *producer,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002628 spirv_inst_iter producer_entrypoint, shader_stage_attributes const *producer_stage,
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002629 SHADER_MODULE_STATE const *consumer, spirv_inst_iter consumer_entrypoint,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002630 shader_stage_attributes const *consumer_stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07002631 bool skip = false;
2632
2633 auto outputs =
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002634 CollectInterfaceByLocation(producer, producer_entrypoint, spv::StorageClassOutput, producer_stage->arrayed_output);
2635 auto inputs = CollectInterfaceByLocation(consumer, consumer_entrypoint, spv::StorageClassInput, consumer_stage->arrayed_input);
Chris Forbes47567b72017-06-09 12:09:45 -07002636
2637 auto a_it = outputs.begin();
2638 auto b_it = inputs.begin();
2639
2640 // Maps sorted by key (location); walk them together to find mismatches
2641 while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
2642 bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
2643 bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
2644 auto a_first = a_at_end ? std::make_pair(0u, 0u) : a_it->first;
2645 auto b_first = b_at_end ? std::make_pair(0u, 0u) : b_it->first;
2646
2647 if (b_at_end || ((!a_at_end) && (a_first < b_first))) {
Mark Young4e919b22018-05-21 15:53:59 -06002648 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
Dave Houlton51653902018-06-22 17:32:13 -06002649 HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
Mark Young4e919b22018-05-21 15:53:59 -06002650 "%s writes to output location %u.%u which is not consumed by %s", producer_stage->name, a_first.first,
2651 a_first.second, consumer_stage->name);
Chris Forbes47567b72017-06-09 12:09:45 -07002652 a_it++;
2653 } else if (a_at_end || a_first > b_first) {
Mark Young4e919b22018-05-21 15:53:59 -06002654 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
Dave Houlton51653902018-06-22 17:32:13 -06002655 HandleToUint64(consumer->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
Mark Young4e919b22018-05-21 15:53:59 -06002656 "%s consumes input location %u.%u which is not written by %s", consumer_stage->name, b_first.first,
2657 b_first.second, producer_stage->name);
Chris Forbes47567b72017-06-09 12:09:45 -07002658 b_it++;
2659 } else {
2660 // subtleties of arrayed interfaces:
2661 // - if is_patch, then the member is not arrayed, even though the interface may be.
2662 // - if is_block_member, then the extra array level of an arrayed interface is not
2663 // expressed in the member type -- it's expressed in the block type.
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002664 if (!TypesMatch(producer, consumer, a_it->second.type_id, b_it->second.type_id,
2665 producer_stage->arrayed_output && !a_it->second.is_patch && !a_it->second.is_block_member,
2666 consumer_stage->arrayed_input && !b_it->second.is_patch && !b_it->second.is_block_member, true)) {
Mark Young4e919b22018-05-21 15:53:59 -06002667 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
Dave Houlton51653902018-06-22 17:32:13 -06002668 HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
Mark Young4e919b22018-05-21 15:53:59 -06002669 "Type mismatch on location %u.%u: '%s' vs '%s'", a_first.first, a_first.second,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002670 DescribeType(producer, a_it->second.type_id).c_str(),
2671 DescribeType(consumer, b_it->second.type_id).c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07002672 }
2673 if (a_it->second.is_patch != b_it->second.is_patch) {
Mark Young4e919b22018-05-21 15:53:59 -06002674 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
Dave Houlton51653902018-06-22 17:32:13 -06002675 HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
Dave Houltona9df0ce2018-02-07 10:51:23 -07002676 "Decoration mismatch on location %u.%u: is per-%s in %s stage but per-%s in %s stage",
Chris Forbes47567b72017-06-09 12:09:45 -07002677 a_first.first, a_first.second, a_it->second.is_patch ? "patch" : "vertex", producer_stage->name,
2678 b_it->second.is_patch ? "patch" : "vertex", consumer_stage->name);
2679 }
2680 if (a_it->second.is_relaxed_precision != b_it->second.is_relaxed_precision) {
Mark Young4e919b22018-05-21 15:53:59 -06002681 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
Dave Houlton51653902018-06-22 17:32:13 -06002682 HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
Chris Forbes47567b72017-06-09 12:09:45 -07002683 "Decoration mismatch on location %u.%u: %s and %s stages differ in precision", a_first.first,
2684 a_first.second, producer_stage->name, consumer_stage->name);
2685 }
2686 a_it++;
2687 b_it++;
2688 }
2689 }
2690
Ari Suonpaa696b3432019-03-11 14:02:57 +02002691 if (consumer_stage->stage != VK_SHADER_STAGE_FRAGMENT_BIT) {
2692 auto builtins_producer = CollectBuiltinBlockMembers(producer, producer_entrypoint, spv::StorageClassOutput);
2693 auto builtins_consumer = CollectBuiltinBlockMembers(consumer, consumer_entrypoint, spv::StorageClassInput);
2694
2695 if (!builtins_producer.empty() && !builtins_consumer.empty()) {
2696 if (builtins_producer.size() != builtins_consumer.size()) {
2697 skip |=
2698 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2699 HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
2700 "Number of elements inside builtin block differ between stages (%s %d vs %s %d).", producer_stage->name,
2701 (int)builtins_producer.size(), consumer_stage->name, (int)builtins_consumer.size());
2702 } else {
2703 auto it_producer = builtins_producer.begin();
2704 auto it_consumer = builtins_consumer.begin();
2705 while (it_producer != builtins_producer.end() && it_consumer != builtins_consumer.end()) {
2706 if (*it_producer != *it_consumer) {
2707 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2708 HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
2709 "Builtin variable inside block doesn't match between %s and %s.", producer_stage->name,
2710 consumer_stage->name);
2711 break;
2712 }
2713 it_producer++;
2714 it_consumer++;
2715 }
2716 }
2717 }
2718 }
2719
Chris Forbes47567b72017-06-09 12:09:45 -07002720 return skip;
2721}
2722
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002723static inline uint32_t DetermineFinalGeomStage(PIPELINE_STATE *pipeline, VkGraphicsPipelineCreateInfo *pCreateInfo) {
2724 uint32_t stage_mask = 0;
2725 if (pipeline->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
2726 for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
2727 stage_mask |= pCreateInfo->pStages[i].stage;
2728 }
2729 // Determine which shader in which PointSize should be written (the final geometry stage)
Jeff Bolz105d6492018-09-29 15:46:44 -05002730 if (stage_mask & VK_SHADER_STAGE_MESH_BIT_NV) {
2731 stage_mask = VK_SHADER_STAGE_MESH_BIT_NV;
2732 } else if (stage_mask & VK_SHADER_STAGE_GEOMETRY_BIT) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002733 stage_mask = VK_SHADER_STAGE_GEOMETRY_BIT;
2734 } else if (stage_mask & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
2735 stage_mask = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
2736 } else if (stage_mask & VK_SHADER_STAGE_VERTEX_BIT) {
2737 stage_mask = VK_SHADER_STAGE_VERTEX_BIT;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002738 }
2739 }
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002740 return stage_mask;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002741}
2742
Chris Forbes47567b72017-06-09 12:09:45 -07002743// Validate that the shaders used by the given pipeline and store the active_slots
2744// that are actually used by the pipeline into pPipeline->active_slots
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002745bool CoreChecks::ValidateAndCapturePipelineShaderState(PIPELINE_STATE *pipeline) {
Chris Forbesa400a8a2017-07-20 13:10:24 -07002746 auto pCreateInfo = pipeline->graphicsPipelineCI.ptr();
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002747 int vertex_stage = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
2748 int fragment_stage = GetShaderStageId(VK_SHADER_STAGE_FRAGMENT_BIT);
Chris Forbes47567b72017-06-09 12:09:45 -07002749
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002750 SHADER_MODULE_STATE const *shaders[32];
Chris Forbes47567b72017-06-09 12:09:45 -07002751 memset(shaders, 0, sizeof(shaders));
Jeff Bolz7e35c392018-09-04 15:30:41 -05002752 spirv_inst_iter entrypoints[32];
Chris Forbes47567b72017-06-09 12:09:45 -07002753 memset(entrypoints, 0, sizeof(entrypoints));
2754 bool skip = false;
2755
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002756 uint32_t pointlist_stage_mask = DetermineFinalGeomStage(pipeline, pCreateInfo);
2757
Chris Forbes47567b72017-06-09 12:09:45 -07002758 for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
2759 auto pStage = &pCreateInfo->pStages[i];
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002760 auto stage_id = GetShaderStageId(pStage->stage);
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002761 skip |= ValidatePipelineShaderStage(pStage, pipeline, &shaders[stage_id], &entrypoints[stage_id],
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002762 (pointlist_stage_mask == pStage->stage));
Chris Forbes47567b72017-06-09 12:09:45 -07002763 }
2764
2765 // if the shader stages are no good individually, cross-stage validation is pointless.
2766 if (skip) return true;
2767
2768 auto vi = pCreateInfo->pVertexInputState;
2769
2770 if (vi) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002771 skip |= ValidateViConsistency(report_data, vi);
Chris Forbes47567b72017-06-09 12:09:45 -07002772 }
2773
2774 if (shaders[vertex_stage] && shaders[vertex_stage]->has_valid_spirv) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002775 skip |= ValidateViAgainstVsInputs(report_data, vi, shaders[vertex_stage], entrypoints[vertex_stage]);
Chris Forbes47567b72017-06-09 12:09:45 -07002776 }
2777
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002778 int producer = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
2779 int consumer = GetShaderStageId(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
Chris Forbes47567b72017-06-09 12:09:45 -07002780
2781 while (!shaders[producer] && producer != fragment_stage) {
2782 producer++;
2783 consumer++;
2784 }
2785
2786 for (; producer != fragment_stage && consumer <= fragment_stage; consumer++) {
2787 assert(shaders[producer]);
Chris Forbesdbb43fc2018-02-16 16:59:23 -08002788 if (shaders[consumer]) {
2789 if (shaders[consumer]->has_valid_spirv && shaders[producer]->has_valid_spirv) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002790 skip |= ValidateInterfaceBetweenStages(report_data, shaders[producer], entrypoints[producer],
2791 &shader_stage_attribs[producer], shaders[consumer], entrypoints[consumer],
2792 &shader_stage_attribs[consumer]);
Chris Forbesdbb43fc2018-02-16 16:59:23 -08002793 }
Chris Forbes47567b72017-06-09 12:09:45 -07002794
2795 producer = consumer;
2796 }
2797 }
2798
2799 if (shaders[fragment_stage] && shaders[fragment_stage]->has_valid_spirv) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002800 skip |= ValidateFsOutputsAgainstRenderPass(report_data, shaders[fragment_stage], entrypoints[fragment_stage], pipeline,
2801 pCreateInfo->subpass);
Chris Forbes47567b72017-06-09 12:09:45 -07002802 }
2803
2804 return skip;
2805}
2806
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002807bool CoreChecks::ValidateComputePipeline(PIPELINE_STATE *pipeline) {
Chris Forbesa400a8a2017-07-20 13:10:24 -07002808 auto pCreateInfo = pipeline->computePipelineCI.ptr();
Chris Forbes47567b72017-06-09 12:09:45 -07002809
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002810 SHADER_MODULE_STATE const *module;
Chris Forbes47567b72017-06-09 12:09:45 -07002811 spirv_inst_iter entrypoint;
2812
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002813 return ValidatePipelineShaderStage(&pCreateInfo->stage, pipeline, &module, &entrypoint, false);
Chris Forbes47567b72017-06-09 12:09:45 -07002814}
Chris Forbes4ae55b32017-06-09 14:42:56 -07002815
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002816bool CoreChecks::ValidateRayTracingPipelineNV(PIPELINE_STATE *pipeline) {
Jeff Bolzfbe51582018-09-13 10:01:35 -05002817 auto pCreateInfo = pipeline->raytracingPipelineCI.ptr();
2818
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002819 SHADER_MODULE_STATE const *module;
Jeff Bolzfbe51582018-09-13 10:01:35 -05002820 spirv_inst_iter entrypoint;
2821
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002822 return ValidatePipelineShaderStage(pCreateInfo->pStages, pipeline, &module, &entrypoint, false);
Jeff Bolzfbe51582018-09-13 10:01:35 -05002823}
2824
Dave Houltona9df0ce2018-02-07 10:51:23 -07002825uint32_t ValidationCache::MakeShaderHash(VkShaderModuleCreateInfo const *smci) { return XXH32(smci->pCode, smci->codeSize, 0); }
Chris Forbes9a61e082017-07-24 15:35:29 -07002826
Dave Houltona9df0ce2018-02-07 10:51:23 -07002827static ValidationCache *GetValidationCacheInfo(VkShaderModuleCreateInfo const *pCreateInfo) {
John Zulauf25ea2432019-04-05 10:07:38 -06002828 const auto validation_cache_ci = lvl_find_in_chain<VkShaderModuleValidationCacheCreateInfoEXT>(pCreateInfo->pNext);
2829 if (validation_cache_ci) {
John Zulauf146ee802019-04-05 15:31:06 -06002830 return CastFromHandle<ValidationCache *>(validation_cache_ci->validationCache);
Chris Forbes9a61e082017-07-24 15:35:29 -07002831 }
Chris Forbes9a61e082017-07-24 15:35:29 -07002832 return nullptr;
2833}
2834
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -07002835bool CoreChecks::PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
2836 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
Chris Forbes4ae55b32017-06-09 14:42:56 -07002837 bool skip = false;
2838 spv_result_t spv_valid = SPV_SUCCESS;
Chris Forbes4ae55b32017-06-09 14:42:56 -07002839
Mark Lobodzinskib02a4852019-04-19 12:35:30 -06002840 if (disabled.shader_validation) {
Chris Forbes4ae55b32017-06-09 14:42:56 -07002841 return false;
2842 }
2843
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06002844 auto have_glsl_shader = device_extensions.vk_nv_glsl_shader;
Chris Forbes4ae55b32017-06-09 14:42:56 -07002845
2846 if (!have_glsl_shader && (pCreateInfo->codeSize % 4)) {
Mark Lobodzinski7767ad82019-03-09 13:35:25 -07002847 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton78d09922018-05-17 15:48:45 -06002848 "VUID-VkShaderModuleCreateInfo-pCode-01376",
2849 "SPIR-V module not valid: Codesize must be a multiple of 4 but is " PRINTF_SIZE_T_SPECIFIER ".",
2850 pCreateInfo->codeSize);
Chris Forbes4ae55b32017-06-09 14:42:56 -07002851 } else {
Chris Forbes9a61e082017-07-24 15:35:29 -07002852 auto cache = GetValidationCacheInfo(pCreateInfo);
2853 uint32_t hash = 0;
2854 if (cache) {
2855 hash = ValidationCache::MakeShaderHash(pCreateInfo);
Dave Houltona9df0ce2018-02-07 10:51:23 -07002856 if (cache->Contains(hash)) return false;
Chris Forbes9a61e082017-07-24 15:35:29 -07002857 }
2858
Chris Forbes4ae55b32017-06-09 14:42:56 -07002859 // Use SPIRV-Tools validator to try and catch any issues with the module itself
Dave Houlton0ea2d012018-06-21 14:00:26 -06002860 spv_target_env spirv_environment = SPV_ENV_VULKAN_1_0;
Mark Lobodzinski544def72019-04-19 14:25:59 -06002861 if (api_version >= VK_API_VERSION_1_1) {
Dave Houlton0ea2d012018-06-21 14:00:26 -06002862 spirv_environment = SPV_ENV_VULKAN_1_1;
2863 }
2864 spv_context ctx = spvContextCreate(spirv_environment);
Dave Houltona9df0ce2018-02-07 10:51:23 -07002865 spv_const_binary_t binary{pCreateInfo->pCode, pCreateInfo->codeSize / sizeof(uint32_t)};
Chris Forbes4ae55b32017-06-09 14:42:56 -07002866 spv_diagnostic diag = nullptr;
Karl Schultzfda1b382018-08-08 18:56:11 -06002867 spv_validator_options options = spvValidatorOptionsCreate();
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06002868 if (device_extensions.vk_khr_relaxed_block_layout) {
Karl Schultzfda1b382018-08-08 18:56:11 -06002869 spvValidatorOptionsSetRelaxBlockLayout(options, true);
2870 }
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06002871 if (device_extensions.vk_ext_scalar_block_layout &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002872 enabled_features.scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
Tobias Hector6a0ece72018-12-10 12:24:05 +00002873 spvValidatorOptionsSetScalarBlockLayout(options, true);
2874 }
Karl Schultzfda1b382018-08-08 18:56:11 -06002875 spv_valid = spvValidateWithOptions(ctx, options, &binary, &diag);
Chris Forbes4ae55b32017-06-09 14:42:56 -07002876 if (spv_valid != SPV_SUCCESS) {
2877 if (!have_glsl_shader || (pCreateInfo->pCode[0] == spv::MagicNumber)) {
Mark Lobodzinski7767ad82019-03-09 13:35:25 -07002878 skip |=
2879 log_msg(report_data, spv_valid == SPV_WARNING ? VK_DEBUG_REPORT_WARNING_BIT_EXT : VK_DEBUG_REPORT_ERROR_BIT_EXT,
2880 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_InconsistentSpirv,
2881 "SPIR-V module not valid: %s", diag && diag->error ? diag->error : "(no error text)");
Chris Forbes4ae55b32017-06-09 14:42:56 -07002882 }
Chris Forbes9a61e082017-07-24 15:35:29 -07002883 } else {
2884 if (cache) {
2885 cache->Insert(hash);
2886 }
Chris Forbes4ae55b32017-06-09 14:42:56 -07002887 }
2888
Karl Schultzfda1b382018-08-08 18:56:11 -06002889 spvValidatorOptionsDestroy(options);
Chris Forbes4ae55b32017-06-09 14:42:56 -07002890 spvDiagnosticDestroy(diag);
2891 spvContextDestroy(ctx);
2892 }
2893
Chris Forbes4ae55b32017-06-09 14:42:56 -07002894 return skip;
Mark Lobodzinski01734072019-02-13 17:39:15 -07002895}
2896
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -07002897void CoreChecks::PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
2898 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
2899 void *csm_state_data) {
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002900 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinskib02a4852019-04-19 12:35:30 -06002901 if (enabled.gpu_validation) {
Mark Lobodzinski586d10e2019-03-08 18:19:48 -07002902 GpuPreCallCreateShaderModule(pCreateInfo, pAllocator, pShaderModule, &csm_state->unique_shader_id,
Mark Lobodzinski01734072019-02-13 17:39:15 -07002903 &csm_state->instrumented_create_info, &csm_state->instrumented_pgm);
2904 }
2905}
2906
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -07002907void CoreChecks::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
2908 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
2909 VkResult result, void *csm_state_data) {
Mark Lobodzinski01734072019-02-13 17:39:15 -07002910 if (VK_SUCCESS != result) return;
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07002911 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002912
Mark Lobodzinski544def72019-04-19 14:25:59 -06002913 spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002914 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002915 std::unique_ptr<SHADER_MODULE_STATE> new_shader_module(
2916 is_spirv ? new SHADER_MODULE_STATE(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
2917 : new SHADER_MODULE_STATE());
Mark Lobodzinski7767ad82019-03-09 13:35:25 -07002918 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
Mark Lobodzinski01734072019-02-13 17:39:15 -07002919}
Lockeaa8fdc02019-04-02 11:59:20 -06002920
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002921bool CoreChecks::ValidateComputeWorkGroupSizes(const SHADER_MODULE_STATE *shader) {
Lockeaa8fdc02019-04-02 11:59:20 -06002922 bool skip = false;
2923 uint32_t local_size_x = 0;
2924 uint32_t local_size_y = 0;
2925 uint32_t local_size_z = 0;
2926 if (FindLocalSize(shader, local_size_x, local_size_y, local_size_z)) {
2927 if (local_size_x > phys_dev_props.limits.maxComputeWorkGroupSize[0]) {
2928 skip |=
2929 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2930 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
2931 "ShaderMdoule %s local_size_x (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[0] (%" PRIu32 ").",
2932 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
2933 phys_dev_props.limits.maxComputeWorkGroupSize[0]);
2934 }
2935 if (local_size_y > phys_dev_props.limits.maxComputeWorkGroupSize[1]) {
2936 skip |=
2937 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2938 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
2939 "ShaderMdoule %s local_size_y (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[1] (%" PRIu32 ").",
2940 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
2941 phys_dev_props.limits.maxComputeWorkGroupSize[1]);
2942 }
2943 if (local_size_z > phys_dev_props.limits.maxComputeWorkGroupSize[2]) {
2944 skip |=
2945 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2946 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
2947 "ShaderMdoule %s local_size_z (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[2] (%" PRIu32 ").",
2948 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
2949 phys_dev_props.limits.maxComputeWorkGroupSize[2]);
2950 }
2951
2952 uint32_t limit = phys_dev_props.limits.maxComputeWorkGroupInvocations;
2953 uint64_t invocations = local_size_x * local_size_y;
2954 // Prevent overflow.
2955 bool fail = false;
2956 if (invocations > UINT32_MAX || invocations > limit) {
2957 fail = true;
2958 }
2959 if (!fail) {
2960 invocations *= local_size_z;
2961 if (invocations > UINT32_MAX || invocations > limit) {
2962 fail = true;
2963 }
2964 }
2965 if (fail) {
2966 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
2967 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupInvocations",
2968 "ShaderMdoule %s local_size (%" PRIu32 ", %" PRIu32 ", %" PRIu32
2969 ") exceeds device limit maxComputeWorkGroupInvocations (%" PRIu32 ").",
2970 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x, local_size_y, local_size_z,
2971 limit);
2972 }
2973 }
2974 return skip;
2975}
Lockef39c0772019-04-03 14:40:02 -06002976
Mark Lobodzinski33a34b82019-04-25 11:38:36 -06002977bool CoreChecks::ValidateComputeWorkGroupInvocations(CMD_BUFFER_STATE *cb_state, uint32_t groupCountX, uint32_t groupCountY,
Lockef39c0772019-04-03 14:40:02 -06002978 uint32_t groupCountZ) {
2979 auto const &state = cb_state->lastBound[VK_PIPELINE_BIND_POINT_COMPUTE];
2980 PIPELINE_STATE *pPipe = state.pipeline_state;
2981 if (!pPipe) return false;
2982 auto pCreateInfo = pPipe->computePipelineCI.ptr();
2983 if (!pCreateInfo) return false;
2984
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002985 unordered_map<VkShaderModule, std::unique_ptr<SHADER_MODULE_STATE>>::iterator it =
2986 shaderModuleMap.find(pCreateInfo->stage.module);
Lockef39c0772019-04-03 14:40:02 -06002987 if (it != shaderModuleMap.end()) {
2988 uint32_t local_size_x = 0;
2989 uint32_t local_size_y = 0;
2990 uint32_t local_size_z = 0;
2991 if (FindLocalSize(&(*it->second), local_size_x, local_size_y, local_size_z)) {
2992 uint32_t limit = phys_dev_props.limits.maxComputeWorkGroupInvocations;
2993 uint64_t invocations = local_size_x * local_size_y;
2994 // Prevent overflow.
2995 bool overflow = false;
2996 if (invocations > UINT32_MAX) {
2997 overflow = true;
2998 }
2999 if (!overflow) {
3000 invocations *= local_size_z;
3001 if (invocations > UINT32_MAX) {
3002 overflow = true;
3003 }
3004 }
3005 if (!overflow) {
3006 invocations *= groupCountX;
3007 if (invocations > UINT32_MAX) {
3008 overflow = true;
3009 }
3010 }
3011 if (!overflow) {
3012 invocations *= groupCountY;
3013 if (invocations > UINT32_MAX) {
3014 overflow = true;
3015 }
3016 }
3017 if (!overflow) {
3018 invocations *= groupCountZ;
3019 if (invocations > UINT32_MAX) {
3020 overflow = true;
3021 }
3022 }
3023 if (overflow) {
3024 return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
3025 HandleToUint64(it->first), "UNASSIGNED-features-limits-maxComputeWorkGroupInvocations",
3026 "ShaderMdoule %s invocations (>%" PRIu32
3027 ") exceeds device limit maxComputeWorkGroupInvocations (%" PRIu32 ").",
3028 report_data->FormatHandle(it->first).c_str(), UINT32_MAX, limit);
3029 }
3030 if (invocations > limit) {
3031 return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
3032 HandleToUint64(it->first), "UNASSIGNED-features-limits-maxComputeWorkGroupInvocations",
3033 "ShaderMdoule %s invocations (%" PRIu64
3034 ") exceeds device limit maxComputeWorkGroupInvocations (%" PRIu32 ").",
3035 report_data->FormatHandle(it->first).c_str(), invocations, limit);
3036 }
3037 }
3038 }
3039 return false;
3040}