blob: edd5182a5f137ec41eab6188b5ed0b9c76239418 [file] [log] [blame]
Karl Schultz7b024b42018-08-30 16:18:18 -06001/* Copyright (c) 2015-2019 The Khronos Group Inc.
2 * Copyright (c) 2015-2019 Valve Corporation
3 * Copyright (c) 2015-2019 LunarG, Inc.
4 * Copyright (C) 2015-2019 Google Inc.
Chris Forbes47567b72017-06-09 12:09:45 -07005 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Chris Forbes <chrisf@ijw.co.nz>
Dave Houlton51653902018-06-22 17:32:13 -060019 * Author: Dave Houlton <daveh@lunarg.com>
Chris Forbes47567b72017-06-09 12:09:45 -070020 */
21
22#include <cinttypes>
23#include <cassert>
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +020024#include <chrono>
Chris Forbes47567b72017-06-09 12:09:45 -070025#include <vector>
26#include <unordered_map>
27#include <string>
28#include <sstream>
29#include <SPIRV/spirv.hpp>
30#include "vk_loader_platform.h"
31#include "vk_enum_string_helper.h"
Chris Forbes47567b72017-06-09 12:09:45 -070032#include "vk_layer_data.h"
33#include "vk_layer_extension_utils.h"
34#include "vk_layer_utils.h"
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -070035#include "chassis.h"
Chris Forbes47567b72017-06-09 12:09:45 -070036#include "core_validation.h"
Chris Forbes47567b72017-06-09 12:09:45 -070037#include "shader_validation.h"
Chris Forbes4ae55b32017-06-09 14:42:56 -070038#include "spirv-tools/libspirv.h"
Chris Forbes9a61e082017-07-24 15:35:29 -070039#include "xxhash.h"
Chris Forbes47567b72017-06-09 12:09:45 -070040
// Fundamental numeric flavour of a format/type, used when checking that pipeline
// attachment or vertex-attribute formats agree with the shader-side declaration.
// Values are bit flags so a single format can be compatible with more than one
// shader-side type (see GetFormatType: depth/stencil reports FLOAT | UINT).
enum FORMAT_TYPE {
    FORMAT_TYPE_FLOAT = 1,  // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
    FORMAT_TYPE_SINT = 2,
    FORMAT_TYPE_UINT = 4,
};

// (location, component) pair identifying one slot of a shader interface.
typedef std::pair<unsigned, unsigned> location_t;
48
// Static description of one graphics pipeline stage: a human-readable name,
// whether the stage's input/output interfaces carry an extra per-vertex array
// dimension, and the corresponding VkShaderStageFlags bit.
struct shader_stage_attributes {
    char const *const name;
    bool arrayed_input;   // stage inputs have an outer per-vertex array dimension
    bool arrayed_output;  // stage outputs have an outer per-vertex array dimension
    VkShaderStageFlags stage;
};

// One entry per classic graphics stage, in pipeline order.
static shader_stage_attributes shader_stage_attribs[] = {
    {"vertex shader", false, false, VK_SHADER_STAGE_VERTEX_BIT},
    {"tessellation control shader", true, true, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
    {"tessellation evaluation shader", true, false, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
    {"geometry shader", true, false, VK_SHADER_STAGE_GEOMETRY_BIT},
    {"fragment shader", false, false, VK_SHADER_STAGE_FRAGMENT_BIT},
};

// Forward declaration -- defined later in this file; used by BuildDefIndex below.
unsigned ExecutionModelToShaderStageFlagBits(unsigned mode);
65
Chris Forbes47567b72017-06-09 12:09:45 -070066// SPIRV utility functions
Mark Lobodzinski3c59d972019-04-25 11:28:14 -060067void SHADER_MODULE_STATE::BuildDefIndex() {
Chris Forbes47567b72017-06-09 12:09:45 -070068 for (auto insn : *this) {
69 switch (insn.opcode()) {
70 // Types
71 case spv::OpTypeVoid:
72 case spv::OpTypeBool:
73 case spv::OpTypeInt:
74 case spv::OpTypeFloat:
75 case spv::OpTypeVector:
76 case spv::OpTypeMatrix:
77 case spv::OpTypeImage:
78 case spv::OpTypeSampler:
79 case spv::OpTypeSampledImage:
80 case spv::OpTypeArray:
81 case spv::OpTypeRuntimeArray:
82 case spv::OpTypeStruct:
83 case spv::OpTypeOpaque:
84 case spv::OpTypePointer:
85 case spv::OpTypeFunction:
86 case spv::OpTypeEvent:
87 case spv::OpTypeDeviceEvent:
88 case spv::OpTypeReserveId:
89 case spv::OpTypeQueue:
90 case spv::OpTypePipe:
Shannon McPherson0fa28232018-11-01 11:59:02 -060091 case spv::OpTypeAccelerationStructureNV:
Jeff Bolze4356752019-03-07 11:23:46 -060092 case spv::OpTypeCooperativeMatrixNV:
Chris Forbes47567b72017-06-09 12:09:45 -070093 def_index[insn.word(1)] = insn.offset();
94 break;
95
96 // Fixed constants
97 case spv::OpConstantTrue:
98 case spv::OpConstantFalse:
99 case spv::OpConstant:
100 case spv::OpConstantComposite:
101 case spv::OpConstantSampler:
102 case spv::OpConstantNull:
103 def_index[insn.word(2)] = insn.offset();
104 break;
105
106 // Specialization constants
107 case spv::OpSpecConstantTrue:
108 case spv::OpSpecConstantFalse:
109 case spv::OpSpecConstant:
110 case spv::OpSpecConstantComposite:
111 case spv::OpSpecConstantOp:
112 def_index[insn.word(2)] = insn.offset();
113 break;
114
115 // Variables
116 case spv::OpVariable:
117 def_index[insn.word(2)] = insn.offset();
118 break;
119
120 // Functions
121 case spv::OpFunction:
122 def_index[insn.word(2)] = insn.offset();
123 break;
124
John Zulauf14c355b2019-06-27 16:09:37 -0600125 // Entry points ... add to the entrypoint table
126 case spv::OpEntryPoint: {
127 // Entry points do not have an id (the id is the function id) and thus need their own table
128 auto entrypoint_name = (char const *)&insn.word(3);
129 auto execution_model = insn.word(1);
130 auto entrypoint_stage = ExecutionModelToShaderStageFlagBits(execution_model);
131 entry_points.emplace(entrypoint_name, EntryPoint{insn.offset(), entrypoint_stage});
132 break;
133 }
Chris Forbes47567b72017-06-09 12:09:45 -0700134 default:
135 // We don't care about any other defs for now.
136 break;
137 }
138 }
139}
140
Jeff Bolz105d6492018-09-29 15:46:44 -0500141unsigned ExecutionModelToShaderStageFlagBits(unsigned mode) {
142 switch (mode) {
143 case spv::ExecutionModelVertex:
144 return VK_SHADER_STAGE_VERTEX_BIT;
145 case spv::ExecutionModelTessellationControl:
146 return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
147 case spv::ExecutionModelTessellationEvaluation:
148 return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
149 case spv::ExecutionModelGeometry:
150 return VK_SHADER_STAGE_GEOMETRY_BIT;
151 case spv::ExecutionModelFragment:
152 return VK_SHADER_STAGE_FRAGMENT_BIT;
153 case spv::ExecutionModelGLCompute:
154 return VK_SHADER_STAGE_COMPUTE_BIT;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600155 case spv::ExecutionModelRayGenerationNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700156 return VK_SHADER_STAGE_RAYGEN_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600157 case spv::ExecutionModelAnyHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700158 return VK_SHADER_STAGE_ANY_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600159 case spv::ExecutionModelClosestHitNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700160 return VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600161 case spv::ExecutionModelMissNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700162 return VK_SHADER_STAGE_MISS_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600163 case spv::ExecutionModelIntersectionNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700164 return VK_SHADER_STAGE_INTERSECTION_BIT_NV;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600165 case spv::ExecutionModelCallableNV:
Eric Werness30127fd2018-10-31 21:01:03 -0700166 return VK_SHADER_STAGE_CALLABLE_BIT_NV;
Jeff Bolz105d6492018-09-29 15:46:44 -0500167 case spv::ExecutionModelTaskNV:
168 return VK_SHADER_STAGE_TASK_BIT_NV;
169 case spv::ExecutionModelMeshNV:
170 return VK_SHADER_STAGE_MESH_BIT_NV;
171 default:
172 return 0;
173 }
174}
175
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600176static spirv_inst_iter FindEntrypoint(SHADER_MODULE_STATE const *src, char const *name, VkShaderStageFlagBits stageBits) {
John Zulauf14c355b2019-06-27 16:09:37 -0600177 auto range = src->entry_points.equal_range(name);
178 for (auto it = range.first; it != range.second; ++it) {
179 if (it->second.stage == stageBits) {
180 return src->at(it->second.offset);
Chris Forbes47567b72017-06-09 12:09:45 -0700181 }
182 }
Chris Forbes47567b72017-06-09 12:09:45 -0700183 return src->end();
184}
185
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600186static char const *StorageClassName(unsigned sc) {
Chris Forbes47567b72017-06-09 12:09:45 -0700187 switch (sc) {
188 case spv::StorageClassInput:
189 return "input";
190 case spv::StorageClassOutput:
191 return "output";
192 case spv::StorageClassUniformConstant:
193 return "const uniform";
194 case spv::StorageClassUniform:
195 return "uniform";
196 case spv::StorageClassWorkgroup:
197 return "workgroup local";
198 case spv::StorageClassCrossWorkgroup:
199 return "workgroup global";
200 case spv::StorageClassPrivate:
201 return "private global";
202 case spv::StorageClassFunction:
203 return "function";
204 case spv::StorageClassGeneric:
205 return "generic";
206 case spv::StorageClassAtomicCounter:
207 return "atomic counter";
208 case spv::StorageClassImage:
209 return "image";
210 case spv::StorageClassPushConstant:
211 return "push constant";
Chris Forbes9f89d752018-03-07 12:57:48 -0800212 case spv::StorageClassStorageBuffer:
213 return "storage buffer";
Chris Forbes47567b72017-06-09 12:09:45 -0700214 default:
215 return "unknown";
216 }
217}
218
219// Get the value of an integral constant
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600220unsigned GetConstantValue(SHADER_MODULE_STATE const *src, unsigned id) {
Chris Forbes47567b72017-06-09 12:09:45 -0700221 auto value = src->get_def(id);
222 assert(value != src->end());
223
224 if (value.opcode() != spv::OpConstant) {
225 // TODO: Either ensure that the specialization transform is already performed on a module we're
226 // considering here, OR -- specialize on the fly now.
227 return 1;
228 }
229
230 return value.word(3);
231}
232
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600233static void DescribeTypeInner(std::ostringstream &ss, SHADER_MODULE_STATE const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700234 auto insn = src->get_def(type);
235 assert(insn != src->end());
236
237 switch (insn.opcode()) {
238 case spv::OpTypeBool:
239 ss << "bool";
240 break;
241 case spv::OpTypeInt:
242 ss << (insn.word(3) ? 's' : 'u') << "int" << insn.word(2);
243 break;
244 case spv::OpTypeFloat:
245 ss << "float" << insn.word(2);
246 break;
247 case spv::OpTypeVector:
248 ss << "vec" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600249 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700250 break;
251 case spv::OpTypeMatrix:
252 ss << "mat" << insn.word(3) << " of ";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600253 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700254 break;
255 case spv::OpTypeArray:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600256 ss << "arr[" << GetConstantValue(src, insn.word(3)) << "] of ";
257 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700258 break;
Chris Forbes062f1222018-08-21 15:34:15 -0700259 case spv::OpTypeRuntimeArray:
260 ss << "runtime arr[] of ";
261 DescribeTypeInner(ss, src, insn.word(2));
262 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700263 case spv::OpTypePointer:
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600264 ss << "ptr to " << StorageClassName(insn.word(2)) << " ";
265 DescribeTypeInner(ss, src, insn.word(3));
Chris Forbes47567b72017-06-09 12:09:45 -0700266 break;
267 case spv::OpTypeStruct: {
268 ss << "struct of (";
269 for (unsigned i = 2; i < insn.len(); i++) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600270 DescribeTypeInner(ss, src, insn.word(i));
Chris Forbes47567b72017-06-09 12:09:45 -0700271 if (i == insn.len() - 1) {
272 ss << ")";
273 } else {
274 ss << ", ";
275 }
276 }
277 break;
278 }
279 case spv::OpTypeSampler:
280 ss << "sampler";
281 break;
282 case spv::OpTypeSampledImage:
283 ss << "sampler+";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600284 DescribeTypeInner(ss, src, insn.word(2));
Chris Forbes47567b72017-06-09 12:09:45 -0700285 break;
286 case spv::OpTypeImage:
287 ss << "image(dim=" << insn.word(3) << ", sampled=" << insn.word(7) << ")";
288 break;
Shannon McPherson0fa28232018-11-01 11:59:02 -0600289 case spv::OpTypeAccelerationStructureNV:
Jeff Bolz105d6492018-09-29 15:46:44 -0500290 ss << "accelerationStruture";
291 break;
Chris Forbes47567b72017-06-09 12:09:45 -0700292 default:
293 ss << "oddtype";
294 break;
295 }
296}
297
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600298static std::string DescribeType(SHADER_MODULE_STATE const *src, unsigned type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700299 std::ostringstream ss;
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600300 DescribeTypeInner(ss, src, type);
Chris Forbes47567b72017-06-09 12:09:45 -0700301 return ss.str();
302}
303
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600304static bool IsNarrowNumericType(spirv_inst_iter type) {
Chris Forbes47567b72017-06-09 12:09:45 -0700305 if (type.opcode() != spv::OpTypeInt && type.opcode() != spv::OpTypeFloat) return false;
306 return type.word(2) < 64;
307}
308
// Structural comparison of two types from (possibly different) shader modules,
// used for producer/consumer interface matching between pipeline stages.
//  - a_arrayed/b_arrayed: the corresponding side has an extra per-vertex array
//    dimension (tess/geom interfaces) that should be peeled before comparing.
//  - relaxed: allow the producer ('a') side to be a wider vector of the same
//    narrow numeric element type than the consumer ('b') side.
static bool TypesMatch(SHADER_MODULE_STATE const *a, SHADER_MODULE_STATE const *b, unsigned a_type, unsigned b_type, bool a_arrayed,
                       bool b_arrayed, bool relaxed) {
    // Walk two type trees together, and complain about differences
    auto a_insn = a->get_def(a_type);
    auto b_insn = b->get_def(b_type);
    assert(a_insn != a->end());
    assert(b_insn != b->end());

    // Ignore runtime-sized arrays-- they cannot appear in these interfaces.

    if (a_arrayed && a_insn.opcode() == spv::OpTypeArray) {
        // Peel the per-vertex array level off the 'a' side and recurse.
        return TypesMatch(a, b, a_insn.word(2), b_type, false, b_arrayed, relaxed);
    }

    if (b_arrayed && b_insn.opcode() == spv::OpTypeArray) {
        // We probably just found the extra level of arrayness in b_type: compare the type inside it to a_type
        return TypesMatch(a, b, a_type, b_insn.word(2), a_arrayed, false, relaxed);
    }

    if (a_insn.opcode() == spv::OpTypeVector && relaxed && IsNarrowNumericType(b_insn)) {
        // Relaxed mode: a vector on the producer side may feed a narrower scalar
        // consumer; compare the vector's element type against the consumer.
        return TypesMatch(a, b, a_insn.word(2), b_type, a_arrayed, b_arrayed, false);
    }

    if (a_insn.opcode() != b_insn.opcode()) {
        return false;
    }

    if (a_insn.opcode() == spv::OpTypePointer) {
        // Match on pointee type. storage class is expected to differ
        return TypesMatch(a, b, a_insn.word(3), b_insn.word(3), a_arrayed, b_arrayed, relaxed);
    }

    if (a_arrayed || b_arrayed) {
        // If we havent resolved array-of-verts by here, we're not going to.
        return false;
    }

    switch (a_insn.opcode()) {
        case spv::OpTypeBool:
            return true;
        case spv::OpTypeInt:
            // Match on width, signedness
            return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeFloat:
            // Match on width
            return a_insn.word(2) == b_insn.word(2);
        case spv::OpTypeVector:
            // Match on element type, count.
            if (!TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false)) return false;
            if (relaxed && IsNarrowNumericType(a->get_def(a_insn.word(2)))) {
                // Relaxed: producer may have more components than consumer.
                return a_insn.word(3) >= b_insn.word(3);
            } else {
                return a_insn.word(3) == b_insn.word(3);
            }
        case spv::OpTypeMatrix:
            // Match on element type, count.
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   a_insn.word(3) == b_insn.word(3);
        case spv::OpTypeArray:
            // Match on element type, count. these all have the same layout. we don't get here if b_arrayed. This differs from
            // vector & matrix types in that the array size is the id of a constant instruction, * not a literal within OpTypeArray
            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
                   GetConstantValue(a, a_insn.word(3)) == GetConstantValue(b, b_insn.word(3));
        case spv::OpTypeStruct:
            // Match on all element types
            {
                if (a_insn.len() != b_insn.len()) {
                    return false;  // Structs cannot match if member counts differ
                }

                for (unsigned i = 2; i < a_insn.len(); i++) {
                    if (!TypesMatch(a, b, a_insn.word(i), b_insn.word(i), a_arrayed, b_arrayed, false)) {
                        return false;
                    }
                }

                return true;
            }
        default:
            // Remaining types are CLisms, or may not appear in the interfaces we are interested in. Just claim no match.
            return false;
    }
}
392
// Look up 'id' in 'map', returning 'def' when the key is absent.
static unsigned ValueOrDefault(std::unordered_map<unsigned, unsigned> const &map, unsigned id, unsigned def) {
    const auto found = map.find(id);
    return (found != map.end()) ? found->second : def;
}
400
// Number of interface locations consumed by the given type.  strip_array_level
// peels one outermost array dimension before counting -- callers pass true for
// per-vertex arrayed interfaces (tess/geom), where that dimension does not
// consume locations.
static unsigned GetLocationsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
    auto insn = src->get_def(type);
    assert(insn != src->end());

    switch (insn.opcode()) {
        case spv::OpTypePointer:
            // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
            // pointers around.
            return GetLocationsConsumedByType(src, insn.word(3), strip_array_level);
        case spv::OpTypeArray:
            if (strip_array_level) {
                return GetLocationsConsumedByType(src, insn.word(2), false);
            } else {
                // element count (id of a constant) * locations per element
                return GetConstantValue(src, insn.word(3)) * GetLocationsConsumedByType(src, insn.word(2), false);
            }
        case spv::OpTypeMatrix:
            // Num locations is the dimension * element size
            return insn.word(3) * GetLocationsConsumedByType(src, insn.word(2), false);
        case spv::OpTypeVector: {
            auto scalar_type = src->get_def(insn.word(2));
            auto bit_width =
                (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;

            // Locations are 128-bit wide; 3- and 4-component vectors of 64 bit types require two.
            return (bit_width * insn.word(3) + 127) / 128;
        }
        default:
            // Everything else is just 1.
            return 1;

            // TODO: extend to handle 64bit scalar types, whose vectors may need multiple locations.
    }
}
434
// Number of 32-bit components consumed by the given type.  strip_array_level
// peels one outermost array dimension before counting, as in
// GetLocationsConsumedByType.  Presumably used for component-count limit
// checks -- confirm at call sites.
static unsigned GetComponentsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
    auto insn = src->get_def(type);
    assert(insn != src->end());

    switch (insn.opcode()) {
        case spv::OpTypePointer:
            // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
            // pointers around.
            return GetComponentsConsumedByType(src, insn.word(3), strip_array_level);
        case spv::OpTypeStruct: {
            // Sum over all members.
            uint32_t sum = 0;
            for (uint32_t i = 2; i < insn.len(); i++) {  // i=2 to skip word(0) and word(1)=ID of struct
                sum += GetComponentsConsumedByType(src, insn.word(i), false);
            }
            return sum;
        }
        case spv::OpTypeArray:
            if (strip_array_level) {
                return GetComponentsConsumedByType(src, insn.word(2), false);
            } else {
                return GetConstantValue(src, insn.word(3)) * GetComponentsConsumedByType(src, insn.word(2), false);
            }
        case spv::OpTypeMatrix:
            // Num locations is the dimension * element size
            return insn.word(3) * GetComponentsConsumedByType(src, insn.word(2), false);
        case spv::OpTypeVector: {
            auto scalar_type = src->get_def(insn.word(2));
            auto bit_width =
                (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
            // One component is 32-bit
            return (bit_width * insn.word(3) + 31) / 32;
        }
        case spv::OpTypeFloat: {
            auto bit_width = insn.word(2);
            return (bit_width + 31) / 32;
        }
        case spv::OpTypeInt: {
            auto bit_width = insn.word(2);
            return (bit_width + 31) / 32;
        }
        case spv::OpConstant:
            // word(1) of a constant is its result type; count that type's components.
            return GetComponentsConsumedByType(src, insn.word(1), false);
        default:
            return 0;
    }
}
481
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600482static unsigned GetLocationsConsumedByFormat(VkFormat format) {
Chris Forbes47567b72017-06-09 12:09:45 -0700483 switch (format) {
484 case VK_FORMAT_R64G64B64A64_SFLOAT:
485 case VK_FORMAT_R64G64B64A64_SINT:
486 case VK_FORMAT_R64G64B64A64_UINT:
487 case VK_FORMAT_R64G64B64_SFLOAT:
488 case VK_FORMAT_R64G64B64_SINT:
489 case VK_FORMAT_R64G64B64_UINT:
490 return 2;
491 default:
492 return 1;
493 }
494}
495
// Characterize a VkFormat as FORMAT_TYPE_* flags for comparison against the
// shader-side characterization from GetFundamentalType.  Depth/stencil formats
// report both FLOAT and UINT.  Branch order matters: the sint/uint checks must
// precede the depth/stencil check.
static unsigned GetFormatType(VkFormat fmt) {
    if (FormatIsSInt(fmt)) return FORMAT_TYPE_SINT;
    if (FormatIsUInt(fmt)) return FORMAT_TYPE_UINT;
    if (FormatIsDepthAndStencil(fmt)) return FORMAT_TYPE_FLOAT | FORMAT_TYPE_UINT;
    if (fmt == VK_FORMAT_UNDEFINED) return 0;
    // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader.
    return FORMAT_TYPE_FLOAT;
}
504
// characterizes a SPIR-V type appearing in an interface to a FF stage, for comparison to a VkFormat's characterization above.
// also used for input attachments, as we statically know their format.
// Returns one of the FORMAT_TYPE_* flags, or 0 for types with no numeric flavour.
static unsigned GetFundamentalType(SHADER_MODULE_STATE const *src, unsigned type) {
    auto insn = src->get_def(type);
    assert(insn != src->end());

    switch (insn.opcode()) {
        case spv::OpTypeInt:
            // word 3 is signedness
            return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
        case spv::OpTypeFloat:
            return FORMAT_TYPE_FLOAT;
        // Composite/indirect types: recurse on the element type in word 2 ...
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeImage:
            return GetFundamentalType(src, insn.word(2));
        // ... or the pointee type in word 3 for pointers.
        case spv::OpTypePointer:
            return GetFundamentalType(src, insn.word(3));

        default:
            return 0;
    }
}
529
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600530static uint32_t GetShaderStageId(VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -0700531 uint32_t bit_pos = uint32_t(u_ffs(stage));
532 return bit_pos - 1;
533}
534
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600535static spirv_inst_iter GetStructType(SHADER_MODULE_STATE const *src, spirv_inst_iter def, bool is_array_of_verts) {
Chris Forbes47567b72017-06-09 12:09:45 -0700536 while (true) {
537 if (def.opcode() == spv::OpTypePointer) {
538 def = src->get_def(def.word(3));
539 } else if (def.opcode() == spv::OpTypeArray && is_array_of_verts) {
540 def = src->get_def(def.word(2));
541 is_array_of_verts = false;
542 } else if (def.opcode() == spv::OpTypeStruct) {
543 return def;
544 } else {
545 return src->end();
546 }
547 }
548}
549
// If variable {id, type_id} is an interface block (its struct type id appears in
// 'blocks', the set of Block-decorated ids), emit one interface_var into *out for
// every (location, component) slot occupied by its members, and return true.
// Returns false -- leaving *out untouched -- when the variable is not a block.
static bool CollectInterfaceBlockMembers(SHADER_MODULE_STATE const *src, std::map<location_t, interface_var> *out,
                                         std::unordered_map<unsigned, unsigned> const &blocks, bool is_array_of_verts, uint32_t id,
                                         uint32_t type_id, bool is_patch, int /*first_location*/) {
    // Walk down the type_id presented, trying to determine whether it's actually an interface block.
    auto type = GetStructType(src, src->get_def(type_id), is_array_of_verts && !is_patch);
    if (type == src->end() || blocks.find(type.word(1)) == blocks.end()) {
        // This isn't an interface block.
        return false;
    }

    // Per-member decoration state gathered in the first pass.
    std::unordered_map<unsigned, unsigned> member_components;
    std::unordered_map<unsigned, unsigned> member_relaxed_precision;
    std::unordered_map<unsigned, unsigned> member_patch;

    // Walk all the OpMemberDecorate for type's result id -- first pass, collect components.
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);

            if (insn.word(3) == spv::DecorationComponent) {
                unsigned component = insn.word(4);
                member_components[member_index] = component;
            }

            if (insn.word(3) == spv::DecorationRelaxedPrecision) {
                member_relaxed_precision[member_index] = 1;
            }

            if (insn.word(3) == spv::DecorationPatch) {
                member_patch[member_index] = 1;
            }
        }
    }

    // TODO: correctly handle location assignment from outside

    // Second pass -- produce the output, from Location decorations
    for (auto insn : *src) {
        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
            unsigned member_index = insn.word(2);
            unsigned member_type_id = type.word(2 + member_index);

            if (insn.word(3) == spv::DecorationLocation) {
                unsigned location = insn.word(4);
                unsigned num_locations = GetLocationsConsumedByType(src, member_type_id, false);
                auto component_it = member_components.find(member_index);
                unsigned component = component_it == member_components.end() ? 0 : component_it->second;
                bool is_relaxed_precision = member_relaxed_precision.find(member_index) != member_relaxed_precision.end();
                bool member_is_patch = is_patch || member_patch.count(member_index) > 0;

                // A member consuming N locations produces N interface_vars, one per location.
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    // TODO: member index in interface_var too?
                    v.type_id = member_type_id;
                    v.offset = offset;
                    v.is_patch = member_is_patch;
                    v.is_block_member = true;
                    v.is_relaxed_precision = is_relaxed_precision;
                    (*out)[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return true;
}
617
Ari Suonpaa696b3432019-03-11 14:02:57 +0200618static std::vector<uint32_t> FindEntrypointInterfaces(spirv_inst_iter entrypoint) {
619 std::vector<uint32_t> interfaces;
620 // Find the end of the entrypoint's name string. additional zero bytes follow the actual null terminator, to fill out the
621 // rest of the word - so we only need to look at the last byte in the word to determine which word contains the terminator.
622 uint32_t word = 3;
623 while (entrypoint.word(word) & 0xff000000u) {
624 ++word;
625 }
626 ++word;
627
628 for (; word < entrypoint.len(); word++) interfaces.push_back(entrypoint.word(word));
629
630 return interfaces;
631}
632
// Build a map of (location, component) -> interface_var for every variable in the
// given storage class listed on the entrypoint's interface.  Builtin-decorated
// variables are skipped; block variables are expanded member-by-member via
// CollectInterfaceBlockMembers; everything else contributes one entry per
// location it consumes.
static std::map<location_t, interface_var> CollectInterfaceByLocation(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
                                                                      spv::StorageClass sinterface, bool is_array_of_verts) {
    // Per-variable decoration state, keyed by variable id.
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;
    std::unordered_map<unsigned, unsigned> var_components;
    std::unordered_map<unsigned, unsigned> blocks;
    std::unordered_map<unsigned, unsigned> var_patch;
    std::unordered_map<unsigned, unsigned> var_relaxed_precision;

    for (auto insn : *src) {
        // We consider two interface models: SSO rendezvous-by-location, and builtins. Complain about anything that
        // fits neither model.
        if (insn.opcode() == spv::OpDecorate) {
            if (insn.word(2) == spv::DecorationLocation) {
                var_locations[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBuiltIn) {
                var_builtins[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationComponent) {
                var_components[insn.word(1)] = insn.word(3);
            }

            if (insn.word(2) == spv::DecorationBlock) {
                blocks[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationPatch) {
                var_patch[insn.word(1)] = 1;
            }

            if (insn.word(2) == spv::DecorationRelaxedPrecision) {
                var_relaxed_precision[insn.word(1)] = 1;
            }
        }
    }

    // TODO: handle grouped decorations
    // TODO: handle index=1 dual source outputs from FS -- two vars will have the same location, and we DON'T want to clobber.

    std::map<location_t, interface_var> out;

    for (uint32_t word : FindEntrypointInterfaces(entrypoint)) {
        auto insn = src->get_def(word);
        assert(insn != src->end());
        assert(insn.opcode() == spv::OpVariable);

        // word 3 of OpVariable is its storage class; keep only the requested interface.
        if (insn.word(3) == static_cast<uint32_t>(sinterface)) {
            unsigned id = insn.word(2);
            unsigned type = insn.word(1);

            int location = ValueOrDefault(var_locations, id, static_cast<unsigned>(-1));
            int builtin = ValueOrDefault(var_builtins, id, static_cast<unsigned>(-1));
            unsigned component = ValueOrDefault(var_components, id, 0);  // Unspecified is OK, is 0
            bool is_patch = var_patch.find(id) != var_patch.end();
            bool is_relaxed_precision = var_relaxed_precision.find(id) != var_relaxed_precision.end();

            if (builtin != -1)
                continue;  // Builtins participate in the builtin interface model, not location matching.
            else if (!CollectInterfaceBlockMembers(src, &out, blocks, is_array_of_verts, id, type, is_patch, location)) {
                // A user-defined interface variable, with a location. Where a variable occupied multiple locations, emit
                // one result for each.
                unsigned num_locations = GetLocationsConsumedByType(src, type, is_array_of_verts && !is_patch);
                for (unsigned int offset = 0; offset < num_locations; offset++) {
                    interface_var v = {};
                    v.id = id;
                    v.type_id = type;
                    v.offset = offset;
                    v.is_patch = is_patch;
                    v.is_relaxed_precision = is_relaxed_precision;
                    out[std::make_pair(location + offset, component)] = v;
                }
            }
        }
    }

    return out;
}
713
// Collects, for every member of the built-in interface block (e.g. gl_PerVertex) used by 'entrypoint' in the given
// storage class, the BuiltIn decoration value assigned to that member. The result is indexed by struct member:
// builtinBlockMembers[i] is the spv::BuiltIn of member i, or spv::BuiltInMax if the member carries no BuiltIn decoration.
// NOTE(review): if more than one interface variable of this storage class resolves to a decorated block struct, members
// from the second struct are appended after the first, but the OpMemberDecorate indices still start at 0 -- verify that
// callers only expect a single built-in block per storage class.
static std::vector<uint32_t> CollectBuiltinBlockMembers(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
                                                        uint32_t storageClass) {
    std::vector<uint32_t> variables;             // type ids of entrypoint interface variables in 'storageClass'
    std::vector<uint32_t> builtinStructMembers;  // struct type ids that have at least one BuiltIn member decoration
    std::vector<uint32_t> builtinDecorations;    // ids decorated (directly or via a member) as built-in

    // Single pass over the module's decoration section.
    // NOTE(review): the DecorationBlock case below consults builtinStructMembers populated earlier in this same pass,
    // so it assumes OpMemberDecorate instructions for a struct appear before the OpDecorate Block on that struct --
    // confirm this ordering holds for all producers.
    for (auto insn : *src) {
        switch (insn.opcode()) {
            // Find all built-in member decorations
            case spv::OpMemberDecorate:
                if (insn.word(3) == spv::DecorationBuiltIn) {
                    builtinStructMembers.push_back(insn.word(1));
                }
                break;
            // Find all built-in decorations
            case spv::OpDecorate:
                switch (insn.word(2)) {
                    case spv::DecorationBlock: {
                        uint32_t blockID = insn.word(1);
                        for (auto builtInBlockID : builtinStructMembers) {
                            // Check if one of the members of the block are built-in -> the block is built-in
                            if (blockID == builtInBlockID) {
                                builtinDecorations.push_back(blockID);
                                break;
                            }
                        }
                        break;
                    }
                    case spv::DecorationBuiltIn:
                        builtinDecorations.push_back(insn.word(1));
                        break;
                    default:
                        break;
                }
                break;
            default:
                break;
        }
    }

    // Find all interface variables belonging to the entrypoint and matching the storage class
    for (uint32_t id : FindEntrypointInterfaces(entrypoint)) {
        auto def = src->get_def(id);
        assert(def != src->end());
        assert(def.opcode() == spv::OpVariable);

        // def.word(1) is the variable's result-type id (an OpTypePointer); def.word(3) is its storage class.
        if (def.word(3) == storageClass) variables.push_back(def.word(1));
    }

    // Find all members belonging to the builtin block selected
    std::vector<uint32_t> builtinBlockMembers;
    for (auto &var : variables) {
        // Dereference the pointer type: word(3) of OpTypePointer is the pointee type.
        auto def = src->get_def(src->get_def(var).word(3));

        // It could be an array of IO blocks. The element type should be the struct defining the block contents
        if (def.opcode() == spv::OpTypeArray) def = src->get_def(def.word(2));

        // Now find all members belonging to the struct defining the IO block
        if (def.opcode() == spv::OpTypeStruct) {
            for (auto builtInID : builtinDecorations) {
                if (builtInID == def.word(1)) {
                    // OpTypeStruct words: [opcode, result id, member types...], hence len() - 2 members (i starts at 2).
                    for (int i = 2; i < (int)def.len(); i++)
                        builtinBlockMembers.push_back(spv::BuiltInMax);  // Start with undefined builtin for each struct member.
                    // These shouldn't be left after replacing.
                    for (auto insn : *src) {
                        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == builtInID &&
                            insn.word(3) == spv::DecorationBuiltIn) {
                            auto structIndex = insn.word(2);
                            assert(structIndex < builtinBlockMembers.size());
                            builtinBlockMembers[structIndex] = insn.word(4);  // word(4) is the BuiltIn enumerant
                        }
                    }
                }
            }
        }
    }

    return builtinBlockMembers;
}
793
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600794static std::vector<std::pair<uint32_t, interface_var>> CollectInterfaceByInputAttachmentIndex(
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600795 SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids) {
Chris Forbes47567b72017-06-09 12:09:45 -0700796 std::vector<std::pair<uint32_t, interface_var>> out;
797
798 for (auto insn : *src) {
799 if (insn.opcode() == spv::OpDecorate) {
800 if (insn.word(2) == spv::DecorationInputAttachmentIndex) {
801 auto attachment_index = insn.word(3);
802 auto id = insn.word(1);
803
804 if (accessible_ids.count(id)) {
805 auto def = src->get_def(id);
806 assert(def != src->end());
807
808 if (def.opcode() == spv::OpVariable && insn.word(3) == spv::StorageClassUniformConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600809 auto num_locations = GetLocationsConsumedByType(src, def.word(1), false);
Chris Forbes47567b72017-06-09 12:09:45 -0700810 for (unsigned int offset = 0; offset < num_locations; offset++) {
811 interface_var v = {};
812 v.id = id;
813 v.type_id = def.word(1);
814 v.offset = offset;
815 out.emplace_back(attachment_index + offset, v);
816 }
817 }
818 }
819 }
820 }
821 }
822
823 return out;
824}
825
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600826static bool IsWritableDescriptorType(SHADER_MODULE_STATE const *module, uint32_t type_id, bool is_storage_buffer) {
Chris Forbes8af24522018-03-07 11:37:45 -0800827 auto type = module->get_def(type_id);
828
829 // Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
Chris Forbes062f1222018-08-21 15:34:15 -0700830 while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
831 if (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypeRuntimeArray) {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700832 type = module->get_def(type.word(2)); // Element type
Chris Forbes8af24522018-03-07 11:37:45 -0800833 } else {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700834 type = module->get_def(type.word(3)); // Pointee type
Chris Forbes8af24522018-03-07 11:37:45 -0800835 }
836 }
837
838 switch (type.opcode()) {
839 case spv::OpTypeImage: {
840 auto dim = type.word(3);
841 auto sampled = type.word(7);
842 return sampled == 2 && dim != spv::DimSubpassData;
843 }
844
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700845 case spv::OpTypeStruct: {
846 std::unordered_set<unsigned> nonwritable_members;
Chris Forbes8af24522018-03-07 11:37:45 -0800847 for (auto insn : *module) {
848 if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
849 if (insn.word(2) == spv::DecorationBufferBlock) {
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700850 // Legacy storage block in the Uniform storage class
851 // has its struct type decorated with BufferBlock.
852 is_storage_buffer = true;
Chris Forbes8af24522018-03-07 11:37:45 -0800853 }
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700854 } else if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1) &&
855 insn.word(3) == spv::DecorationNonWritable) {
856 nonwritable_members.insert(insn.word(2));
Chris Forbes8af24522018-03-07 11:37:45 -0800857 }
858 }
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700859
860 // A buffer is writable if it's either flavor of storage buffer, and has any member not decorated
861 // as nonwritable.
862 return is_storage_buffer && nonwritable_members.size() != type.len() - 2;
863 }
Chris Forbes8af24522018-03-07 11:37:45 -0800864 }
865
866 return false;
867}
868
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600869static std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterfaceByDescriptorSlot(
Mark Lobodzinski3c59d972019-04-25 11:28:14 -0600870 debug_report_data const *report_data, SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids,
Chris Forbes8af24522018-03-07 11:37:45 -0800871 bool *has_writable_descriptor) {
Chris Forbes47567b72017-06-09 12:09:45 -0700872 std::unordered_map<unsigned, unsigned> var_sets;
873 std::unordered_map<unsigned, unsigned> var_bindings;
Chris Forbes8af24522018-03-07 11:37:45 -0800874 std::unordered_map<unsigned, unsigned> var_nonwritable;
Chris Forbes47567b72017-06-09 12:09:45 -0700875
876 for (auto insn : *src) {
877 // All variables in the Uniform or UniformConstant storage classes are required to be decorated with both
878 // DecorationDescriptorSet and DecorationBinding.
879 if (insn.opcode() == spv::OpDecorate) {
880 if (insn.word(2) == spv::DecorationDescriptorSet) {
881 var_sets[insn.word(1)] = insn.word(3);
882 }
883
884 if (insn.word(2) == spv::DecorationBinding) {
885 var_bindings[insn.word(1)] = insn.word(3);
886 }
Chris Forbes8af24522018-03-07 11:37:45 -0800887
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700888 // Note: do toplevel DecorationNonWritable out here; it applies to
889 // the OpVariable rather than the type.
Chris Forbes8af24522018-03-07 11:37:45 -0800890 if (insn.word(2) == spv::DecorationNonWritable) {
891 var_nonwritable[insn.word(1)] = 1;
892 }
Chris Forbes47567b72017-06-09 12:09:45 -0700893 }
894 }
895
896 std::vector<std::pair<descriptor_slot_t, interface_var>> out;
897
898 for (auto id : accessible_ids) {
899 auto insn = src->get_def(id);
900 assert(insn != src->end());
901
902 if (insn.opcode() == spv::OpVariable &&
Chris Forbes9f89d752018-03-07 12:57:48 -0800903 (insn.word(3) == spv::StorageClassUniform || insn.word(3) == spv::StorageClassUniformConstant ||
904 insn.word(3) == spv::StorageClassStorageBuffer)) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600905 unsigned set = ValueOrDefault(var_sets, insn.word(2), 0);
906 unsigned binding = ValueOrDefault(var_bindings, insn.word(2), 0);
Chris Forbes47567b72017-06-09 12:09:45 -0700907
908 interface_var v = {};
909 v.id = insn.word(2);
910 v.type_id = insn.word(1);
911 out.emplace_back(std::make_pair(set, binding), v);
Chris Forbes8af24522018-03-07 11:37:45 -0800912
Chris Forbes8d31e5d2018-10-08 17:19:15 -0700913 if (var_nonwritable.find(id) == var_nonwritable.end() &&
914 IsWritableDescriptorType(src, insn.word(1), insn.word(3) == spv::StorageClassStorageBuffer)) {
Chris Forbes8af24522018-03-07 11:37:45 -0800915 *has_writable_descriptor = true;
916 }
Chris Forbes47567b72017-06-09 12:09:45 -0700917 }
918 }
919
920 return out;
921}
922
Shannon McPhersonc06c33d2018-06-28 17:21:12 -0600923static bool ValidateViConsistency(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi) {
Chris Forbes47567b72017-06-09 12:09:45 -0700924 // Walk the binding descriptions, which describe the step rate and stride of each vertex buffer. Each binding should
925 // be specified only once.
926 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
927 bool skip = false;
928
929 for (unsigned i = 0; i < vi->vertexBindingDescriptionCount; i++) {
930 auto desc = &vi->pVertexBindingDescriptions[i];
931 auto &binding = bindings[desc->binding];
932 if (binding) {
Dave Houlton78d09922018-05-17 15:48:45 -0600933 // TODO: "VUID-VkGraphicsPipelineCreateInfo-pStages-00742" perhaps?
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -0600934 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -0600935 kVUID_Core_Shader_InconsistentVi, "Duplicate vertex input binding descriptions for binding %d",
Chris Forbes47567b72017-06-09 12:09:45 -0700936 desc->binding);
937 } else {
938 binding = desc;
939 }
940 }
941
942 return skip;
943}
944
// Validates the pipeline's vertex input state against the vertex shader's input interface.
// Reports: attributes not consumed by the shader (perf warning), shader inputs with no attribute (error),
// and fundamental-type mismatches between attribute format and shader input (error).
// Returns true if validation should skip pipeline creation.
static bool ValidateViAgainstVsInputs(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi,
                                      SHADER_MODULE_STATE const *vs, spirv_inst_iter entrypoint) {
    bool skip = false;

    // Shader-side inputs, keyed by (location, component).
    auto inputs = CollectInterfaceByLocation(vs, entrypoint, spv::StorageClassInput, false);

    // Build index by location
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->vertexAttributeDescriptionCount; i++) {
            // Wide formats (e.g. 64-bit vec3/vec4) may consume more than one location; index each one.
            auto num_locations = GetLocationsConsumedByFormat(vi->pVertexAttributeDescriptions[i].format);
            for (auto j = 0u; j < num_locations; j++) {
                attribs[vi->pVertexAttributeDescriptions[i].location + j] = &vi->pVertexAttributeDescriptions[i];
            }
        }
    }

    // Merge-walk the two sorted maps by location. 'used' carries state across iterations: it records that
    // the current attribute was matched by at least one shader input (possibly a different component at the
    // same location), suppressing the "not consumed" warning when the attribute's location advances past it.
    auto it_a = attribs.begin();
    auto it_b = inputs.begin();
    bool used = false;

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first.first;

        if (!a_at_end && (b_at_end || a_first < b_first)) {
            // Attribute with no shader input at this location: perf warning only (and only if never matched).
            if (!used &&
                log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                        "Vertex attribute at location %d not consumed by vertex shader", a_first)) {
                skip = true;
            }
            used = false;
            it_a++;
        } else if (!b_at_end && (a_at_end || b_first < a_first)) {
            // Shader consumes an input location the VI state does not provide: error.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                            "Vertex shader consumes input at location %d but not provided", b_first);
            it_b++;
        } else {
            // Locations match: check fundamental type compatibility (float/int/uint class, not exact format).
            unsigned attrib_type = GetFormatType(it_a->second->format);
            unsigned input_type = GetFundamentalType(vs, it_b->second.type_id);

            // Type checking
            if (!(attrib_type & input_type)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Attribute type of `%s` at location %d does not match vertex shader input type of `%s`",
                                string_VkFormat(it_a->second->format), a_first, DescribeType(vs, it_b->second.type_id).c_str());
            }

            // OK! Only advance the input iterator: multiple inputs (components) may map to one attribute.
            used = true;
            it_b++;
        }
    }

    return skip;
}
1006
// Validates the fragment shader's output interface against the color attachments of the given subpass.
// Reports: outputs with no matching attachment, attachments the shader never writes (when their write mask
// is non-zero), fundamental-type mismatches, and a missing alpha component at location 0 when
// alpha-to-coverage is enabled. Returns true if validation should skip pipeline creation.
static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_data, SHADER_MODULE_STATE const *fs,
                                               spirv_inst_iter entrypoint, PIPELINE_STATE const *pipeline, uint32_t subpass_index) {
    auto rpci = pipeline->rp_state->createInfo.ptr();

    // Index the subpass's usable color attachments by color-attachment slot.
    std::map<uint32_t, VkFormat> color_attachments;
    auto subpass = rpci->pSubpasses[subpass_index];
    for (auto i = 0u; i < subpass.colorAttachmentCount; ++i) {
        uint32_t attachment = subpass.pColorAttachments[i].attachment;
        if (attachment == VK_ATTACHMENT_UNUSED) continue;
        if (rpci->pAttachments[attachment].format != VK_FORMAT_UNDEFINED) {
            color_attachments[i] = rpci->pAttachments[attachment].format;
        }
    }

    bool skip = false;

    // TODO: dual source blend index (spv::DecIndex, zero if not provided)

    // Shader-side outputs keyed by (location, component).
    auto outputs = CollectInterfaceByLocation(fs, entrypoint, spv::StorageClassOutput, false);

    auto it_a = outputs.begin();
    auto it_b = color_attachments.begin();
    // 'used' carries across iterations: set when the current attachment was matched by an output,
    // suppressing the "not written" warning when the attachment iterator advances.
    bool used = false;
    bool alphaToCoverageEnabled = pipeline->graphicsPipelineCI.pMultisampleState != NULL &&
                                  pipeline->graphicsPipelineCI.pMultisampleState->alphaToCoverageEnable == VK_TRUE;
    bool locationZeroHasAlpha = false;

    // Walk attachment list and outputs together

    while ((outputs.size() > 0 && it_a != outputs.end()) || (color_attachments.size() > 0 && it_b != color_attachments.end())) {
        bool a_at_end = outputs.size() == 0 || it_a == outputs.end();
        bool b_at_end = color_attachments.size() == 0 || it_b == color_attachments.end();

        // A 4-component output at location 0 provides the alpha needed for alpha-to-coverage.
        if (!a_at_end && it_a->first.first == 0 && fs->get_def(it_a->second.type_id) != fs->end() &&
            GetComponentsConsumedByType(fs, it_a->second.type_id, false) == 4)
            locationZeroHasAlpha = true;

        if (!a_at_end && (b_at_end || it_a->first.first < it_b->first)) {
            // Output with no attachment. Location 0 is exempt when alpha-to-coverage consumes it.
            if (!alphaToCoverageEnabled || it_a->first.first != 0) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                                "fragment shader writes to output location %d with no matching attachment", it_a->first.first);
            }
            it_a++;
        } else if (!b_at_end && (a_at_end || it_a->first.first > it_b->first)) {
            // Only complain if there are unmasked channels for this attachment. If the writemask is 0, it's acceptable for the
            // shader to not produce a matching output.
            if (!used) {
                if (pipeline->attachments[it_b->first].colorWriteMask != 0) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                                    "Attachment %d not written by fragment shader; undefined values will be written to attachment",
                                    it_b->first);
                }
            }
            used = false;
            it_b++;
        } else {
            // Locations match: check fundamental type compatibility.
            unsigned output_type = GetFundamentalType(fs, it_a->second.type_id);
            unsigned att_type = GetFormatType(it_b->second);

            // Type checking
            if (!(output_type & att_type)) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                    HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                    "Attachment %d of type `%s` does not match fragment shader output type of `%s`; resulting values are undefined",
                    it_b->first, string_VkFormat(it_b->second), DescribeType(fs, it_a->second.type_id).c_str());
            }

            // OK! Only advance the output iterator: several outputs (components) may feed one attachment.
            it_a++;
            used = true;
        }
    }

    if (alphaToCoverageEnabled && !locationZeroHasAlpha) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                        HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_NoAlphaAtLocation0WithAlphaToCoverage,
                        "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
    }

    return skip;
}
1091
// For PointSize analysis we need to know if the variable decorated with the PointSize built-in was actually written to.
// This function examines instructions in the static call tree for a write to this variable.
// builtin_instr is the OpDecorate or OpMemberDecorate instruction carrying the PointSize decoration;
// entrypoint is the OpEntryPoint instruction whose call tree is searched.
static bool IsPointSizeWritten(SHADER_MODULE_STATE const *src, spirv_inst_iter builtin_instr, spirv_inst_iter entrypoint) {
    auto type = builtin_instr.opcode();
    // target_id starts as the decorated id and is successively remapped as the search narrows:
    // struct type -> pointer type -> variable -> access-chain result that selects the decorated member.
    uint32_t target_id = builtin_instr.word(1);
    bool init_complete = false;

    if (type == spv::OpMemberDecorate) {
        // Built-in is part of a structure -- examine instructions up to first function body to get initial IDs
        auto insn = entrypoint;
        while (!init_complete && (insn.opcode() != spv::OpFunction)) {
            switch (insn.opcode()) {
                case spv::OpTypePointer:
                    // Pointer-to-our-struct in the Output storage class: follow to the pointer type id.
                    if ((insn.word(3) == target_id) && (insn.word(2) == spv::StorageClassOutput)) {
                        target_id = insn.word(1);
                    }
                    break;
                case spv::OpVariable:
                    // Variable of that pointer type: its result id is what access chains will reference.
                    if (insn.word(1) == target_id) {
                        target_id = insn.word(2);
                        init_complete = true;
                    }
                    break;
            }
            insn++;
        }
    }

    // No output variable of the decorated struct type exists -> the built-in cannot have been written.
    if (!init_complete && (type == spv::OpMemberDecorate)) return false;

    bool found_write = false;
    std::unordered_set<uint32_t> worklist;
    worklist.insert(entrypoint.word(2));  // word(2) of OpEntryPoint is the entry function's id

    // Follow instructions in call graph looking for writes to target
    while (!worklist.empty() && !found_write) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            continue;
        }

        if (insn.opcode() == spv::OpFunction) {
            // Scan body of function looking for other function calls or items in our ID chain
            while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                switch (insn.opcode()) {
                    case spv::OpAccessChain:
                        if (insn.word(3) == target_id) {
                            if (type == spv::OpMemberDecorate) {
                                // Only chains selecting the decorated member index count;
                                // NOTE(review): target_id is remapped in place, so this assumes the access
                                // chain is seen before the store that uses it -- holds for SSA-ordered SPIR-V.
                                auto value = GetConstantValue(src, insn.word(4));
                                if (value == builtin_instr.word(2)) {
                                    target_id = insn.word(2);
                                }
                            } else {
                                target_id = insn.word(2);
                            }
                        }
                        break;
                    case spv::OpStore:
                        // A store whose pointer operand is (a chain to) the built-in -> it is written.
                        if (insn.word(1) == target_id) {
                            found_write = true;
                        }
                        break;
                    case spv::OpFunctionCall:
                        // Queue the callee for scanning.
                        worklist.insert(insn.word(3));
                        break;
                }
            }
        }
    }
    return found_write;
}
1167
// For some analyses, we need to know about all ids referenced by the static call tree of a particular entrypoint. This is
// important for identifying the set of shader resources actually used by an entrypoint, for example.
// Note: we only explore parts of the image which might actually contain ids we care about for the above analyses.
// - NOT the shader input/output interfaces.
//
// TODO: The set of interesting opcodes here was determined by eyeballing the SPIRV spec. It might be worth
// converting parts of this to be generated from the machine-readable spec instead.
static std::unordered_set<uint32_t> MarkAccessibleIds(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) {
    std::unordered_set<uint32_t> ids;       // result: every id reachable from the entrypoint
    std::unordered_set<uint32_t> worklist;  // ids still to be visited
    worklist.insert(entrypoint.word(2));    // word(2) of OpEntryPoint is the entry function's id

    while (!worklist.empty()) {
        auto id_iter = worklist.begin();
        auto id = *id_iter;
        worklist.erase(id_iter);

        auto insn = src->get_def(id);
        if (insn == src->end()) {
            // ID is something we didn't collect in BuildDefIndex. that's OK -- we'll stumble across all kinds of things here
            // that we may not care about.
            continue;
        }

        // Try to add to the output set
        if (!ids.insert(id).second) {
            continue;  // If we already saw this id, we don't want to walk it again.
        }

        switch (insn.opcode()) {
            case spv::OpFunction:
                // Scan whole body of the function, enlisting anything interesting
                while (++insn, insn.opcode() != spv::OpFunctionEnd) {
                    switch (insn.opcode()) {
                        // Loads and atomics: the pointer operand is word 3.
                        case spv::OpLoad:
                        case spv::OpAtomicLoad:
                        case spv::OpAtomicExchange:
                        case spv::OpAtomicCompareExchange:
                        case spv::OpAtomicCompareExchangeWeak:
                        case spv::OpAtomicIIncrement:
                        case spv::OpAtomicIDecrement:
                        case spv::OpAtomicIAdd:
                        case spv::OpAtomicISub:
                        case spv::OpAtomicSMin:
                        case spv::OpAtomicUMin:
                        case spv::OpAtomicSMax:
                        case spv::OpAtomicUMax:
                        case spv::OpAtomicAnd:
                        case spv::OpAtomicOr:
                        case spv::OpAtomicXor:
                            worklist.insert(insn.word(3));  // ptr
                            break;
                        // Stores have no result id, so the pointer operand is word 1.
                        case spv::OpStore:
                        case spv::OpAtomicStore:
                            worklist.insert(insn.word(1));  // ptr
                            break;
                        case spv::OpAccessChain:
                        case spv::OpInBoundsAccessChain:
                            worklist.insert(insn.word(3));  // base ptr
                            break;
                        // Image/sampler operations: the image or sampled image operand is word 3.
                        case spv::OpSampledImage:
                        case spv::OpImageSampleImplicitLod:
                        case spv::OpImageSampleExplicitLod:
                        case spv::OpImageSampleDrefImplicitLod:
                        case spv::OpImageSampleDrefExplicitLod:
                        case spv::OpImageSampleProjImplicitLod:
                        case spv::OpImageSampleProjExplicitLod:
                        case spv::OpImageSampleProjDrefImplicitLod:
                        case spv::OpImageSampleProjDrefExplicitLod:
                        case spv::OpImageFetch:
                        case spv::OpImageGather:
                        case spv::OpImageDrefGather:
                        case spv::OpImageRead:
                        case spv::OpImage:
                        case spv::OpImageQueryFormat:
                        case spv::OpImageQueryOrder:
                        case spv::OpImageQuerySizeLod:
                        case spv::OpImageQuerySize:
                        case spv::OpImageQueryLod:
                        case spv::OpImageQueryLevels:
                        case spv::OpImageQuerySamples:
                        case spv::OpImageSparseSampleImplicitLod:
                        case spv::OpImageSparseSampleExplicitLod:
                        case spv::OpImageSparseSampleDrefImplicitLod:
                        case spv::OpImageSparseSampleDrefExplicitLod:
                        case spv::OpImageSparseSampleProjImplicitLod:
                        case spv::OpImageSparseSampleProjExplicitLod:
                        case spv::OpImageSparseSampleProjDrefImplicitLod:
                        case spv::OpImageSparseSampleProjDrefExplicitLod:
                        case spv::OpImageSparseFetch:
                        case spv::OpImageSparseGather:
                        case spv::OpImageSparseDrefGather:
                        case spv::OpImageTexelPointer:
                            worklist.insert(insn.word(3));  // Image or sampled image
                            break;
                        case spv::OpImageWrite:
                            worklist.insert(insn.word(1));  // Image -- different operand order to above
                            break;
                        case spv::OpFunctionCall:
                            for (uint32_t i = 3; i < insn.len(); i++) {
                                worklist.insert(insn.word(i));  // fn itself, and all args
                            }
                            break;

                        case spv::OpExtInst:
                            for (uint32_t i = 5; i < insn.len(); i++) {
                                worklist.insert(insn.word(i));  // Operands to ext inst
                            }
                            break;
                    }
                }
                break;
        }
    }

    return ids;
}
1285
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001286static bool ValidatePushConstantBlockAgainstPipeline(debug_report_data const *report_data,
1287 std::vector<VkPushConstantRange> const *push_constant_ranges,
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001288 SHADER_MODULE_STATE const *src, spirv_inst_iter type,
1289 VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001290 bool skip = false;
1291
1292 // Strip off ptrs etc
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001293 type = GetStructType(src, type, false);
Chris Forbes47567b72017-06-09 12:09:45 -07001294 assert(type != src->end());
1295
1296 // Validate directly off the offsets. this isn't quite correct for arrays and matrices, but is a good first step.
1297 // TODO: arrays, matrices, weird sizes
1298 for (auto insn : *src) {
1299 if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
1300 if (insn.word(3) == spv::DecorationOffset) {
1301 unsigned offset = insn.word(4);
1302 auto size = 4; // Bytes; TODO: calculate this based on the type
1303
1304 bool found_range = false;
1305 for (auto const &range : *push_constant_ranges) {
1306 if (range.offset <= offset && range.offset + range.size >= offset + size) {
1307 found_range = true;
1308
1309 if ((range.stageFlags & stage) == 0) {
Dave Houltona9df0ce2018-02-07 10:51:23 -07001310 skip |=
1311 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001312 kVUID_Core_Shader_PushConstantNotAccessibleFromStage,
Dave Houltona9df0ce2018-02-07 10:51:23 -07001313 "Push constant range covering variable starting at offset %u not accessible from stage %s",
1314 offset, string_VkShaderStageFlagBits(stage));
Chris Forbes47567b72017-06-09 12:09:45 -07001315 }
1316
1317 break;
1318 }
1319 }
1320
1321 if (!found_range) {
1322 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001323 kVUID_Core_Shader_PushConstantOutOfRange,
Dave Houltona9df0ce2018-02-07 10:51:23 -07001324 "Push constant range covering variable starting at offset %u not declared in layout", offset);
Chris Forbes47567b72017-06-09 12:09:45 -07001325 }
1326 }
1327 }
1328 }
1329
1330 return skip;
1331}
1332
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001333static bool ValidatePushConstantUsage(debug_report_data const *report_data,
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001334 std::vector<VkPushConstantRange> const *push_constant_ranges, SHADER_MODULE_STATE const *src,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001335 std::unordered_set<uint32_t> accessible_ids, VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001336 bool skip = false;
1337
1338 for (auto id : accessible_ids) {
1339 auto def_insn = src->get_def(id);
1340 if (def_insn.opcode() == spv::OpVariable && def_insn.word(3) == spv::StorageClassPushConstant) {
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001341 skip |= ValidatePushConstantBlockAgainstPipeline(report_data, push_constant_ranges, src, src->get_def(def_insn.word(1)),
1342 stage);
Chris Forbes47567b72017-06-09 12:09:45 -07001343 }
1344 }
1345
1346 return skip;
1347}
1348
1349// Validate that data for each specialization entry is fully contained within the buffer.
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001350static bool ValidateSpecializationOffsets(debug_report_data const *report_data, VkPipelineShaderStageCreateInfo const *info) {
Chris Forbes47567b72017-06-09 12:09:45 -07001351 bool skip = false;
1352
1353 VkSpecializationInfo const *spec = info->pSpecializationInfo;
1354
1355 if (spec) {
1356 for (auto i = 0u; i < spec->mapEntryCount; i++) {
Dave Houlton78d09922018-05-17 15:48:45 -06001357 // TODO: This is a good place for "VUID-VkSpecializationInfo-offset-00773".
Chris Forbes47567b72017-06-09 12:09:45 -07001358 if (spec->pMapEntries[i].offset + spec->pMapEntries[i].size > spec->dataSize) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001359 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
Dave Houlton78d09922018-05-17 15:48:45 -06001360 "VUID-VkSpecializationInfo-pMapEntries-00774",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001361 "Specialization entry %u (for constant id %u) references memory outside provided specialization "
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001362 "data (bytes %u.." PRINTF_SIZE_T_SPECIFIER "; " PRINTF_SIZE_T_SPECIFIER " bytes provided)..",
Dave Houltona9df0ce2018-02-07 10:51:23 -07001363 i, spec->pMapEntries[i].constantID, spec->pMapEntries[i].offset,
Mark Lobodzinski487a0d12018-03-30 10:09:03 -06001364 spec->pMapEntries[i].offset + spec->pMapEntries[i].size - 1, spec->dataSize);
Chris Forbes47567b72017-06-09 12:09:45 -07001365 }
1366 }
1367 }
1368
1369 return skip;
1370}
1371
// Map a SPIR-V type id to the set of Vulkan descriptor types that could legally back it,
// and compute the descriptor count implied by any array dimensions on the type.
//
// module:           the shader module containing the type definition.
// type_id:          SPIR-V result id of the (possibly pointer/array-wrapped) type.
// descriptor_count: out-param; product of array extents, or 0 if a runtime array is seen.
// Returns the set of acceptable VkDescriptorType values (as uint32_t); an empty set means
// the type matches no descriptor type (invalid / unrecognized).
//
// Word offsets below follow the SPIR-V instruction layout (see the SPIR-V specification):
// e.g. OpTypePointer: word(2)=storage class, word(3)=pointee type; OpTypeArray:
// word(2)=element type, word(3)=length constant id; OpTypeImage: word(3)=Dim,
// word(7)=Sampled.
// TODO (jbolz): Can this return a const reference?
static std::set<uint32_t> TypeToDescriptorTypeSet(SHADER_MODULE_STATE const *module, uint32_t type_id, unsigned &descriptor_count) {
    auto type = module->get_def(type_id);
    bool is_storage_buffer = false;
    descriptor_count = 1;
    std::set<uint32_t> ret;

    // Strip off any array or ptrs. Where we remove array levels, adjust the descriptor count for each dimension.
    while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
        if (type.opcode() == spv::OpTypeRuntimeArray) {
            // Runtime arrays have no fixed extent; descriptor count is reported as 0.
            descriptor_count = 0;
            type = module->get_def(type.word(2));
        } else if (type.opcode() == spv::OpTypeArray) {
            // Multiply in this dimension's extent (word(3) names the length constant).
            descriptor_count *= GetConstantValue(module, type.word(3));
            type = module->get_def(type.word(2));
        } else {
            // OpTypePointer: remember if we passed through StorageBuffer storage class,
            // which distinguishes SSBOs declared with DecorationBlock from UBOs.
            if (type.word(2) == spv::StorageClassStorageBuffer) {
                is_storage_buffer = true;
            }
            type = module->get_def(type.word(3));
        }
    }

    switch (type.opcode()) {
        case spv::OpTypeStruct: {
            // Scan the module's decorations to see whether this struct is a Block
            // (UBO, or SSBO via StorageBuffer class) or a BufferBlock (legacy SSBO).
            for (auto insn : *module) {
                if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
                    if (insn.word(2) == spv::DecorationBlock) {
                        if (is_storage_buffer) {
                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
                            return ret;
                        } else {
                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
                            ret.insert(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
                            return ret;
                        }
                    } else if (insn.word(2) == spv::DecorationBufferBlock) {
                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
                        return ret;
                    }
                }
            }

            // Invalid
            return ret;
        }

        case spv::OpTypeSampler:
            ret.insert(VK_DESCRIPTOR_TYPE_SAMPLER);
            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
            return ret;

        case spv::OpTypeSampledImage: {
            // Slight relaxation for some GLSL historical madness: samplerBuffer doesn't really have a sampler, and a texel
            // buffer descriptor doesn't really provide one. Allow this slight mismatch.
            auto image_type = module->get_def(type.word(2));
            auto dim = image_type.word(3);
            auto sampled = image_type.word(7);
            if (dim == spv::DimBuffer && sampled == 1) {
                ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
                return ret;
            }
        }
            // NOTE: deliberate fall-out of the braced scope above -- any sampled image
            // that is not a texel buffer requires a combined image sampler.
            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
            return ret;

        case spv::OpTypeImage: {
            // Many descriptor types backing image types-- depends on dimension and whether the image will be used with a sampler.
            // SPIRV for Vulkan requires that sampled be 1 or 2 -- leaving the decision to runtime is unacceptable.
            auto dim = type.word(3);
            auto sampled = type.word(7);

            if (dim == spv::DimSubpassData) {
                ret.insert(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
                return ret;
            } else if (dim == spv::DimBuffer) {
                if (sampled == 1) {
                    ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
                    return ret;
                } else {
                    ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
                    return ret;
                }
            } else if (sampled == 1) {
                ret.insert(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
                ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
                return ret;
            } else {
                ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
                return ret;
            }
        }
        case spv::OpTypeAccelerationStructureNV:
            ret.insert(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
            return ret;

        // We shouldn't really see any other junk types -- but if we do, they're a mismatch.
        default:
            return ret;  // Matches nothing
    }
}
1476
Jeff Bolze54ae892018-09-08 12:16:29 -05001477static std::string string_descriptorTypes(const std::set<uint32_t> &descriptor_types) {
Chris Forbes73c00bf2018-06-22 16:28:06 -07001478 std::stringstream ss;
Jeff Bolze54ae892018-09-08 12:16:29 -05001479 for (auto it = descriptor_types.begin(); it != descriptor_types.end(); ++it) {
1480 if (ss.tellp()) ss << ", ";
1481 ss << string_VkDescriptorType(VkDescriptorType(*it));
Chris Forbes73c00bf2018-06-22 16:28:06 -07001482 }
1483 return ss.str();
1484}
1485
Jeff Bolzee743412019-06-20 22:24:32 -05001486static bool RequirePropertyFlag(debug_report_data const *report_data, VkBool32 check, char const *flag, char const *structure) {
1487 if (!check) {
1488 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1489 kVUID_Core_Shader_ExceedDeviceLimit, "Shader requires flag %s set in %s but it is not set on the device", flag,
1490 structure)) {
1491 return true;
1492 }
1493 }
1494
1495 return false;
1496}
1497
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001498static bool RequireFeature(debug_report_data const *report_data, VkBool32 feature, char const *feature_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001499 if (!feature) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001500 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001501 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires %s but is not enabled on the device", feature_name)) {
Chris Forbes47567b72017-06-09 12:09:45 -07001502 return true;
1503 }
1504 }
1505
1506 return false;
1507}
1508
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001509static bool RequireExtension(debug_report_data const *report_data, bool extension, char const *extension_name) {
Chris Forbes47567b72017-06-09 12:09:45 -07001510 if (!extension) {
Mark Lobodzinskib1fd9d12018-03-30 14:26:00 -06001511 if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
Dave Houlton51653902018-06-22 17:32:13 -06001512 kVUID_Core_Shader_FeatureNotEnabled, "Shader requires extension %s but is not enabled on the device",
Chris Forbes47567b72017-06-09 12:09:45 -07001513 extension_name)) {
1514 return true;
1515 }
1516 }
1517
1518 return false;
1519}
1520
Jeff Bolzee743412019-06-20 22:24:32 -05001521bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkShaderStageFlagBits stage) {
Chris Forbes47567b72017-06-09 12:09:45 -07001522 bool skip = false;
1523
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001524 struct FeaturePointer {
1525 // Callable object to test if this feature is enabled in the given aggregate feature struct
1526 const std::function<VkBool32(const DeviceFeatures &)> IsEnabled;
1527
1528 // Test if feature pointer is populated
1529 explicit operator bool() const { return static_cast<bool>(IsEnabled); }
1530
1531 // Default and nullptr constructor to create an empty FeaturePointer
1532 FeaturePointer() : IsEnabled(nullptr) {}
1533 FeaturePointer(std::nullptr_t ptr) : IsEnabled(nullptr) {}
1534
1535 // Constructors to populate FeaturePointer based on given pointer to member
1536 FeaturePointer(VkBool32 VkPhysicalDeviceFeatures::*ptr)
1537 : IsEnabled([=](const DeviceFeatures &features) { return features.core.*ptr; }) {}
1538 FeaturePointer(VkBool32 VkPhysicalDeviceDescriptorIndexingFeaturesEXT::*ptr)
1539 : IsEnabled([=](const DeviceFeatures &features) { return features.descriptor_indexing.*ptr; }) {}
1540 FeaturePointer(VkBool32 VkPhysicalDevice8BitStorageFeaturesKHR::*ptr)
1541 : IsEnabled([=](const DeviceFeatures &features) { return features.eight_bit_storage.*ptr; }) {}
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001542 FeaturePointer(VkBool32 VkPhysicalDeviceTransformFeedbackFeaturesEXT::*ptr)
1543 : IsEnabled([=](const DeviceFeatures &features) { return features.transform_feedback_features.*ptr; }) {}
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001544 FeaturePointer(VkBool32 VkPhysicalDeviceFloat16Int8FeaturesKHR::*ptr)
1545 : IsEnabled([=](const DeviceFeatures &features) { return features.float16_int8.*ptr; }) {}
Tobias Hector6a0ece72018-12-10 12:24:05 +00001546 FeaturePointer(VkBool32 VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::*ptr)
1547 : IsEnabled([=](const DeviceFeatures &features) { return features.scalar_block_layout_features.*ptr; }) {}
Jeff Bolze4356752019-03-07 11:23:46 -06001548 FeaturePointer(VkBool32 VkPhysicalDeviceCooperativeMatrixFeaturesNV::*ptr)
1549 : IsEnabled([=](const DeviceFeatures &features) { return features.cooperative_matrix_features.*ptr; }) {}
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00001550 FeaturePointer(VkBool32 VkPhysicalDeviceFloatControlsPropertiesKHR::*ptr)
1551 : IsEnabled([=](const DeviceFeatures &features) { return features.float_controls.*ptr; }) {}
Graeme Leese9b6a1522019-06-07 20:49:45 +01001552 FeaturePointer(VkBool32 VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::*ptr)
1553 : IsEnabled([=](const DeviceFeatures &features) { return features.uniform_buffer_standard_layout.*ptr; }) {}
Jason Macnakc5a621d2019-06-10 12:42:50 -07001554 FeaturePointer(VkBool32 VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::*ptr)
1555 : IsEnabled([=](const DeviceFeatures &features) { return features.compute_shader_derivatives_features.*ptr; }) {}
Jason Macnak325e8b52019-06-10 13:33:10 -07001556 FeaturePointer(VkBool32 VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::*ptr)
1557 : IsEnabled([=](const DeviceFeatures &features) { return features.fragment_shader_barycentric_features.*ptr; }) {}
Jason Macnakd7fddf82019-06-13 09:52:49 -07001558 FeaturePointer(VkBool32 VkPhysicalDeviceShaderImageFootprintFeaturesNV::*ptr)
1559 : IsEnabled([=](const DeviceFeatures &features) { return features.shader_image_footprint_features.*ptr; }) {}
Jeff Bolz38f6cb52019-06-30 16:26:44 -05001560 FeaturePointer(VkBool32 VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::*ptr)
1561 : IsEnabled([=](const DeviceFeatures &features) { return features.fragment_shader_interlock_features.*ptr; }) {}
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001562 };
1563
Chris Forbes47567b72017-06-09 12:09:45 -07001564 struct CapabilityInfo {
1565 char const *name;
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001566 FeaturePointer feature;
1567 bool DeviceExtensions::*extension;
Chris Forbes47567b72017-06-09 12:09:45 -07001568 };
1569
Chris Forbes47567b72017-06-09 12:09:45 -07001570 // clang-format off
Dave Houltoneb10ea82017-12-22 12:21:50 -07001571 static const std::unordered_multimap<uint32_t, CapabilityInfo> capabilities = {
Chris Forbes47567b72017-06-09 12:09:45 -07001572 // Capabilities always supported by a Vulkan 1.0 implementation -- no
1573 // feature bits.
1574 {spv::CapabilityMatrix, {nullptr}},
1575 {spv::CapabilityShader, {nullptr}},
1576 {spv::CapabilityInputAttachment, {nullptr}},
1577 {spv::CapabilitySampled1D, {nullptr}},
1578 {spv::CapabilityImage1D, {nullptr}},
1579 {spv::CapabilitySampledBuffer, {nullptr}},
Toni Merilehtib13a4a22019-05-21 12:58:44 +03001580 {spv::CapabilityStorageImageExtendedFormats, {nullptr}},
Chris Forbes47567b72017-06-09 12:09:45 -07001581 {spv::CapabilityImageQuery, {nullptr}},
1582 {spv::CapabilityDerivativeControl, {nullptr}},
1583
1584 // Capabilities that are optionally supported, but require a feature to
1585 // be enabled on the device
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001586 {spv::CapabilityGeometry, {"VkPhysicalDeviceFeatures::geometryShader", &VkPhysicalDeviceFeatures::geometryShader}},
1587 {spv::CapabilityTessellation, {"VkPhysicalDeviceFeatures::tessellationShader", &VkPhysicalDeviceFeatures::tessellationShader}},
1588 {spv::CapabilityFloat64, {"VkPhysicalDeviceFeatures::shaderFloat64", &VkPhysicalDeviceFeatures::shaderFloat64}},
1589 {spv::CapabilityInt64, {"VkPhysicalDeviceFeatures::shaderInt64", &VkPhysicalDeviceFeatures::shaderInt64}},
1590 {spv::CapabilityTessellationPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1591 {spv::CapabilityGeometryPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
1592 {spv::CapabilityImageGatherExtended, {"VkPhysicalDeviceFeatures::shaderImageGatherExtended", &VkPhysicalDeviceFeatures::shaderImageGatherExtended}},
1593 {spv::CapabilityStorageImageMultisample, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
1594 {spv::CapabilityUniformBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing}},
1595 {spv::CapabilitySampledImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing}},
1596 {spv::CapabilityStorageBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1597 {spv::CapabilityStorageImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
1598 {spv::CapabilityClipDistance, {"VkPhysicalDeviceFeatures::shaderClipDistance", &VkPhysicalDeviceFeatures::shaderClipDistance}},
1599 {spv::CapabilityCullDistance, {"VkPhysicalDeviceFeatures::shaderCullDistance", &VkPhysicalDeviceFeatures::shaderCullDistance}},
1600 {spv::CapabilityImageCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1601 {spv::CapabilitySampleRateShading, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1602 {spv::CapabilitySparseResidency, {"VkPhysicalDeviceFeatures::shaderResourceResidency", &VkPhysicalDeviceFeatures::shaderResourceResidency}},
1603 {spv::CapabilityMinLod, {"VkPhysicalDeviceFeatures::shaderResourceMinLod", &VkPhysicalDeviceFeatures::shaderResourceMinLod}},
1604 {spv::CapabilitySampledCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
1605 {spv::CapabilityImageMSArray, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001606 {spv::CapabilityInterpolationFunction, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
1607 {spv::CapabilityStorageImageReadWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat}},
1608 {spv::CapabilityStorageImageWriteWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat}},
1609 {spv::CapabilityMultiViewport, {"VkPhysicalDeviceFeatures::multiViewport", &VkPhysicalDeviceFeatures::multiViewport}},
Jeff Bolzfdf96072018-04-10 14:32:18 -05001610
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001611 {spv::CapabilityShaderNonUniformEXT, {VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_descriptor_indexing}},
1612 {spv::CapabilityRuntimeDescriptorArrayEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray}},
1613 {spv::CapabilityInputAttachmentArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing}},
1614 {spv::CapabilityUniformTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing}},
1615 {spv::CapabilityStorageTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing}},
1616 {spv::CapabilityUniformBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing}},
1617 {spv::CapabilitySampledImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing}},
1618 {spv::CapabilityStorageBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing}},
1619 {spv::CapabilityStorageImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing}},
1620 {spv::CapabilityInputAttachmentArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing}},
1621 {spv::CapabilityUniformTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing}},
Jason Macnakf7019582019-06-13 10:07:26 -07001622 {spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing}},
Chris Forbes47567b72017-06-09 12:09:45 -07001623
1624 // Capabilities that require an extension
Mike Schuchardt8ed5ea02018-07-20 18:24:17 -06001625 {spv::CapabilityDrawParameters, {VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_draw_parameters}},
1626 {spv::CapabilityGeometryShaderPassthroughNV, {VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_geometry_shader_passthrough}},
1627 {spv::CapabilitySampleMaskOverrideCoverageNV, {VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_sample_mask_override_coverage}},
1628 {spv::CapabilityShaderViewportIndexLayerEXT, {VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_viewport_index_layer}},
1629 {spv::CapabilityShaderViewportIndexLayerNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1630 {spv::CapabilityShaderViewportMaskNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
1631 {spv::CapabilitySubgroupBallotKHR, {VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_ballot }},
1632 {spv::CapabilitySubgroupVoteKHR, {VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_vote }},
Jason Macnakb7d091c2019-06-10 11:13:11 -07001633 {spv::CapabilityGroupNonUniformPartitionedNV, {VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_shader_subgroup_partitioned}},
aqnuep7033c702018-09-11 18:03:29 +02001634 {spv::CapabilityInt64Atomics, {VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_atomic_int64 }},
Alexander Galazin3bd8e342018-06-14 15:49:07 +02001635
Jason Macnakc5a621d2019-06-10 12:42:50 -07001636 {spv::CapabilityComputeDerivativeGroupQuadsNV, {"VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads", &VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads, &DeviceExtensions::vk_nv_compute_shader_derivatives}},
1637 {spv::CapabilityComputeDerivativeGroupLinearNV, {"VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupLinear", &VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupLinear, &DeviceExtensions::vk_nv_compute_shader_derivatives}},
Jason Macnakf7019582019-06-13 10:07:26 -07001638 {spv::CapabilityFragmentBarycentricNV, {"VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric", &VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric, &DeviceExtensions::vk_nv_fragment_shader_barycentric}},
Jason Macnakc5a621d2019-06-10 12:42:50 -07001639
Jason Macnakf7019582019-06-13 10:07:26 -07001640 {spv::CapabilityStorageBuffer8BitAccess, {"VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1641 {spv::CapabilityUniformAndStorageBuffer8BitAccess, {"VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
1642 {spv::CapabilityStoragePushConstant8, {"VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8", &VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8, &DeviceExtensions::vk_khr_8bit_storage}},
Brett Lawsonbebfb6f2018-10-23 16:58:50 -07001643
Jason Macnakf7019582019-06-13 10:07:26 -07001644 {spv::CapabilityTransformFeedback, { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback, &DeviceExtensions::vk_ext_transform_feedback}},
1645 {spv::CapabilityGeometryStreams, { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams, &DeviceExtensions::vk_ext_transform_feedback}},
Jose-Emilio Munoz-Lopez1109b452018-08-21 09:44:07 +01001646
Jason Macnakf7019582019-06-13 10:07:26 -07001647 {spv::CapabilityFloat16, {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16, &DeviceExtensions::vk_khr_shader_float16_int8}},
1648 {spv::CapabilityInt8, {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8, &DeviceExtensions::vk_khr_shader_float16_int8}},
Jeff Bolze4356752019-03-07 11:23:46 -06001649
Jason Macnakd7fddf82019-06-13 09:52:49 -07001650 {spv::CapabilityImageFootprintNV, {"VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint", &VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint, &DeviceExtensions::vk_nv_shader_image_footprint}},
1651
Jeff Bolze4356752019-03-07 11:23:46 -06001652 {spv::CapabilityCooperativeMatrixNV, {"VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix", &VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix, &DeviceExtensions::vk_nv_cooperative_matrix}},
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00001653
1654 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1655 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1656 {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1657 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1658 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1659 {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserveFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1660 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1661 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1662 {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZeroFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1663 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1664 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1665 {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTEFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
1666 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat16, &DeviceExtensions::vk_khr_shader_float_controls}},
1667 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat32, &DeviceExtensions::vk_khr_shader_float_controls}},
1668 {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64", &VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZFloat64, &DeviceExtensions::vk_khr_shader_float_controls}},
Jeff Bolz38f6cb52019-06-30 16:26:44 -05001669
1670 {spv::CapabilityFragmentShaderSampleInterlockEXT, {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock", &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock, &DeviceExtensions::vk_ext_fragment_shader_interlock}},
1671 {spv::CapabilityFragmentShaderPixelInterlockEXT, {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderPixelInterlock", &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderPixelInterlock, &DeviceExtensions::vk_ext_fragment_shader_interlock}},
1672 {spv::CapabilityFragmentShaderShadingRateInterlockEXT, {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderShadingRateInterlock", &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderShadingRateInterlock, &DeviceExtensions::vk_ext_fragment_shader_interlock}},
Chris Forbes47567b72017-06-09 12:09:45 -07001673 };
1674 // clang-format on
1675
1676 for (auto insn : *src) {
1677 if (insn.opcode() == spv::OpCapability) {
Dave Houltoneb10ea82017-12-22 12:21:50 -07001678 size_t n = capabilities.count(insn.word(1));
1679 if (1 == n) { // key occurs exactly once
1680 auto it = capabilities.find(insn.word(1));
1681 if (it != capabilities.end()) {
1682 if (it->second.feature) {
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001683 skip |= RequireFeature(report_data, it->second.feature.IsEnabled(enabled_features), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001684 }
1685 if (it->second.extension) {
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06001686 skip |= RequireExtension(report_data, device_extensions.*(it->second.extension), it->second.name);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001687 }
Chris Forbes47567b72017-06-09 12:09:45 -07001688 }
Dave Houltoneb10ea82017-12-22 12:21:50 -07001689 } else if (1 < n) { // key occurs multiple times, at least one must be enabled
1690 bool needs_feature = false, has_feature = false;
1691 bool needs_ext = false, has_ext = false;
1692 std::string feature_names = "(one of) [ ";
1693 std::string extension_names = feature_names;
1694 auto caps = capabilities.equal_range(insn.word(1));
1695 for (auto it = caps.first; it != caps.second; ++it) {
1696 if (it->second.feature) {
1697 needs_feature = true;
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001698 has_feature = has_feature || it->second.feature.IsEnabled(enabled_features);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001699 feature_names += it->second.name;
1700 feature_names += " ";
1701 }
1702 if (it->second.extension) {
1703 needs_ext = true;
Mark Lobodzinskif45e45f2019-04-19 14:15:39 -06001704 has_ext = has_ext || device_extensions.*(it->second.extension);
Dave Houltoneb10ea82017-12-22 12:21:50 -07001705 extension_names += it->second.name;
1706 extension_names += " ";
1707 }
1708 }
1709 if (needs_feature) {
1710 feature_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001711 skip |= RequireFeature(report_data, has_feature, feature_names.c_str());
Dave Houltoneb10ea82017-12-22 12:21:50 -07001712 }
1713 if (needs_ext) {
1714 extension_names += "]";
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06001715 skip |= RequireExtension(report_data, has_ext, extension_names.c_str());
Chris Forbes47567b72017-06-09 12:09:45 -07001716 }
Jeff Bolzee743412019-06-20 22:24:32 -05001717 } else { // Do group non-uniform checks
1718 const VkSubgroupFeatureFlags supportedOperations = phys_dev_ext_props.subgroup_props.supportedOperations;
1719 const VkSubgroupFeatureFlags supportedStages = phys_dev_ext_props.subgroup_props.supportedStages;
1720
1721 switch (insn.word(1)) {
1722 default:
1723 break;
1724 case spv::CapabilityGroupNonUniform:
1725 case spv::CapabilityGroupNonUniformVote:
1726 case spv::CapabilityGroupNonUniformArithmetic:
1727 case spv::CapabilityGroupNonUniformBallot:
1728 case spv::CapabilityGroupNonUniformShuffle:
1729 case spv::CapabilityGroupNonUniformShuffleRelative:
1730 case spv::CapabilityGroupNonUniformClustered:
1731 case spv::CapabilityGroupNonUniformQuad:
1732 case spv::CapabilityGroupNonUniformPartitionedNV:
1733 RequirePropertyFlag(report_data, supportedStages & stage, string_VkShaderStageFlagBits(stage),
1734 "VkPhysicalDeviceSubgroupProperties::supportedStages");
1735 break;
1736 }
1737
1738 switch (insn.word(1)) {
1739 default:
1740 break;
1741 case spv::CapabilityGroupNonUniform:
1742 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT,
1743 "VK_SUBGROUP_FEATURE_BASIC_BIT",
1744 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1745 break;
1746 case spv::CapabilityGroupNonUniformVote:
1747 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_VOTE_BIT,
1748 "VK_SUBGROUP_FEATURE_VOTE_BIT",
1749 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1750 break;
1751 case spv::CapabilityGroupNonUniformArithmetic:
1752 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
1753 "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT",
1754 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1755 break;
1756 case spv::CapabilityGroupNonUniformBallot:
1757 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT,
1758 "VK_SUBGROUP_FEATURE_BALLOT_BIT",
1759 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1760 break;
1761 case spv::CapabilityGroupNonUniformShuffle:
1762 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
1763 "VK_SUBGROUP_FEATURE_SHUFFLE_BIT",
1764 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1765 break;
1766 case spv::CapabilityGroupNonUniformShuffleRelative:
1767 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
1768 "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT",
1769 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1770 break;
1771 case spv::CapabilityGroupNonUniformClustered:
1772 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
1773 "VK_SUBGROUP_FEATURE_CLUSTERED_BIT",
1774 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1775 break;
1776 case spv::CapabilityGroupNonUniformQuad:
1777 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT,
1778 "VK_SUBGROUP_FEATURE_QUAD_BIT",
1779 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1780 break;
1781 case spv::CapabilityGroupNonUniformPartitionedNV:
1782 RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV,
1783 "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV",
1784 "VkPhysicalDeviceSubgroupProperties::supportedOperations");
1785 break;
1786 }
Chris Forbes47567b72017-06-09 12:09:45 -07001787 }
1788 }
1789 }
1790
Jeff Bolzee743412019-06-20 22:24:32 -05001791 return skip;
1792}
1793
1794bool CoreChecks::ValidateShaderStageWritableDescriptor(VkShaderStageFlagBits stage, bool has_writable_descriptor) {
1795 bool skip = false;
1796
Chris Forbes349b3132018-03-07 11:38:08 -08001797 if (has_writable_descriptor) {
1798 switch (stage) {
1799 case VK_SHADER_STAGE_COMPUTE_BIT:
Jeff Bolz148d94e2018-12-13 21:25:56 -06001800 case VK_SHADER_STAGE_RAYGEN_BIT_NV:
1801 case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
1802 case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
1803 case VK_SHADER_STAGE_MISS_BIT_NV:
1804 case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
1805 case VK_SHADER_STAGE_CALLABLE_BIT_NV:
1806 case VK_SHADER_STAGE_TASK_BIT_NV:
1807 case VK_SHADER_STAGE_MESH_BIT_NV:
Chris Forbes349b3132018-03-07 11:38:08 -08001808 /* No feature requirements for writes and atomics from compute
Jeff Bolz148d94e2018-12-13 21:25:56 -06001809 * raytracing, or mesh stages */
Chris Forbes349b3132018-03-07 11:38:08 -08001810 break;
1811 case VK_SHADER_STAGE_FRAGMENT_BIT:
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001812 skip |= RequireFeature(report_data, enabled_features.core.fragmentStoresAndAtomics, "fragmentStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001813 break;
1814 default:
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06001815 skip |= RequireFeature(report_data, enabled_features.core.vertexPipelineStoresAndAtomics,
1816 "vertexPipelineStoresAndAtomics");
Chris Forbes349b3132018-03-07 11:38:08 -08001817 break;
1818 }
1819 }
1820
Chris Forbes47567b72017-06-09 12:09:45 -07001821 return skip;
1822}
1823
Jeff Bolzee743412019-06-20 22:24:32 -05001824bool CoreChecks::ValidateShaderStageGroupNonUniform(SHADER_MODULE_STATE const *module, VkShaderStageFlagBits stage,
1825 std::unordered_set<uint32_t> const &accessible_ids) {
1826 bool skip = false;
1827
1828 auto const subgroup_props = phys_dev_ext_props.subgroup_props;
1829
1830 for (uint32_t id : accessible_ids) {
1831 auto inst = module->get_def(id);
1832
1833 // Check the quad operations.
1834 switch (inst.opcode()) {
1835 default:
1836 break;
1837 case spv::OpGroupNonUniformQuadBroadcast:
1838 case spv::OpGroupNonUniformQuadSwap:
1839 if ((stage != VK_SHADER_STAGE_FRAGMENT_BIT) && (stage != VK_SHADER_STAGE_COMPUTE_BIT)) {
1840 skip |= RequireFeature(report_data, subgroup_props.quadOperationsInAllStages,
1841 "VkPhysicalDeviceSubgroupProperties::quadOperationsInAllStages");
1842 }
1843 break;
1844 }
1845 }
1846
1847 return skip;
1848}
1849
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06001850bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
John Zulauf14c355b2019-06-27 16:09:37 -06001851 const PIPELINE_STATE *pipeline, spirv_inst_iter entrypoint) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001852 if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT || pStage->stage == VK_SHADER_STAGE_ALL_GRAPHICS ||
1853 pStage->stage == VK_SHADER_STAGE_ALL) {
1854 return false;
1855 }
1856
1857 bool skip = false;
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07001858 auto const &limits = phys_dev_props.limits;
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001859
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001860 std::set<uint32_t> patchIDs;
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001861 struct Variable {
1862 uint32_t baseTypePtrID;
1863 uint32_t ID;
1864 uint32_t storageClass;
1865 };
1866 std::vector<Variable> variables;
1867
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001868 uint32_t numVertices = 0;
1869
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001870 for (auto insn : *src) {
1871 switch (insn.opcode()) {
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001872 // Find all Patch decorations
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001873 case spv::OpDecorate:
1874 switch (insn.word(2)) {
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001875 case spv::DecorationPatch: {
1876 patchIDs.insert(insn.word(1));
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001877 break;
1878 }
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001879 default:
1880 break;
1881 }
1882 break;
1883 // Find all input and output variables
1884 case spv::OpVariable: {
1885 Variable var = {};
1886 var.storageClass = insn.word(3);
1887 if (var.storageClass == spv::StorageClassInput || var.storageClass == spv::StorageClassOutput) {
1888 var.baseTypePtrID = insn.word(1);
1889 var.ID = insn.word(2);
1890 variables.push_back(var);
1891 }
1892 break;
1893 }
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001894 case spv::OpExecutionMode:
1895 if (insn.word(1) == entrypoint.word(2)) {
1896 switch (insn.word(2)) {
1897 default:
1898 break;
1899 case spv::ExecutionModeOutputVertices:
1900 numVertices = insn.word(3);
1901 break;
1902 }
1903 }
1904 break;
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001905 default:
1906 break;
1907 }
1908 }
1909
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001910 bool strip_output_array_level =
1911 (pStage->stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStage->stage == VK_SHADER_STAGE_MESH_BIT_NV);
1912 bool strip_input_array_level =
1913 (pStage->stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ||
1914 pStage->stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || pStage->stage == VK_SHADER_STAGE_GEOMETRY_BIT);
1915
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001916 uint32_t numCompIn = 0, numCompOut = 0;
1917 for (auto &var : variables) {
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001918 // Check if the variable is a patch. Patches can also be members of blocks,
1919 // but if they are then the top-level arrayness has already been stripped
1920 // by the time GetComponentsConsumedByType gets to it.
1921 bool isPatch = patchIDs.find(var.ID) != patchIDs.end();
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001922
1923 if (var.storageClass == spv::StorageClassInput) {
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001924 numCompIn += GetComponentsConsumedByType(src, var.baseTypePtrID, strip_input_array_level && !isPatch);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001925 } else { // var.storageClass == spv::StorageClassOutput
Jeff Bolze9ee3d82019-05-29 13:45:13 -05001926 numCompOut += GetComponentsConsumedByType(src, var.baseTypePtrID, strip_output_array_level && !isPatch);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001927 }
1928 }
1929
1930 switch (pStage->stage) {
1931 case VK_SHADER_STAGE_VERTEX_BIT:
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001932 if (numCompOut > limits.maxVertexOutputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001933 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1934 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
1935 "Invalid Pipeline CreateInfo State: Vertex shader exceeds "
1936 "VkPhysicalDeviceLimits::maxVertexOutputComponents of %u "
1937 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001938 limits.maxVertexOutputComponents, numCompOut - limits.maxVertexOutputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001939 }
1940 break;
1941
1942 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001943 if (numCompIn > limits.maxTessellationControlPerVertexInputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001944 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1945 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
1946 "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
1947 "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents of %u "
1948 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001949 limits.maxTessellationControlPerVertexInputComponents,
1950 numCompIn - limits.maxTessellationControlPerVertexInputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001951 }
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001952 if (numCompOut > limits.maxTessellationControlPerVertexOutputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001953 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1954 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
1955 "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
1956 "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents of %u "
1957 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001958 limits.maxTessellationControlPerVertexOutputComponents,
1959 numCompOut - limits.maxTessellationControlPerVertexOutputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001960 }
1961 break;
1962
1963 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001964 if (numCompIn > limits.maxTessellationEvaluationInputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001965 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1966 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
1967 "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
1968 "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents of %u "
1969 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001970 limits.maxTessellationEvaluationInputComponents,
1971 numCompIn - limits.maxTessellationEvaluationInputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001972 }
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001973 if (numCompOut > limits.maxTessellationEvaluationOutputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001974 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1975 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
1976 "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
1977 "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents of %u "
1978 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001979 limits.maxTessellationEvaluationOutputComponents,
1980 numCompOut - limits.maxTessellationEvaluationOutputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001981 }
1982 break;
1983
1984 case VK_SHADER_STAGE_GEOMETRY_BIT:
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001985 if (numCompIn > limits.maxGeometryInputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001986 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1987 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
1988 "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
1989 "VkPhysicalDeviceLimits::maxGeometryInputComponents of %u "
1990 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001991 limits.maxGeometryInputComponents, numCompIn - limits.maxGeometryInputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001992 }
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001993 if (numCompOut > limits.maxGeometryOutputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02001994 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
1995 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
1996 "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
1997 "VkPhysicalDeviceLimits::maxGeometryOutputComponents of %u "
1998 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07001999 limits.maxGeometryOutputComponents, numCompOut - limits.maxGeometryOutputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02002000 }
Jeff Bolze9ee3d82019-05-29 13:45:13 -05002001 if (numCompOut * numVertices > limits.maxGeometryTotalOutputComponents) {
2002 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
2003 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
2004 "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
2005 "VkPhysicalDeviceLimits::maxGeometryTotalOutputComponents of %u "
2006 "components by %u components",
2007 limits.maxGeometryTotalOutputComponents,
2008 numCompOut * numVertices - limits.maxGeometryTotalOutputComponents);
2009 }
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02002010 break;
2011
2012 case VK_SHADER_STAGE_FRAGMENT_BIT:
Mark Lobodzinski57a44272019-02-27 12:40:50 -07002013 if (numCompIn > limits.maxFragmentInputComponents) {
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02002014 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
2015 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
2016 "Invalid Pipeline CreateInfo State: Fragment shader exceeds "
2017 "VkPhysicalDeviceLimits::maxFragmentInputComponents of %u "
2018 "components by %u components",
Mark Lobodzinski57a44272019-02-27 12:40:50 -07002019 limits.maxFragmentInputComponents, numCompIn - limits.maxFragmentInputComponents);
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02002020 }
2021 break;
2022
Jeff Bolz148d94e2018-12-13 21:25:56 -06002023 case VK_SHADER_STAGE_RAYGEN_BIT_NV:
2024 case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
2025 case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
2026 case VK_SHADER_STAGE_MISS_BIT_NV:
2027 case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
2028 case VK_SHADER_STAGE_CALLABLE_BIT_NV:
2029 case VK_SHADER_STAGE_TASK_BIT_NV:
2030 case VK_SHADER_STAGE_MESH_BIT_NV:
2031 break;
2032
Daniel Fedai Larsenc939abc2018-08-07 10:01:58 +02002033 default:
2034 assert(false); // This should never happen
2035 }
2036 return skip;
2037}
2038
Jeff Bolze4356752019-03-07 11:23:46 -06002039// copy the specialization constant value into buf, if it is present
2040void GetSpecConstantValue(VkPipelineShaderStageCreateInfo const *pStage, uint32_t spec_id, void *buf) {
2041 VkSpecializationInfo const *spec = pStage->pSpecializationInfo;
2042
2043 if (spec && spec_id < spec->mapEntryCount) {
2044 memcpy(buf, (uint8_t *)spec->pData + spec->pMapEntries[spec_id].offset, spec->pMapEntries[spec_id].size);
2045 }
2046}
2047
2048// Fill in value with the constant or specialization constant value, if available.
2049// Returns true if the value has been accurately filled out.
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002050static bool GetIntConstantValue(spirv_inst_iter insn, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
Jeff Bolze4356752019-03-07 11:23:46 -06002051 const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id, uint32_t *value) {
2052 auto type_id = src->get_def(insn.word(1));
2053 if (type_id.opcode() != spv::OpTypeInt || type_id.word(2) != 32) {
2054 return false;
2055 }
2056 switch (insn.opcode()) {
2057 case spv::OpSpecConstant:
2058 *value = insn.word(3);
2059 GetSpecConstantValue(pStage, id_to_spec_id.at(insn.word(2)), value);
2060 return true;
2061 case spv::OpConstant:
2062 *value = insn.word(3);
2063 return true;
2064 default:
2065 return false;
2066 }
2067}
2068
2069// Map SPIR-V type to VK_COMPONENT_TYPE enum
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002070VkComponentTypeNV GetComponentType(spirv_inst_iter insn, SHADER_MODULE_STATE const *src) {
Jeff Bolze4356752019-03-07 11:23:46 -06002071 switch (insn.opcode()) {
2072 case spv::OpTypeInt:
2073 switch (insn.word(2)) {
2074 case 8:
2075 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT8_NV : VK_COMPONENT_TYPE_UINT8_NV;
2076 case 16:
2077 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT16_NV : VK_COMPONENT_TYPE_UINT16_NV;
2078 case 32:
2079 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT32_NV : VK_COMPONENT_TYPE_UINT32_NV;
2080 case 64:
2081 return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT64_NV : VK_COMPONENT_TYPE_UINT64_NV;
2082 default:
2083 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
2084 }
2085 case spv::OpTypeFloat:
2086 switch (insn.word(2)) {
2087 case 16:
2088 return VK_COMPONENT_TYPE_FLOAT16_NV;
2089 case 32:
2090 return VK_COMPONENT_TYPE_FLOAT32_NV;
2091 case 64:
2092 return VK_COMPONENT_TYPE_FLOAT64_NV;
2093 default:
2094 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
2095 }
2096 default:
2097 return VK_COMPONENT_TYPE_MAX_ENUM_NV;
2098 }
2099}
2100
// Validate SPV_NV_cooperative_matrix behavior that can't be statically validated
// in SPIRV-Tools (e.g. due to specialization constant usage).
//
// When a matrix type's scope/rows/cols all resolve to compile-time constants
// (possibly via specialization constants), this checks that:
//  * each OpTypeCooperativeMatrixNV matches at least one operand slot (A/B/C/D) of
//    some VkCooperativeMatrixPropertiesNV reported by the device, and
//  * the A/B/C/D operand types of each OpCooperativeMatrixMulAddNV all match the
//    SAME VkCooperativeMatrixPropertiesNV entry.
// It also checks the CooperativeMatrixNV capability is only used in stages listed
// in cooperativeMatrixSupportedStages.
bool CoreChecks::ValidateCooperativeMatrix(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
                                           const PIPELINE_STATE *pipeline) {
    bool skip = false;

    // Map SPIR-V result ID to specialization constant id (SpecId decoration value)
    std::unordered_map<uint32_t, uint32_t> id_to_spec_id;
    // Map SPIR-V result ID to the ID of its type.
    std::unordered_map<uint32_t, uint32_t> id_to_type_id;

    // Decoded form of one OpTypeCooperativeMatrixNV: dimensions/scope resolved
    // through (spec) constants where possible, plus the component type.
    struct CoopMatType {
        uint32_t scope, rows, cols;
        VkComponentTypeNV component_type;
        // True only when scope, rows, and cols ALL resolved to known constants;
        // validation against device properties is skipped otherwise.
        bool all_constant;

        CoopMatType() : scope(0), rows(0), cols(0), component_type(VK_COMPONENT_TYPE_MAX_ENUM_NV), all_constant(false) {}

        // Decode the OpTypeCooperativeMatrixNV instruction whose result id is 'id'.
        // Operand layout: word(2)=component type, word(3)=scope, word(4)=rows, word(5)=cols.
        void Init(uint32_t id, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
                  const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id) {
            spirv_inst_iter insn = src->get_def(id);
            uint32_t component_type_id = insn.word(2);
            uint32_t scope_id = insn.word(3);
            uint32_t rows_id = insn.word(4);
            uint32_t cols_id = insn.word(5);
            auto component_type_iter = src->get_def(component_type_id);
            auto scope_iter = src->get_def(scope_id);
            auto rows_iter = src->get_def(rows_id);
            auto cols_iter = src->get_def(cols_id);

            all_constant = true;
            if (!GetIntConstantValue(scope_iter, src, pStage, id_to_spec_id, &scope)) {
                all_constant = false;
            }
            if (!GetIntConstantValue(rows_iter, src, pStage, id_to_spec_id, &rows)) {
                all_constant = false;
            }
            if (!GetIntConstantValue(cols_iter, src, pStage, id_to_spec_id, &cols)) {
                all_constant = false;
            }
            component_type = GetComponentType(component_type_iter, src);
        }
    };

    bool seen_coopmat_capability = false;

    for (auto insn : *src) {
        // Whitelist instructions whose result can be a cooperative matrix type, and
        // keep track of their types. It would be nice if SPIRV-Headers generated code
        // to identify which instructions have a result type and result id. Lacking that,
        // this whitelist is based on the set of instructions that
        // SPV_NV_cooperative_matrix says can be used with cooperative matrix types.
        switch (insn.opcode()) {
            case spv::OpLoad:
            case spv::OpCooperativeMatrixLoadNV:
            case spv::OpCooperativeMatrixMulAddNV:
            case spv::OpSNegate:
            case spv::OpFNegate:
            case spv::OpIAdd:
            case spv::OpFAdd:
            case spv::OpISub:
            case spv::OpFSub:
            case spv::OpFDiv:
            case spv::OpSDiv:
            case spv::OpUDiv:
            case spv::OpMatrixTimesScalar:
            case spv::OpConstantComposite:
            case spv::OpCompositeConstruct:
            case spv::OpConvertFToU:
            case spv::OpConvertFToS:
            case spv::OpConvertSToF:
            case spv::OpConvertUToF:
            case spv::OpUConvert:
            case spv::OpSConvert:
            case spv::OpFConvert:
                // For these opcodes: word(1) = result type id, word(2) = result id.
                id_to_type_id[insn.word(2)] = insn.word(1);
                break;
            default:
                break;
        }

        switch (insn.opcode()) {
            case spv::OpDecorate:
                // Record SpecId decorations so spec constants can be resolved later.
                if (insn.word(2) == spv::DecorationSpecId) {
                    id_to_spec_id[insn.word(1)] = insn.word(3);
                }
                break;
            case spv::OpCapability:
                if (insn.word(1) == spv::CapabilityCooperativeMatrixNV) {
                    seen_coopmat_capability = true;

                    if (!(pStage->stage & phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages)) {
                        skip |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                    HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixSupportedStages,
                                    "OpTypeCooperativeMatrixNV used in shader stage not in cooperativeMatrixSupportedStages (= %u)",
                                    phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages);
                    }
                }
                break;
            case spv::OpMemoryModel:
                // If the capability isn't enabled, don't bother with the rest of this function.
                // OpMemoryModel is the first required instruction after all OpCapability instructions.
                if (!seen_coopmat_capability) {
                    return skip;
                }
                break;
            case spv::OpTypeCooperativeMatrixNV: {
                CoopMatType M;
                M.Init(insn.word(1), src, pStage, id_to_spec_id);

                if (M.all_constant) {
                    // Validate that the type parameters are all supported for one of the
                    // operands of a cooperative matrix property.
                    bool valid = false;
                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
                        // A operand: M x K
                        if (cooperative_matrix_properties[i].AType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].KSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // B operand: K x N
                        if (cooperative_matrix_properties[i].BType == M.component_type &&
                            cooperative_matrix_properties[i].KSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // C operand: M x N
                        if (cooperative_matrix_properties[i].CType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                        // D (result) operand: M x N
                        if (cooperative_matrix_properties[i].DType == M.component_type &&
                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
                            cooperative_matrix_properties[i].scope == M.scope) {
                            valid = true;
                            break;
                        }
                    }
                    if (!valid) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixType,
                                        "OpTypeCooperativeMatrixNV (result id = %u) operands don't match a supported matrix type",
                                        insn.word(1));
                    }
                }
                break;
            }
            case spv::OpCooperativeMatrixMulAddNV: {
                CoopMatType A, B, C, D;
                // Operands: word(2)=result (D), word(3)=A, word(4)=B, word(5)=C.
                if (id_to_type_id.find(insn.word(2)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(3)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(4)) == id_to_type_id.end() ||
                    id_to_type_id.find(insn.word(5)) == id_to_type_id.end()) {
                    // Couldn't find type of matrix
                    assert(false);
                    break;
                }
                D.Init(id_to_type_id[insn.word(2)], src, pStage, id_to_spec_id);
                A.Init(id_to_type_id[insn.word(3)], src, pStage, id_to_spec_id);
                B.Init(id_to_type_id[insn.word(4)], src, pStage, id_to_spec_id);
                C.Init(id_to_type_id[insn.word(5)], src, pStage, id_to_spec_id);

                if (A.all_constant && B.all_constant && C.all_constant && D.all_constant) {
                    // Validate that the type parameters are all supported for the same
                    // cooperative matrix property.
                    bool valid = false;
                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
                        if (cooperative_matrix_properties[i].AType == A.component_type &&
                            cooperative_matrix_properties[i].MSize == A.rows && cooperative_matrix_properties[i].KSize == A.cols &&
                            cooperative_matrix_properties[i].scope == A.scope &&

                            cooperative_matrix_properties[i].BType == B.component_type &&
                            cooperative_matrix_properties[i].KSize == B.rows && cooperative_matrix_properties[i].NSize == B.cols &&
                            cooperative_matrix_properties[i].scope == B.scope &&

                            cooperative_matrix_properties[i].CType == C.component_type &&
                            cooperative_matrix_properties[i].MSize == C.rows && cooperative_matrix_properties[i].NSize == C.cols &&
                            cooperative_matrix_properties[i].scope == C.scope &&

                            cooperative_matrix_properties[i].DType == D.component_type &&
                            cooperative_matrix_properties[i].MSize == D.rows && cooperative_matrix_properties[i].NSize == D.cols &&
                            cooperative_matrix_properties[i].scope == D.scope) {
                            valid = true;
                            break;
                        }
                    }
                    if (!valid) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixMulAdd,
                                        "OpCooperativeMatrixMulAddNV (result id = %u) operands don't match a supported matrix "
                                        "VkCooperativeMatrixPropertiesNV",
                                        insn.word(2));
                    }
                }
                break;
            }
            default:
                break;
        }
    }

    return skip;
}
2307
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002308bool CoreChecks::ValidateExecutionModes(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002309 auto entrypoint_id = entrypoint.word(2);
2310
Attilio Provenzanof6c0e852019-04-09 11:01:18 +01002311 // The first denorm execution mode encountered, along with its bit width.
2312 // Used to check if SeparateDenormSettings is respected.
2313 std::pair<spv::ExecutionMode, uint32_t> first_denorm_execution_mode = std::make_pair(spv::ExecutionModeMax, 0);
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002314
Attilio Provenzanof6c0e852019-04-09 11:01:18 +01002315 // The first rounding mode encountered, along with its bit width.
2316 // Used to check if SeparateRoundingModeSettings is respected.
2317 std::pair<spv::ExecutionMode, uint32_t> first_rounding_mode = std::make_pair(spv::ExecutionModeMax, 0);
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002318
2319 bool skip = false;
2320
Jeff Bolze9ee3d82019-05-29 13:45:13 -05002321 uint32_t verticesOut = 0;
2322 uint32_t invocations = 0;
2323
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002324 for (auto insn : *src) {
2325 if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
2326 auto mode = insn.word(2);
2327 switch (mode) {
2328 case spv::ExecutionModeSignedZeroInfNanPreserve: {
2329 auto bit_width = insn.word(3);
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002330 if ((bit_width == 16 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat16) ||
2331 (bit_width == 32 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat32) ||
2332 (bit_width == 64 && !enabled_features.float_controls.shaderSignedZeroInfNanPreserveFloat64)) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002333 skip |=
2334 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2335 kVUID_Core_Shader_FeatureNotEnabled,
2336 "Shader requires SignedZeroInfNanPreserve for bit width %d but it is not enabled on the device",
2337 bit_width);
2338 }
2339 break;
2340 }
2341
2342 case spv::ExecutionModeDenormPreserve: {
2343 auto bit_width = insn.word(3);
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002344 if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormPreserveFloat16) ||
2345 (bit_width == 32 && !enabled_features.float_controls.shaderDenormPreserveFloat32) ||
2346 (bit_width == 64 && !enabled_features.float_controls.shaderDenormPreserveFloat64)) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002347 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2348 kVUID_Core_Shader_FeatureNotEnabled,
2349 "Shader requires DenormPreserve for bit width %d but it is not enabled on the device",
2350 bit_width);
2351 }
2352
Attilio Provenzanof6c0e852019-04-09 11:01:18 +01002353 if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
2354 // Register the first denorm execution mode found
2355 first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
2356 } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002357 !enabled_features.float_controls.separateDenormSettings) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002358 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2359 kVUID_Core_Shader_FeatureNotEnabled,
2360 "Shader uses separate denorm execution modes for different bit widths but "
2361 "SeparateDenormSettings is not enabled on the device");
2362 }
2363 break;
2364 }
2365
2366 case spv::ExecutionModeDenormFlushToZero: {
2367 auto bit_width = insn.word(3);
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002368 if ((bit_width == 16 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat16) ||
2369 (bit_width == 32 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat32) ||
2370 (bit_width == 64 && !enabled_features.float_controls.shaderDenormFlushToZeroFloat64)) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002371 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2372 kVUID_Core_Shader_FeatureNotEnabled,
2373 "Shader requires DenormFlushToZero for bit width %d but it is not enabled on the device",
2374 bit_width);
2375 }
2376
Attilio Provenzanof6c0e852019-04-09 11:01:18 +01002377 if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
2378 // Register the first denorm execution mode found
2379 first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
2380 } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002381 !enabled_features.float_controls.separateDenormSettings) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002382 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2383 kVUID_Core_Shader_FeatureNotEnabled,
2384 "Shader uses separate denorm execution modes for different bit widths but "
2385 "SeparateDenormSettings is not enabled on the device");
2386 }
2387 break;
2388 }
2389
2390 case spv::ExecutionModeRoundingModeRTE: {
2391 auto bit_width = insn.word(3);
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002392 if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTEFloat16) ||
2393 (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTEFloat32) ||
2394 (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTEFloat64)) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002395 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2396 kVUID_Core_Shader_FeatureNotEnabled,
2397 "Shader requires RoundingModeRTE for bit width %d but it is not enabled on the device",
2398 bit_width);
2399 }
2400
Attilio Provenzanof6c0e852019-04-09 11:01:18 +01002401 if (first_rounding_mode.first == spv::ExecutionModeMax) {
2402 // Register the first rounding mode found
2403 first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
2404 } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002405 !enabled_features.float_controls.separateRoundingModeSettings) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002406 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2407 kVUID_Core_Shader_FeatureNotEnabled,
2408 "Shader uses separate rounding modes for different bit widths but "
2409 "SeparateRoundingModeSettings is not enabled on the device");
2410 }
2411 break;
2412 }
2413
2414 case spv::ExecutionModeRoundingModeRTZ: {
2415 auto bit_width = insn.word(3);
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002416 if ((bit_width == 16 && !enabled_features.float_controls.shaderRoundingModeRTZFloat16) ||
2417 (bit_width == 32 && !enabled_features.float_controls.shaderRoundingModeRTZFloat32) ||
2418 (bit_width == 64 && !enabled_features.float_controls.shaderRoundingModeRTZFloat64)) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002419 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2420 kVUID_Core_Shader_FeatureNotEnabled,
2421 "Shader requires RoundingModeRTZ for bit width %d but it is not enabled on the device",
2422 bit_width);
2423 }
2424
Attilio Provenzanof6c0e852019-04-09 11:01:18 +01002425 if (first_rounding_mode.first == spv::ExecutionModeMax) {
2426 // Register the first rounding mode found
2427 first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
2428 } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002429 !enabled_features.float_controls.separateRoundingModeSettings) {
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002430 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2431 kVUID_Core_Shader_FeatureNotEnabled,
2432 "Shader uses separate rounding modes for different bit widths but "
2433 "SeparateRoundingModeSettings is not enabled on the device");
2434 }
2435 break;
2436 }
Jeff Bolze9ee3d82019-05-29 13:45:13 -05002437
2438 case spv::ExecutionModeOutputVertices: {
2439 verticesOut = insn.word(3);
2440 break;
2441 }
2442
2443 case spv::ExecutionModeInvocations: {
2444 invocations = insn.word(3);
2445 break;
2446 }
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002447 }
2448 }
2449 }
2450
Jeff Bolze9ee3d82019-05-29 13:45:13 -05002451 if (entrypoint.word(1) == spv::ExecutionModelGeometry) {
2452 if (verticesOut == 0 || verticesOut > phys_dev_props.limits.maxGeometryOutputVertices) {
2453 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2454 "VUID-VkPipelineShaderStageCreateInfo-stage-00714",
2455 "Geometry shader entry point must have an OpExecutionMode instruction that "
2456 "specifies a maximum output vertex count that is greater than 0 and less "
2457 "than or equal to maxGeometryOutputVertices. "
2458 "OutputVertices=%d, maxGeometryOutputVertices=%d",
2459 verticesOut, phys_dev_props.limits.maxGeometryOutputVertices);
2460 }
2461
2462 if (invocations == 0 || invocations > phys_dev_props.limits.maxGeometryShaderInvocations) {
2463 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
2464 "VUID-VkPipelineShaderStageCreateInfo-stage-00715",
2465 "Geometry shader entry point must have an OpExecutionMode instruction that "
2466 "specifies an invocation count that is greater than 0 and less "
2467 "than or equal to maxGeometryShaderInvocations. "
2468 "Invocations=%d, maxGeometryShaderInvocations=%d",
2469 invocations, phys_dev_props.limits.maxGeometryShaderInvocations);
2470 }
2471 }
Attilio Provenzanoc5d50102019-03-25 17:40:37 +00002472 return skip;
2473}
2474
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002475static uint32_t DescriptorTypeToReqs(SHADER_MODULE_STATE const *module, uint32_t type_id) {
Chris Forbes47567b72017-06-09 12:09:45 -07002476 auto type = module->get_def(type_id);
2477
2478 while (true) {
2479 switch (type.opcode()) {
2480 case spv::OpTypeArray:
Chris Forbes062f1222018-08-21 15:34:15 -07002481 case spv::OpTypeRuntimeArray:
Chris Forbes47567b72017-06-09 12:09:45 -07002482 case spv::OpTypeSampledImage:
2483 type = module->get_def(type.word(2));
2484 break;
2485 case spv::OpTypePointer:
2486 type = module->get_def(type.word(3));
2487 break;
2488 case spv::OpTypeImage: {
2489 auto dim = type.word(3);
2490 auto arrayed = type.word(5);
2491 auto msaa = type.word(6);
2492
Chris Forbes74ba2232018-08-27 15:19:27 -07002493 uint32_t bits = 0;
2494 switch (GetFundamentalType(module, type.word(2))) {
2495 case FORMAT_TYPE_FLOAT:
2496 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT;
2497 break;
2498 case FORMAT_TYPE_UINT:
2499 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
2500 break;
2501 case FORMAT_TYPE_SINT:
2502 bits = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
2503 break;
2504 default:
2505 break;
2506 }
2507
Chris Forbes47567b72017-06-09 12:09:45 -07002508 switch (dim) {
2509 case spv::Dim1D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002510 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_1D;
2511 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002512 case spv::Dim2D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002513 bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
2514 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_2D;
2515 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002516 case spv::Dim3D:
Chris Forbes74ba2232018-08-27 15:19:27 -07002517 bits |= DESCRIPTOR_REQ_VIEW_TYPE_3D;
2518 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002519 case spv::DimCube:
Chris Forbes74ba2232018-08-27 15:19:27 -07002520 bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_CUBE;
2521 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002522 case spv::DimSubpassData:
Chris Forbes74ba2232018-08-27 15:19:27 -07002523 bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
2524 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002525 default: // buffer, etc.
Chris Forbes74ba2232018-08-27 15:19:27 -07002526 return bits;
Chris Forbes47567b72017-06-09 12:09:45 -07002527 }
2528 }
2529 default:
2530 return 0;
2531 }
2532 }
2533}
2534
2535// For given pipelineLayout verify that the set_layout_node at slot.first
2536// has the requested binding at slot.second and return ptr to that binding
Mark Lobodzinskica6ebe32019-04-25 11:43:37 -06002537static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_STATE const *pipelineLayout,
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002538 descriptor_slot_t slot) {
Chris Forbes47567b72017-06-09 12:09:45 -07002539 if (!pipelineLayout) return nullptr;
2540
2541 if (slot.first >= pipelineLayout->set_layouts.size()) return nullptr;
2542
2543 return pipelineLayout->set_layouts[slot.first]->GetDescriptorSetLayoutBindingPtrFromBinding(slot.second);
2544}
2545
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002546static bool FindLocalSize(SHADER_MODULE_STATE const *src, uint32_t &local_size_x, uint32_t &local_size_y, uint32_t &local_size_z) {
Locke1ec6d952019-04-02 11:57:21 -06002547 for (auto insn : *src) {
2548 if (insn.opcode() == spv::OpEntryPoint) {
2549 auto executionModel = insn.word(1);
2550 auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
2551 if (entrypointStageBits == VK_SHADER_STAGE_COMPUTE_BIT) {
2552 auto entrypoint_id = insn.word(2);
2553 for (auto insn1 : *src) {
2554 if (insn1.opcode() == spv::OpExecutionMode && insn1.word(1) == entrypoint_id &&
2555 insn1.word(2) == spv::ExecutionModeLocalSize) {
2556 local_size_x = insn1.word(3);
2557 local_size_y = insn1.word(4);
2558 local_size_z = insn1.word(5);
2559 return true;
2560 }
2561 }
2562 }
2563 }
2564 }
2565 return false;
2566}
2567
John Zulauf14c355b2019-06-27 16:09:37 -06002568static void ProcessExecutionModes(SHADER_MODULE_STATE const *src, const spirv_inst_iter &entrypoint, PIPELINE_STATE *pipeline) {
Jeff Bolz105d6492018-09-29 15:46:44 -05002569 auto entrypoint_id = entrypoint.word(2);
Chris Forbes0771b672018-03-22 21:13:46 -07002570 bool is_point_mode = false;
2571
2572 for (auto insn : *src) {
2573 if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
2574 switch (insn.word(2)) {
2575 case spv::ExecutionModePointMode:
2576 // In tessellation shaders, PointMode is separate and trumps the tessellation topology.
2577 is_point_mode = true;
2578 break;
2579
2580 case spv::ExecutionModeOutputPoints:
2581 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
2582 break;
2583
2584 case spv::ExecutionModeIsolines:
2585 case spv::ExecutionModeOutputLineStrip:
2586 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
2587 break;
2588
2589 case spv::ExecutionModeTriangles:
2590 case spv::ExecutionModeQuads:
2591 case spv::ExecutionModeOutputTriangleStrip:
2592 pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
2593 break;
2594 }
2595 }
2596 }
2597
2598 if (is_point_mode) pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
2599}
2600
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002601// If PointList topology is specified in the pipeline, verify that a shader geometry stage writes PointSize
2602// o If there is only a vertex shader : gl_PointSize must be written when using points
2603// o If there is a geometry or tessellation shader:
2604// - If shaderTessellationAndGeometryPointSize feature is enabled:
2605// * gl_PointSize must be written in the final geometry stage
2606// - If shaderTessellationAndGeometryPointSize feature is disabled:
2607// * gl_PointSize must NOT be written and a default of 1.0 is assumed
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06002608bool CoreChecks::ValidatePointListShaderState(const PIPELINE_STATE *pipeline, SHADER_MODULE_STATE const *src,
2609 spirv_inst_iter entrypoint, VkShaderStageFlagBits stage) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002610 if (pipeline->topology_at_rasterizer != VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
2611 return false;
2612 }
2613
2614 bool pointsize_written = false;
2615 bool skip = false;
2616
2617 // Search for PointSize built-in decorations
2618 std::vector<uint32_t> pointsize_builtin_offsets;
2619 spirv_inst_iter insn = entrypoint;
2620 while (!pointsize_written && (insn.opcode() != spv::OpFunction)) {
2621 if (insn.opcode() == spv::OpMemberDecorate) {
2622 if (insn.word(3) == spv::DecorationBuiltIn) {
2623 if (insn.word(4) == spv::BuiltInPointSize) {
2624 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
2625 }
2626 }
2627 } else if (insn.opcode() == spv::OpDecorate) {
2628 if (insn.word(2) == spv::DecorationBuiltIn) {
2629 if (insn.word(3) == spv::BuiltInPointSize) {
2630 pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
2631 }
2632 }
2633 }
2634
2635 insn++;
2636 }
2637
2638 if ((stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || stage == VK_SHADER_STAGE_GEOMETRY_BIT) &&
Mark Lobodzinskid7b03cc2019-04-19 14:23:10 -06002639 !enabled_features.core.shaderTessellationAndGeometryPointSize) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002640 if (pointsize_written) {
Mark Lobodzinski93a1fa72019-04-19 12:12:25 -06002641 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002642 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_PointSizeBuiltInOverSpecified,
2643 "Pipeline topology is set to POINT_LIST and geometry or tessellation shaders write PointSize which "
2644 "is prohibited when the shaderTessellationAndGeometryPointSize feature is not enabled.");
2645 }
2646 } else if (!pointsize_written) {
2647 skip |=
Mark Lobodzinski93a1fa72019-04-19 12:12:25 -06002648 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002649 HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_MissingPointSizeBuiltIn,
2650 "Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader corresponding to %s.",
2651 string_VkShaderStageFlagBits(stage));
2652 }
2653 return skip;
2654}
John Zulauf14c355b2019-06-27 16:09:37 -06002655void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
2656 PIPELINE_STATE::StageState *stage_state) {
2657 // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
2658 auto module = GetShaderModuleState(pStage->module);
2659 if (!module->has_valid_spirv) return;
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002660
John Zulauf14c355b2019-06-27 16:09:37 -06002661 // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
2662 auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
2663 if (entrypoint == module->end()) return;
Chris Forbes47567b72017-06-09 12:09:45 -07002664
Chris Forbes47567b72017-06-09 12:09:45 -07002665 // Mark accessible ids
John Zulauf14c355b2019-06-27 16:09:37 -06002666 stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
Shannon McPhersonc06c33d2018-06-28 17:21:12 -06002667 ProcessExecutionModes(module, entrypoint, pipeline);
Chris Forbes47567b72017-06-09 12:09:45 -07002668
John Zulauf14c355b2019-06-27 16:09:37 -06002669 stage_state->descriptor_uses =
2670 CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
2671 // Capture descriptor uses for the pipeline
2672 for (auto use : stage_state->descriptor_uses) {
2673 // While validating shaders capture which slots are used by the pipeline
2674 auto &reqs = pipeline->active_slots[use.first.first][use.first.second];
2675 reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
2676 }
2677}
2678
// Validate a single pipeline shader stage: module validity, entry point presence,
// capability/limit/execution-mode checks, descriptor usage against the pipeline
// layout, and (for fragment stages) input attachment usage against the render pass.
// Returns true if any validation error was logged.
bool CoreChecks::ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, const PIPELINE_STATE *pipeline,
                                             const PIPELINE_STATE::StageState &stage_state, const SHADER_MODULE_STATE *module,
                                             const spirv_inst_iter &entrypoint, bool check_point_size) {
    bool skip = false;

    // Check the module
    if (!module->has_valid_spirv) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineShaderStageCreateInfo-module-parameter", "%s does not contain valid spirv for stage %s.",
                        report_data->FormatHandle(module->vk_shader_module).c_str(), string_VkShaderStageFlagBits(pStage->stage));
    }

    // Check the entrypoint
    if (entrypoint == module->end()) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkPipelineShaderStageCreateInfo-pName-00707", "No entrypoint found named `%s` for stage %s..",
                        pStage->pName, string_VkShaderStageFlagBits(pStage->stage));
    }
    if (skip) return true;  // no point continuing beyond here, any analysis is just going to be garbage.

    // Mark accessible ids
    auto &accessible_ids = stage_state.accessible_ids;

    // Validate descriptor set layout against what the entrypoint actually uses
    bool has_writable_descriptor = stage_state.has_writable_descriptor;
    auto &descriptor_uses = stage_state.descriptor_uses;

    // Validate shader capabilities against enabled device features
    skip |= ValidateShaderCapabilities(module, pStage->stage);
    skip |= ValidateShaderStageWritableDescriptor(pStage->stage, has_writable_descriptor);
    skip |= ValidateShaderStageInputOutputLimits(module, pStage, pipeline, entrypoint);
    skip |= ValidateShaderStageGroupNonUniform(module, pStage->stage, accessible_ids);
    skip |= ValidateExecutionModes(module, entrypoint);
    skip |= ValidateSpecializationOffsets(report_data, pStage);
    skip |= ValidatePushConstantUsage(report_data, pipeline->pipeline_layout.push_constant_ranges.get(), module, accessible_ids,
                                      pStage->stage);
    // PointSize checks only matter when rasterization is actually enabled.
    if (check_point_size && !pipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
        skip |= ValidatePointListShaderState(pipeline, module, entrypoint, pStage->stage);
    }
    skip |= ValidateCooperativeMatrix(module, pStage, pipeline);

    // Validate descriptor use
    for (auto use : descriptor_uses) {
        // Verify given pipelineLayout has requested setLayout with requested binding
        const auto &binding = GetDescriptorBinding(&pipeline->pipeline_layout, use.first);
        unsigned required_descriptor_count;
        // Set of descriptor types that would satisfy this use, plus how many descriptors it needs.
        std::set<uint32_t> descriptor_types = TypeToDescriptorTypeSet(module, use.second.type_id, required_descriptor_count);

        if (!binding) {
            // Slot not declared in the pipeline layout at all.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_MissingDescriptor,
                            "Shader uses descriptor slot %u.%u (expected `%s`) but not declared in pipeline layout",
                            use.first.first, use.first.second, string_descriptorTypes(descriptor_types).c_str());
        } else if (~binding->stageFlags & pStage->stage) {
            // Binding exists but is not visible to this shader stage.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
                            kVUID_Core_Shader_DescriptorNotAccessibleFromStage,
                            "Shader uses descriptor slot %u.%u but descriptor not accessible from stage %s", use.first.first,
                            use.first.second, string_VkShaderStageFlagBits(pStage->stage));
        } else if (descriptor_types.find(binding->descriptorType) == descriptor_types.end()) {
            // Binding's declared type doesn't match what the shader consumes.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_DescriptorTypeMismatch,
                            "Type mismatch on descriptor slot %u.%u (expected `%s`) but descriptor of type %s", use.first.first,
                            use.first.second, string_descriptorTypes(descriptor_types).c_str(),
                            string_VkDescriptorType(binding->descriptorType));
        } else if (binding->descriptorCount < required_descriptor_count) {
            // Arrayed use needs more descriptors than the binding provides.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_DescriptorTypeMismatch,
                            "Shader expects at least %u descriptors for binding %u.%u but only %u provided",
                            required_descriptor_count, use.first.first, use.first.second, binding->descriptorCount);
        }
    }

    // Validate use of input attachments against subpass structure
    if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
        auto input_attachment_uses = CollectInterfaceByInputAttachmentIndex(module, accessible_ids);

        auto rpci = pipeline->rp_state->createInfo.ptr();
        auto subpass = pipeline->graphicsPipelineCI.subpass;

        for (auto use : input_attachment_uses) {
            auto input_attachments = rpci->pSubpasses[subpass].pInputAttachments;
            // Map the shader's input attachment index to the subpass attachment;
            // out-of-range or absent entries resolve to VK_ATTACHMENT_UNUSED.
            auto index = (input_attachments && use.first < rpci->pSubpasses[subpass].inputAttachmentCount)
                             ? input_attachments[use.first].attachment
                             : VK_ATTACHMENT_UNUSED;

            if (index == VK_ATTACHMENT_UNUSED) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                kVUID_Core_Shader_MissingInputAttachment,
                                "Shader consumes input attachment index %d but not provided in subpass", use.first);
            } else if (!(GetFormatType(rpci->pAttachments[index].format) & GetFundamentalType(module, use.second.type_id))) {
                // Numeric format class (float/int/uint) of the attachment must match the shader's declared type.
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                            kVUID_Core_Shader_InputAttachmentTypeMismatch,
                            "Subpass input attachment %u format of %s does not match type used in shader `%s`", use.first,
                            string_VkFormat(rpci->pAttachments[index].format), DescribeType(module, use.second.type_id).c_str());
            }
        }
    }
    // Compute stages additionally validate workgroup sizes against device limits.
    if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT) {
        skip |= ValidateComputeWorkGroupSizes(module);
    }
    return skip;
}
2782
// Validate the interface between two adjacent pipeline stages: every input the
// consumer reads must be produced by the producer with a matching type and
// matching patch/precision decorations; unconsumed producer outputs are reported
// as performance warnings. For non-fragment consumers, builtin block members are
// also compared element-by-element. Returns true if any error was logged.
static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data, SHADER_MODULE_STATE const *producer,
                                           spirv_inst_iter producer_entrypoint, shader_stage_attributes const *producer_stage,
                                           SHADER_MODULE_STATE const *consumer, spirv_inst_iter consumer_entrypoint,
                                           shader_stage_attributes const *consumer_stage) {
    bool skip = false;

    // Interface variables keyed by (location, component) for each side.
    auto outputs =
        CollectInterfaceByLocation(producer, producer_entrypoint, spv::StorageClassOutput, producer_stage->arrayed_output);
    auto inputs = CollectInterfaceByLocation(consumer, consumer_entrypoint, spv::StorageClassInput, consumer_stage->arrayed_input);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    // Maps sorted by key (location); walk them together to find mismatches
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        // Sentinel key (0,0) when a side is exhausted; the at-end flags gate its use below.
        auto a_first = a_at_end ? std::make_pair(0u, 0u) : a_it->first;
        auto b_first = b_at_end ? std::make_pair(0u, 0u) : b_it->first;

        if (b_at_end || ((!a_at_end) && (a_first < b_first))) {
            // Producer writes a location the consumer never reads: warning only.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
                            "%s writes to output location %u.%u which is not consumed by %s", producer_stage->name, a_first.first,
                            a_first.second, consumer_stage->name);
            a_it++;
        } else if (a_at_end || a_first > b_first) {
            // Consumer reads a location the producer never writes: error.
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(consumer->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
                            "%s consumes input location %u.%u which is not written by %s", consumer_stage->name, b_first.first,
                            b_first.second, producer_stage->name);
            b_it++;
        } else {
            // Matching location on both sides: compare types and decorations.
            // subtleties of arrayed interfaces:
            // - if is_patch, then the member is not arrayed, even though the interface may be.
            // - if is_block_member, then the extra array level of an arrayed interface is not
            //   expressed in the member type -- it's expressed in the block type.
            if (!TypesMatch(producer, consumer, a_it->second.type_id, b_it->second.type_id,
                            producer_stage->arrayed_output && !a_it->second.is_patch && !a_it->second.is_block_member,
                            consumer_stage->arrayed_input && !b_it->second.is_patch && !b_it->second.is_block_member, true)) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Type mismatch on location %u.%u: '%s' vs '%s'", a_first.first, a_first.second,
                                DescribeType(producer, a_it->second.type_id).c_str(),
                                DescribeType(consumer, b_it->second.type_id).c_str());
            }
            if (a_it->second.is_patch != b_it->second.is_patch) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Decoration mismatch on location %u.%u: is per-%s in %s stage but per-%s in %s stage",
                                a_first.first, a_first.second, a_it->second.is_patch ? "patch" : "vertex", producer_stage->name,
                                b_it->second.is_patch ? "patch" : "vertex", consumer_stage->name);
            }
            if (a_it->second.is_relaxed_precision != b_it->second.is_relaxed_precision) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                "Decoration mismatch on location %u.%u: %s and %s stages differ in precision", a_first.first,
                                a_first.second, producer_stage->name, consumer_stage->name);
            }
            a_it++;
            b_it++;
        }
    }

    // Builtin interface blocks (gl_PerVertex and friends) must also line up,
    // except when the consumer is the fragment stage.
    if (consumer_stage->stage != VK_SHADER_STAGE_FRAGMENT_BIT) {
        auto builtins_producer = CollectBuiltinBlockMembers(producer, producer_entrypoint, spv::StorageClassOutput);
        auto builtins_consumer = CollectBuiltinBlockMembers(consumer, consumer_entrypoint, spv::StorageClassInput);

        if (!builtins_producer.empty() && !builtins_consumer.empty()) {
            if (builtins_producer.size() != builtins_consumer.size()) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                            HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                            "Number of elements inside builtin block differ between stages (%s %d vs %s %d).", producer_stage->name,
                            (int)builtins_producer.size(), consumer_stage->name, (int)builtins_consumer.size());
            } else {
                // Same size: compare members pairwise in order; stop at the first mismatch.
                auto it_producer = builtins_producer.begin();
                auto it_consumer = builtins_consumer.begin();
                while (it_producer != builtins_producer.end() && it_consumer != builtins_consumer.end()) {
                    if (*it_producer != *it_consumer) {
                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
                                        HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
                                        "Builtin variable inside block doesn't match between %s and %s.", producer_stage->name,
                                        consumer_stage->name);
                        break;
                    }
                    it_producer++;
                    it_consumer++;
                }
            }
        }
    }

    return skip;
}
2878
John Zulauf14c355b2019-06-27 16:09:37 -06002879static inline uint32_t DetermineFinalGeomStage(const PIPELINE_STATE *pipeline, const VkGraphicsPipelineCreateInfo *pCreateInfo) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002880 uint32_t stage_mask = 0;
2881 if (pipeline->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
2882 for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
2883 stage_mask |= pCreateInfo->pStages[i].stage;
2884 }
2885 // Determine which shader in which PointSize should be written (the final geometry stage)
Jeff Bolz105d6492018-09-29 15:46:44 -05002886 if (stage_mask & VK_SHADER_STAGE_MESH_BIT_NV) {
2887 stage_mask = VK_SHADER_STAGE_MESH_BIT_NV;
2888 } else if (stage_mask & VK_SHADER_STAGE_GEOMETRY_BIT) {
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002889 stage_mask = VK_SHADER_STAGE_GEOMETRY_BIT;
2890 } else if (stage_mask & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
2891 stage_mask = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
2892 } else if (stage_mask & VK_SHADER_STAGE_VERTEX_BIT) {
2893 stage_mask = VK_SHADER_STAGE_VERTEX_BIT;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002894 }
2895 }
Mark Lobodzinski1b4a8ed2018-08-07 08:47:05 -06002896 return stage_mask;
Mark Lobodzinski2c984cc2018-07-31 09:57:46 -06002897}
2898
// Validate the shaders used by the given graphics pipeline, individually and then
// across stage interfaces. Returns true if any validation error was logged.
bool CoreChecks::ValidateGraphicsPipelineShaderState(const PIPELINE_STATE *pipeline) {
    auto pCreateInfo = pipeline->graphicsPipelineCI.ptr();
    int vertex_stage = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
    int fragment_stage = GetShaderStageId(VK_SHADER_STAGE_FRAGMENT_BIT);

    // Per-stage module/entrypoint tables, indexed by GetShaderStageId(stage).
    const SHADER_MODULE_STATE *shaders[32];
    memset(shaders, 0, sizeof(shaders));
    spirv_inst_iter entrypoints[32];
    memset(entrypoints, 0, sizeof(entrypoints));
    bool skip = false;

    // Nonzero only for point-list pipelines: the stage that must write PointSize.
    uint32_t pointlist_stage_mask = DetermineFinalGeomStage(pipeline, pCreateInfo);

    // First pass: validate each stage on its own.
    for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
        auto pStage = &pCreateInfo->pStages[i];
        auto stage_id = GetShaderStageId(pStage->stage);
        shaders[stage_id] = GetShaderModuleState(pStage->module);
        entrypoints[stage_id] = FindEntrypoint(shaders[stage_id], pStage->pName, pStage->stage);
        skip |= ValidatePipelineShaderStage(pStage, pipeline, pipeline->stage_state[i], shaders[stage_id], entrypoints[stage_id],
                                            (pointlist_stage_mask == pStage->stage));
    }

    // if the shader stages are no good individually, cross-stage validation is pointless.
    if (skip) return true;

    auto vi = pCreateInfo->pVertexInputState;

    if (vi) {
        skip |= ValidateViConsistency(report_data, vi);
    }

    // Vertex-input state must feed the vertex shader's declared inputs.
    if (shaders[vertex_stage] && shaders[vertex_stage]->has_valid_spirv) {
        skip |= ValidateViAgainstVsInputs(report_data, vi, shaders[vertex_stage], entrypoints[vertex_stage]);
    }

    // Second pass: walk producer/consumer pairs along the pipeline, starting with
    // vertex -> tessellation-control and ending at the fragment stage.
    int producer = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
    int consumer = GetShaderStageId(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);

    // Advance past any leading stages that are absent from this pipeline.
    while (!shaders[producer] && producer != fragment_stage) {
        producer++;
        consumer++;
    }

    for (; producer != fragment_stage && consumer <= fragment_stage; consumer++) {
        assert(shaders[producer]);
        if (shaders[consumer]) {
            if (shaders[consumer]->has_valid_spirv && shaders[producer]->has_valid_spirv) {
                skip |= ValidateInterfaceBetweenStages(report_data, shaders[producer], entrypoints[producer],
                                                      &shader_stage_attribs[producer], shaders[consumer], entrypoints[consumer],
                                                      &shader_stage_attribs[consumer]);
            }

            // The consumer becomes the producer for the next present stage; absent
            // stages are simply skipped over (producer stays put).
            producer = consumer;
        }
    }

    // Finally, fragment outputs must match the render pass's subpass attachments.
    if (shaders[fragment_stage] && shaders[fragment_stage]->has_valid_spirv) {
        skip |= ValidateFsOutputsAgainstRenderPass(report_data, shaders[fragment_stage], entrypoints[fragment_stage], pipeline,
                                                  pCreateInfo->subpass);
    }

    return skip;
}
2965
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002966bool CoreChecks::ValidateComputePipeline(PIPELINE_STATE *pipeline) {
John Zulauf14c355b2019-06-27 16:09:37 -06002967 const auto &stage = *pipeline->computePipelineCI.stage.ptr();
Chris Forbes47567b72017-06-09 12:09:45 -07002968
John Zulauf14c355b2019-06-27 16:09:37 -06002969 const SHADER_MODULE_STATE *module = GetShaderModuleState(stage.module);
2970 const spirv_inst_iter entrypoint = FindEntrypoint(module, stage.pName, stage.stage);
Chris Forbes47567b72017-06-09 12:09:45 -07002971
John Zulauf14c355b2019-06-27 16:09:37 -06002972 return ValidatePipelineShaderStage(&stage, pipeline, pipeline->stage_state[0], module, entrypoint, false);
Chris Forbes47567b72017-06-09 12:09:45 -07002973}
Chris Forbes4ae55b32017-06-09 14:42:56 -07002974
Mark Lobodzinski518eadc2019-03-09 12:07:30 -07002975bool CoreChecks::ValidateRayTracingPipelineNV(PIPELINE_STATE *pipeline) {
John Zulauf14c355b2019-06-27 16:09:37 -06002976 const auto &stage = pipeline->raytracingPipelineCI.ptr()->pStages[0];
Jeff Bolzfbe51582018-09-13 10:01:35 -05002977
John Zulauf14c355b2019-06-27 16:09:37 -06002978 const SHADER_MODULE_STATE *module = GetShaderModuleState(stage.module);
2979 const spirv_inst_iter entrypoint = FindEntrypoint(module, stage.pName, stage.stage);
Jeff Bolzfbe51582018-09-13 10:01:35 -05002980
John Zulauf14c355b2019-06-27 16:09:37 -06002981 return ValidatePipelineShaderStage(&stage, pipeline, pipeline->stage_state[0], module, entrypoint, false);
Jeff Bolzfbe51582018-09-13 10:01:35 -05002982}
2983
// Hash the raw SPIR-V words of a shader module; used as the validation-cache key.
uint32_t ValidationCache::MakeShaderHash(VkShaderModuleCreateInfo const *smci) { return XXH32(smci->pCode, smci->codeSize, 0); }
Chris Forbes9a61e082017-07-24 15:35:29 -07002985
Dave Houltona9df0ce2018-02-07 10:51:23 -07002986static ValidationCache *GetValidationCacheInfo(VkShaderModuleCreateInfo const *pCreateInfo) {
John Zulauf25ea2432019-04-05 10:07:38 -06002987 const auto validation_cache_ci = lvl_find_in_chain<VkShaderModuleValidationCacheCreateInfoEXT>(pCreateInfo->pNext);
2988 if (validation_cache_ci) {
John Zulauf146ee802019-04-05 15:31:06 -06002989 return CastFromHandle<ValidationCache *>(validation_cache_ci->validationCache);
Chris Forbes9a61e082017-07-24 15:35:29 -07002990 }
Chris Forbes9a61e082017-07-24 15:35:29 -07002991 return nullptr;
2992}
2993
// Pre-validate vkCreateShaderModule: check SPIR-V size alignment and run the
// SPIRV-Tools validator over the module, honoring layout-relaxation extensions.
// Returns true when an error was logged (and creation should be skipped).
bool CoreChecks::PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                   const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
    bool skip = false;
    spv_result_t spv_valid = SPV_SUCCESS;

    if (disabled.shader_validation) {
        return false;
    }

    // With VK_NV_glsl_shader the module may be GLSL text, so the word-multiple
    // requirement (and magic-number expectations below) do not apply.
    auto have_glsl_shader = device_extensions.vk_nv_glsl_shader;

    if (!have_glsl_shader && (pCreateInfo->codeSize % 4)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                        "VUID-VkShaderModuleCreateInfo-pCode-01376",
                        "SPIR-V module not valid: Codesize must be a multiple of 4 but is " PRINTF_SIZE_T_SPECIFIER ".",
                        pCreateInfo->codeSize);
    } else {
        // A cache hit means this exact module already passed validation; skip the
        // (expensive) SPIRV-Tools run entirely.
        auto cache = GetValidationCacheInfo(pCreateInfo);
        uint32_t hash = 0;
        if (cache) {
            hash = ValidationCache::MakeShaderHash(pCreateInfo);
            if (cache->Contains(hash)) return false;
        }

        // Use SPIRV-Tools validator to try and catch any issues with the module itself
        spv_target_env spirv_environment = SPV_ENV_VULKAN_1_0;
        if (api_version >= VK_API_VERSION_1_1) {
            spirv_environment = SPV_ENV_VULKAN_1_1;
        }
        spv_context ctx = spvContextCreate(spirv_environment);
        spv_const_binary_t binary{pCreateInfo->pCode, pCreateInfo->codeSize / sizeof(uint32_t)};
        spv_diagnostic diag = nullptr;
        // Relax the validator's layout rules to match whatever the device enabled.
        spv_validator_options options = spvValidatorOptionsCreate();
        if (device_extensions.vk_khr_relaxed_block_layout) {
            spvValidatorOptionsSetRelaxBlockLayout(options, true);
        }
        if (device_extensions.vk_khr_uniform_buffer_standard_layout &&
            enabled_features.uniform_buffer_standard_layout.uniformBufferStandardLayout == VK_TRUE) {
            spvValidatorOptionsSetUniformBufferStandardLayout(options, true);
        }
        if (device_extensions.vk_ext_scalar_block_layout &&
            enabled_features.scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
            spvValidatorOptionsSetScalarBlockLayout(options, true);
        }
        spv_valid = spvValidateWithOptions(ctx, options, &binary, &diag);
        if (spv_valid != SPV_SUCCESS) {
            // Only report if this looks like SPIR-V (magic number) or GLSL modules
            // are not allowed; otherwise the blob may be legitimate NV GLSL text.
            if (!have_glsl_shader || (pCreateInfo->pCode[0] == spv::MagicNumber)) {
                skip |=
                    log_msg(report_data, spv_valid == SPV_WARNING ? VK_DEBUG_REPORT_WARNING_BIT_EXT : VK_DEBUG_REPORT_ERROR_BIT_EXT,
                            VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_InconsistentSpirv,
                            "SPIR-V module not valid: %s", diag && diag->error ? diag->error : "(no error text)");
            }
        } else {
            // Remember that this module validated cleanly.
            if (cache) {
                cache->Insert(hash);
            }
        }

        // Release SPIRV-Tools objects in all paths through this branch.
        spvValidatorOptionsDestroy(options);
        spvDiagnosticDestroy(diag);
        spvContextDestroy(ctx);
    }

    return skip;
}
3059
Mark Lobodzinskib56bbb92019-02-18 11:49:59 -07003060void CoreChecks::PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
3061 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
3062 void *csm_state_data) {
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07003063 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinskib02a4852019-04-19 12:35:30 -06003064 if (enabled.gpu_validation) {
Mark Lobodzinski586d10e2019-03-08 18:19:48 -07003065 GpuPreCallCreateShaderModule(pCreateInfo, pAllocator, pShaderModule, &csm_state->unique_shader_id,
Mark Lobodzinski01734072019-02-13 17:39:15 -07003066 &csm_state->instrumented_create_info, &csm_state->instrumented_pgm);
3067 }
3068}
3069
John Zulauf7eeb6f72019-06-17 11:56:36 -06003070void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
3071 const VkAllocationCallbacks *pAllocator,
3072 VkShaderModule *pShaderModule, VkResult result,
3073 void *csm_state_data) {
Mark Lobodzinski01734072019-02-13 17:39:15 -07003074 if (VK_SUCCESS != result) return;
Mark Lobodzinski1db77e82019-03-01 10:02:54 -07003075 create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
Mark Lobodzinski01734072019-02-13 17:39:15 -07003076
Mark Lobodzinski544def72019-04-19 14:25:59 -06003077 spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
Mark Lobodzinski01734072019-02-13 17:39:15 -07003078 bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06003079 std::unique_ptr<SHADER_MODULE_STATE> new_shader_module(
3080 is_spirv ? new SHADER_MODULE_STATE(pCreateInfo, *pShaderModule, spirv_environment, csm_state->unique_shader_id)
3081 : new SHADER_MODULE_STATE());
Mark Lobodzinski7767ad82019-03-09 13:35:25 -07003082 shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
Mark Lobodzinski01734072019-02-13 17:39:15 -07003083}
Lockeaa8fdc02019-04-02 11:59:20 -06003084
Mark Lobodzinski3c59d972019-04-25 11:28:14 -06003085bool CoreChecks::ValidateComputeWorkGroupSizes(const SHADER_MODULE_STATE *shader) {
Lockeaa8fdc02019-04-02 11:59:20 -06003086 bool skip = false;
3087 uint32_t local_size_x = 0;
3088 uint32_t local_size_y = 0;
3089 uint32_t local_size_z = 0;
3090 if (FindLocalSize(shader, local_size_x, local_size_y, local_size_z)) {
3091 if (local_size_x > phys_dev_props.limits.maxComputeWorkGroupSize[0]) {
locke-lunarg9edc2812019-06-17 23:18:52 -06003092 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
3093 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
3094 "%s local_size_x (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[0] (%" PRIu32 ").",
3095 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
3096 phys_dev_props.limits.maxComputeWorkGroupSize[0]);
Lockeaa8fdc02019-04-02 11:59:20 -06003097 }
3098 if (local_size_y > phys_dev_props.limits.maxComputeWorkGroupSize[1]) {
locke-lunarg9edc2812019-06-17 23:18:52 -06003099 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
3100 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
3101 "%s local_size_y (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[1] (%" PRIu32 ").",
3102 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
3103 phys_dev_props.limits.maxComputeWorkGroupSize[1]);
Lockeaa8fdc02019-04-02 11:59:20 -06003104 }
3105 if (local_size_z > phys_dev_props.limits.maxComputeWorkGroupSize[2]) {
locke-lunarg9edc2812019-06-17 23:18:52 -06003106 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
3107 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
3108 "%s local_size_z (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[2] (%" PRIu32 ").",
3109 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
3110 phys_dev_props.limits.maxComputeWorkGroupSize[2]);
Lockeaa8fdc02019-04-02 11:59:20 -06003111 }
3112
3113 uint32_t limit = phys_dev_props.limits.maxComputeWorkGroupInvocations;
3114 uint64_t invocations = local_size_x * local_size_y;
3115 // Prevent overflow.
3116 bool fail = false;
3117 if (invocations > UINT32_MAX || invocations > limit) {
3118 fail = true;
3119 }
3120 if (!fail) {
3121 invocations *= local_size_z;
3122 if (invocations > UINT32_MAX || invocations > limit) {
3123 fail = true;
3124 }
3125 }
3126 if (fail) {
3127 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
3128 HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupInvocations",
locke-lunarg9edc2812019-06-17 23:18:52 -06003129 "%s local_size (%" PRIu32 ", %" PRIu32 ", %" PRIu32
Lockeaa8fdc02019-04-02 11:59:20 -06003130 ") exceeds device limit maxComputeWorkGroupInvocations (%" PRIu32 ").",
3131 report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x, local_size_y, local_size_z,
3132 limit);
3133 }
3134 }
3135 return skip;
3136}