// Copyright (c) 2018 The Khronos Group Inc.
// Copyright (c) 2018 Valve Corporation
// Copyright (c) 2018 LunarG Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "instrument_pass.h"

#include "source/cfa.h"
#include "source/spirv_constant.h"

namespace {

// Common Parameter Positions
static const int kInstCommonParamInstIdx = 0;
static const int kInstCommonParamCnt = 1;

// Indices of operands in SPIR-V instructions
static const int kEntryPointExecutionModelInIdx = 0;
static const int kEntryPointFunctionIdInIdx = 1;

}  // anonymous namespace

namespace spvtools {
namespace opt {

void InstrumentPass::MovePreludeCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr,
    std::unique_ptr<BasicBlock>* new_blk_ptr) {
  same_block_pre_.clear();
  same_block_post_.clear();
  // Initialize new block. Reuse label from original block.
  new_blk_ptr->reset(new BasicBlock(std::move(ref_block_itr->GetLabel())));
  // Move contents of original ref block up to ref instruction.
  for (auto cii = ref_block_itr->begin(); cii != ref_inst_itr;
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    std::unique_ptr<Instruction> mv_ptr(inst);
    // Remember same-block ops for possible regeneration.
    if (IsSameBlockOp(&*mv_ptr)) {
      auto* sb_inst_ptr = mv_ptr.get();
      same_block_pre_[mv_ptr->result_id()] = sb_inst_ptr;
    }
    (*new_blk_ptr)->AddInstruction(std::move(mv_ptr));
  }
}

void InstrumentPass::MovePostludeCode(
    UptrVectorIterator<BasicBlock> ref_block_itr, BasicBlock* new_blk_ptr) {
  // new_blk_ptr->reset(new BasicBlock(NewLabel(ref_block_itr->id())));
  // Move contents of original ref block.
  for (auto cii = ref_block_itr->begin(); cii != ref_block_itr->end();
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    std::unique_ptr<Instruction> mv_inst(inst);
    // Regenerate any same-block instruction that has not been seen in the
    // current block.
    if (same_block_pre_.size() > 0) {
      CloneSameBlockOps(&mv_inst, &same_block_post_, &same_block_pre_,
                        new_blk_ptr);
      // Remember same-block ops in this block.
      if (IsSameBlockOp(&*mv_inst)) {
        const uint32_t rid = mv_inst->result_id();
        same_block_post_[rid] = rid;
      }
    }
    new_blk_ptr->AddInstruction(std::move(mv_inst));
  }
}

std::unique_ptr<Instruction> InstrumentPass::NewLabel(uint32_t label_id) {
  std::unique_ptr<Instruction> newLabel(
      new Instruction(context(), SpvOpLabel, 0, label_id, {}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*newLabel);
  return newLabel;
}

uint32_t InstrumentPass::GenUintCastCode(uint32_t val_id,
                                         InstructionBuilder* builder) {
  // Cast value to 32-bit unsigned if necessary
  if (get_def_use_mgr()->GetDef(val_id)->type_id() == GetUintId())
    return val_id;
  return builder->AddUnaryOp(GetUintId(), SpvOpBitcast, val_id)->result_id();
}

void InstrumentPass::GenDebugOutputFieldCode(uint32_t base_offset_id,
                                             uint32_t field_offset,
                                             uint32_t field_value_id,
                                             InstructionBuilder* builder) {
  // Cast value to 32-bit unsigned if necessary
  uint32_t val_id = GenUintCastCode(field_value_id, builder);
  // Store value
  Instruction* data_idx_inst =
      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, base_offset_id,
                           builder->GetUintConstantId(field_offset));
  uint32_t buf_id = GetOutputBufferId();
  uint32_t buf_uint_ptr_id = GetOutputBufferPtrId();
  Instruction* achain_inst =
      builder->AddTernaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
                            builder->GetUintConstantId(kDebugOutputDataOffset),
                            data_idx_inst->result_id());
  (void)builder->AddBinaryOp(0, SpvOpStore, achain_inst->result_id(), val_id);
}
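
// Illustrative sketch of the SPIR-V emitted by GenDebugOutputFieldCode for one
// record field (result names below are placeholders, not ids produced by the
// pass):
//   %idx = OpIAdd %uint %base_offset %uint_field_offset
//   %ptr = OpAccessChain %_ptr_StorageBuffer_uint %output_buffer
//                        %uint_kDebugOutputDataOffset %idx
//          OpStore %ptr %value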

void InstrumentPass::GenCommonStreamWriteCode(uint32_t record_sz,
                                              uint32_t inst_id,
                                              uint32_t stage_idx,
                                              uint32_t base_offset_id,
                                              InstructionBuilder* builder) {
  // Store record size
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutSize,
                          builder->GetUintConstantId(record_sz), builder);
  // Store Shader Id
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutShaderId,
                          builder->GetUintConstantId(shader_id_), builder);
  // Store Instruction Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutInstructionIdx, inst_id,
                          builder);
  // Store Stage Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutStageIdx,
                          builder->GetUintConstantId(stage_idx), builder);
}
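
// The common header written above occupies the first words of every debug
// record: the total record size, the shader module id, the index of the
// instrumented instruction, and the execution stage index, at offsets
// kInstCommonOutSize, kInstCommonOutShaderId, kInstCommonOutInstructionIdx
// and kInstCommonOutStageIdx respectively.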

void InstrumentPass::GenFragCoordEltDebugOutputCode(
    uint32_t base_offset_id, uint32_t uint_frag_coord_id, uint32_t element,
    InstructionBuilder* builder) {
  Instruction* element_val_inst = builder->AddIdLiteralOp(
      GetUintId(), SpvOpCompositeExtract, uint_frag_coord_id, element);
  GenDebugOutputFieldCode(base_offset_id, kInstFragOutFragCoordX + element,
                          element_val_inst->result_id(), builder);
}

uint32_t InstrumentPass::GenVarLoad(uint32_t var_id,
                                    InstructionBuilder* builder) {
  Instruction* var_inst = get_def_use_mgr()->GetDef(var_id);
  uint32_t type_id = GetPointeeTypeId(var_inst);
  Instruction* load_inst = builder->AddUnaryOp(type_id, SpvOpLoad, var_id);
  return load_inst->result_id();
}

void InstrumentPass::GenBuiltinOutputCode(uint32_t builtin_id,
                                          uint32_t builtin_off,
                                          uint32_t base_offset_id,
                                          InstructionBuilder* builder) {
  // Load and store builtin
  uint32_t load_id = GenVarLoad(builtin_id, builder);
  GenDebugOutputFieldCode(base_offset_id, builtin_off, load_id, builder);
}

void InstrumentPass::GenStageStreamWriteCode(uint32_t stage_idx,
                                             uint32_t base_offset_id,
                                             InstructionBuilder* builder) {
  // TODO(greg-lunarg): Add support for all stages
  switch (stage_idx) {
    case SpvExecutionModelVertex: {
      // Load and store VertexIndex and InstanceIndex
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInVertexIndex),
          kInstVertOutVertexIndex, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInstanceIndex),
          kInstVertOutInstanceIndex, base_offset_id, builder);
    } break;
    case SpvExecutionModelGLCompute: {
      // Load and store GlobalInvocationId.
      uint32_t load_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInGlobalInvocationId),
          builder);
      Instruction* x_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 0);
      Instruction* y_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 1);
      Instruction* z_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 2);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdX,
                              x_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdY,
                              y_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdZ,
                              z_inst->result_id(), builder);
    } break;
    case SpvExecutionModelGeometry: {
      // Load and store PrimitiveId and InvocationId.
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstGeomOutPrimitiveId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstGeomOutInvocationId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationControl: {
      // Load and store InvocationId and PrimitiveId
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstTessCtlOutInvocationId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstTessCtlOutPrimitiveId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationEvaluation: {
      // Load and store PrimitiveId and TessCoord.uv
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstTessEvalOutPrimitiveId, base_offset_id, builder);
      uint32_t load_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInTessCoord), builder);
      Instruction* uvec3_cast_inst =
          builder->AddUnaryOp(GetVec3UintId(), SpvOpBitcast, load_id);
      uint32_t uvec3_cast_id = uvec3_cast_inst->result_id();
      Instruction* u_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 0);
      Instruction* v_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 1);
      GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordU,
                              u_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordV,
                              v_inst->result_id(), builder);
    } break;
    case SpvExecutionModelFragment: {
      // Load FragCoord and convert to Uint
      Instruction* frag_coord_inst = builder->AddUnaryOp(
          GetVec4FloatId(), SpvOpLoad,
          context()->GetBuiltinInputVarId(SpvBuiltInFragCoord));
      Instruction* uint_frag_coord_inst = builder->AddUnaryOp(
          GetVec4UintId(), SpvOpBitcast, frag_coord_inst->result_id());
      for (uint32_t u = 0; u < 2u; ++u)
        GenFragCoordEltDebugOutputCode(
            base_offset_id, uint_frag_coord_inst->result_id(), u, builder);
    } break;
    case SpvExecutionModelRayGenerationNV:
    case SpvExecutionModelIntersectionNV:
    case SpvExecutionModelAnyHitNV:
    case SpvExecutionModelClosestHitNV:
    case SpvExecutionModelMissNV:
    case SpvExecutionModelCallableNV: {
      // Load and store LaunchIdNV.
      uint32_t launch_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInLaunchIdNV), builder);
      Instruction* x_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 0);
      Instruction* y_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 1);
      Instruction* z_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 2);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdX,
                              x_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdY,
                              y_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdZ,
                              z_launch_inst->result_id(), builder);
    } break;
    default: { assert(false && "unsupported stage"); } break;
  }
}
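
// Each stage contributes its identifying built-ins to the stage-specific
// words of the record: VertexIndex/InstanceIndex for vertex shaders,
// GlobalInvocationId for compute, PrimitiveId/InvocationId for geometry and
// tessellation control, PrimitiveId plus TessCoord.uv for tessellation
// evaluation, FragCoord.xy for fragment shaders, and LaunchIdNV for the NV
// ray tracing stages.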

void InstrumentPass::GenDebugStreamWrite(
    uint32_t instruction_idx, uint32_t stage_idx,
    const std::vector<uint32_t>& validation_ids, InstructionBuilder* builder) {
  // Call the debug output function, passing the instruction index and the
  // validation-specific ids as arguments.
  uint32_t val_id_cnt = static_cast<uint32_t>(validation_ids.size());
  uint32_t output_func_id = GetStreamWriteFunctionId(stage_idx, val_id_cnt);
  std::vector<uint32_t> args = {output_func_id,
                                builder->GetUintConstantId(instruction_idx)};
  (void)args.insert(args.end(), validation_ids.begin(), validation_ids.end());
  (void)builder->AddNaryOp(GetVoidId(), SpvOpFunctionCall, args);
}
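
// The call generated above looks roughly like the following (illustrative,
// ids are placeholders):
//   %unused = OpFunctionCall %void %inst_stream_write_func
//             %uint_instruction_idx %validation_id_0 ... %validation_id_n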

uint32_t InstrumentPass::GenDebugDirectRead(
    const std::vector<uint32_t>& offset_ids, InstructionBuilder* builder) {
  // Call the debug input function, passing the offset ids as arguments.
  uint32_t off_id_cnt = static_cast<uint32_t>(offset_ids.size());
  uint32_t input_func_id = GetDirectReadFunctionId(off_id_cnt);
  std::vector<uint32_t> args = {input_func_id};
  (void)args.insert(args.end(), offset_ids.begin(), offset_ids.end());
  return builder->AddNaryOp(GetUintId(), SpvOpFunctionCall, args)->result_id();
}
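
// Likewise, GenDebugDirectRead produces a single call that returns the loaded
// value (illustrative, ids are placeholders):
//   %value = OpFunctionCall %uint %inst_direct_read_func %offset_0 ... %offset_n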

bool InstrumentPass::IsSameBlockOp(const Instruction* inst) const {
  return inst->opcode() == SpvOpSampledImage || inst->opcode() == SpvOpImage;
}

void InstrumentPass::CloneSameBlockOps(
    std::unique_ptr<Instruction>* inst,
    std::unordered_map<uint32_t, uint32_t>* same_blk_post,
    std::unordered_map<uint32_t, Instruction*>* same_blk_pre,
    BasicBlock* block_ptr) {
  bool changed = false;
  (*inst)->ForEachInId([&same_blk_post, &same_blk_pre, &block_ptr, &changed,
                        this](uint32_t* iid) {
    const auto map_itr = (*same_blk_post).find(*iid);
    if (map_itr == (*same_blk_post).end()) {
      const auto map_itr2 = (*same_blk_pre).find(*iid);
      if (map_itr2 != (*same_blk_pre).end()) {
        // Clone pre-call same-block ops, map result id.
        const Instruction* in_inst = map_itr2->second;
        std::unique_ptr<Instruction> sb_inst(in_inst->Clone(context()));
        const uint32_t rid = sb_inst->result_id();
        const uint32_t nid = this->TakeNextId();
        get_decoration_mgr()->CloneDecorations(rid, nid);
        sb_inst->SetResultId(nid);
        get_def_use_mgr()->AnalyzeInstDefUse(&*sb_inst);
        (*same_blk_post)[rid] = nid;
        *iid = nid;
        changed = true;
        CloneSameBlockOps(&sb_inst, same_blk_post, same_blk_pre, block_ptr);
        block_ptr->AddInstruction(std::move(sb_inst));
      }
    } else {
      // Reset same-block op operand if necessary
      if (*iid != map_itr->second) {
        *iid = map_itr->second;
        changed = true;
      }
    }
  });
  if (changed) get_def_use_mgr()->AnalyzeInstUse(&**inst);
}
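
// Cloning is needed because SPIR-V requires the result of OpSampledImage to
// be consumed in the block in which it is defined, and this pass treats
// OpImage the same way. Once the reference block is split, any use in the
// post-call block must refer to a fresh copy generated in that block.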

void InstrumentPass::UpdateSucceedingPhis(
    std::vector<std::unique_ptr<BasicBlock>>& new_blocks) {
  const auto first_blk = new_blocks.begin();
  const auto last_blk = new_blocks.end() - 1;
  const uint32_t first_id = (*first_blk)->id();
  const uint32_t last_id = (*last_blk)->id();
  const BasicBlock& const_last_block = *last_blk->get();
  const_last_block.ForEachSuccessorLabel(
      [&first_id, &last_id, this](const uint32_t succ) {
        BasicBlock* sbp = this->id2block_[succ];
        sbp->ForEachPhiInst([&first_id, &last_id, this](Instruction* phi) {
          bool changed = false;
          phi->ForEachInId([&first_id, &last_id, &changed](uint32_t* id) {
            if (*id == first_id) {
              *id = last_id;
              changed = true;
            }
          });
          if (changed) get_def_use_mgr()->AnalyzeInstUse(phi);
        });
      });
}
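
// When a reference block is split into several new blocks, any OpPhi in a
// successor block still names the original block's label as a parent. The
// loop above rewrites those parent ids to the label of the last new block,
// which now holds the branch to that successor.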

uint32_t InstrumentPass::GetOutputBufferPtrId() {
  if (output_buffer_ptr_id_ == 0) {
    output_buffer_ptr_id_ = context()->get_type_mgr()->FindPointerToType(
        GetUintId(), SpvStorageClassStorageBuffer);
  }
  return output_buffer_ptr_id_;
}

uint32_t InstrumentPass::GetInputBufferTypeId() {
  return (validation_id_ == kInstValidationIdBuffAddr) ? GetUint64Id()
                                                       : GetUintId();
}

uint32_t InstrumentPass::GetInputBufferPtrId() {
  if (input_buffer_ptr_id_ == 0) {
    input_buffer_ptr_id_ = context()->get_type_mgr()->FindPointerToType(
        GetInputBufferTypeId(), SpvStorageClassStorageBuffer);
  }
  return input_buffer_ptr_id_;
}

uint32_t InstrumentPass::GetOutputBufferBinding() {
  switch (validation_id_) {
    case kInstValidationIdBindless:
      return kDebugOutputBindingStream;
    case kInstValidationIdBuffAddr:
      return kDebugOutputBindingStream;
    default:
      assert(false && "unexpected validation id");
  }
  return 0;
}

uint32_t InstrumentPass::GetInputBufferBinding() {
  switch (validation_id_) {
    case kInstValidationIdBindless:
      return kDebugInputBindingBindless;
    case kInstValidationIdBuffAddr:
      return kDebugInputBindingBuffAddr;
    default:
      assert(false && "unexpected validation id");
  }
  return 0;
}

analysis::Type* InstrumentPass::GetUintXRuntimeArrayType(
    uint32_t width, analysis::Type** rarr_ty) {
  if (*rarr_ty == nullptr) {
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint_ty(width, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    analysis::RuntimeArray uint_rarr_ty_tmp(reg_uint_ty);
    *rarr_ty = type_mgr->GetRegisteredType(&uint_rarr_ty_tmp);
    uint32_t uint_arr_ty_id = type_mgr->GetTypeInstruction(*rarr_ty);
    // By the Vulkan spec, a pre-existing RuntimeArray of uint must be part of
    // a block, and will therefore be decorated with an ArrayStride. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(uint_arr_ty_id) == 0 &&
           "used RuntimeArray type returned");
    deco_mgr->AddDecorationVal(uint_arr_ty_id, SpvDecorationArrayStride,
                               width / 8u);
  }
  return *rarr_ty;
}

analysis::Type* InstrumentPass::GetUintRuntimeArrayType(uint32_t width) {
  analysis::Type** rarr_ty =
      (width == 64) ? &uint64_rarr_ty_ : &uint32_rarr_ty_;
  return GetUintXRuntimeArrayType(width, rarr_ty);
}

void InstrumentPass::AddStorageBufferExt() {
  if (storage_buffer_ext_defined_) return;
  if (!get_feature_mgr()->HasExtension(kSPV_KHR_storage_buffer_storage_class)) {
    context()->AddExtension("SPV_KHR_storage_buffer_storage_class");
  }
  storage_buffer_ext_defined_ = true;
}

// Return id for output buffer
uint32_t InstrumentPass::GetOutputBufferId() {
  if (output_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Type* reg_uint_rarr_ty = GetUintRuntimeArrayType(32);
    analysis::Integer uint_ty(32, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    analysis::Struct buf_ty({reg_uint_ty, reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t obufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(obufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(obufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputSizeOffset,
                                  SpvDecorationOffset, 0);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputDataOffset,
                                  SpvDecorationOffset, 4);
    uint32_t obufTyPtrId_ =
        type_mgr->FindPointerToType(obufTyId, SpvStorageClassStorageBuffer);
    output_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, obufTyPtrId_, output_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationBinding,
                               GetOutputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {output_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return output_buffer_id_;
}

uint32_t InstrumentPass::GetInputBufferId() {
  if (input_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    uint32_t width = (validation_id_ == kInstValidationIdBuffAddr) ? 64u : 32u;
    analysis::Type* reg_uint_rarr_ty = GetUintRuntimeArrayType(width);
    analysis::Struct buf_ty({reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t ibufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(ibufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(ibufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(ibufTyId, 0, SpvDecorationOffset, 0);
    uint32_t ibufTyPtrId_ =
        type_mgr->FindPointerToType(ibufTyId, SpvStorageClassStorageBuffer);
    input_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, ibufTyPtrId_, input_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationBinding,
                               GetInputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {input_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return input_buffer_id_;
}

uint32_t InstrumentPass::GetVec4FloatId() {
  if (v4float_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Float float_ty(32);
    analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
    analysis::Vector v4float_ty(reg_float_ty, 4);
    analysis::Type* reg_v4float_ty = type_mgr->GetRegisteredType(&v4float_ty);
    v4float_id_ = type_mgr->GetTypeInstruction(reg_v4float_ty);
  }
  return v4float_id_;
}

uint32_t InstrumentPass::GetUintId() {
  if (uint_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint_ty(32, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    uint_id_ = type_mgr->GetTypeInstruction(reg_uint_ty);
  }
  return uint_id_;
}

uint32_t InstrumentPass::GetUint64Id() {
  if (uint64_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint64_ty(64, false);
    analysis::Type* reg_uint64_ty = type_mgr->GetRegisteredType(&uint64_ty);
    uint64_id_ = type_mgr->GetTypeInstruction(reg_uint64_ty);
  }
  return uint64_id_;
}

uint32_t InstrumentPass::GetVecUintId(uint32_t len) {
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  analysis::Integer uint_ty(32, false);
  analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
  analysis::Vector v_uint_ty(reg_uint_ty, len);
  analysis::Type* reg_v_uint_ty = type_mgr->GetRegisteredType(&v_uint_ty);
  uint32_t v_uint_id = type_mgr->GetTypeInstruction(reg_v_uint_ty);
  return v_uint_id;
}

uint32_t InstrumentPass::GetVec4UintId() {
  if (v4uint_id_ == 0) v4uint_id_ = GetVecUintId(4u);
  return v4uint_id_;
}

uint32_t InstrumentPass::GetVec3UintId() {
  if (v3uint_id_ == 0) v3uint_id_ = GetVecUintId(3u);
  return v3uint_id_;
}

uint32_t InstrumentPass::GetBoolId() {
  if (bool_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Bool bool_ty;
    analysis::Type* reg_bool_ty = type_mgr->GetRegisteredType(&bool_ty);
    bool_id_ = type_mgr->GetTypeInstruction(reg_bool_ty);
  }
  return bool_id_;
}

uint32_t InstrumentPass::GetVoidId() {
  if (void_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Void void_ty;
    analysis::Type* reg_void_ty = type_mgr->GetRegisteredType(&void_ty);
    void_id_ = type_mgr->GetTypeInstruction(reg_void_ty);
  }
  return void_id_;
}

uint32_t InstrumentPass::GetStreamWriteFunctionId(uint32_t stage_idx,
                                                  uint32_t val_spec_param_cnt) {
  // Total param count is common params plus validation-specific params
  uint32_t param_cnt = kInstCommonParamCnt + val_spec_param_cnt;
  if (output_func_id_ == 0) {
    // Create function
    output_func_id_ = TakeNextId();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    std::vector<const analysis::Type*> param_types;
    for (uint32_t c = 0; c < param_cnt; ++c)
      param_types.push_back(type_mgr->GetType(GetUintId()));
    analysis::Function func_ty(type_mgr->GetType(GetVoidId()), param_types);
    analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
    std::unique_ptr<Instruction> func_inst(new Instruction(
        get_module()->context(), SpvOpFunction, GetVoidId(), output_func_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvFunctionControlMaskNone}},
         {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
          {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
    std::unique_ptr<Function> output_func =
        MakeUnique<Function>(std::move(func_inst));
    // Add parameters
    std::vector<uint32_t> param_vec;
    for (uint32_t c = 0; c < param_cnt; ++c) {
      uint32_t pid = TakeNextId();
      param_vec.push_back(pid);
      std::unique_ptr<Instruction> param_inst(
          new Instruction(get_module()->context(), SpvOpFunctionParameter,
                          GetUintId(), pid, {}));
      get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
      output_func->AddParameter(std::move(param_inst));
    }
    // Create first block
    uint32_t test_blk_id = TakeNextId();
    std::unique_ptr<Instruction> test_label(NewLabel(test_blk_id));
    std::unique_ptr<BasicBlock> new_blk_ptr =
        MakeUnique<BasicBlock>(std::move(test_label));
    InstructionBuilder builder(
        context(), &*new_blk_ptr,
        IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
    // Generate a test that the debug output buffer size will not be exceeded.
    uint32_t val_spec_offset = kInstStageOutCnt;
    uint32_t obuf_record_sz = val_spec_offset + val_spec_param_cnt;
    uint32_t buf_id = GetOutputBufferId();
    uint32_t buf_uint_ptr_id = GetOutputBufferPtrId();
    Instruction* obuf_curr_sz_ac_inst =
        builder.AddBinaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
                            builder.GetUintConstantId(kDebugOutputSizeOffset));
    // Fetch the current debug buffer written size atomically, adding the
    // size of the record to be written.
    uint32_t obuf_record_sz_id = builder.GetUintConstantId(obuf_record_sz);
    uint32_t mask_none_id = builder.GetUintConstantId(SpvMemoryAccessMaskNone);
    uint32_t scope_invok_id = builder.GetUintConstantId(SpvScopeInvocation);
    Instruction* obuf_curr_sz_inst = builder.AddQuadOp(
        GetUintId(), SpvOpAtomicIAdd, obuf_curr_sz_ac_inst->result_id(),
        scope_invok_id, mask_none_id, obuf_record_sz_id);
    uint32_t obuf_curr_sz_id = obuf_curr_sz_inst->result_id();
    // Compute new written size
    Instruction* obuf_new_sz_inst =
        builder.AddBinaryOp(GetUintId(), SpvOpIAdd, obuf_curr_sz_id,
                            builder.GetUintConstantId(obuf_record_sz));
    // Fetch the data bound
    Instruction* obuf_bnd_inst =
        builder.AddIdLiteralOp(GetUintId(), SpvOpArrayLength,
                               GetOutputBufferId(), kDebugOutputDataOffset);
    // Test that new written size is less than or equal to debug output
    // data bound
    Instruction* obuf_safe_inst = builder.AddBinaryOp(
        GetBoolId(), SpvOpULessThanEqual, obuf_new_sz_inst->result_id(),
        obuf_bnd_inst->result_id());
    uint32_t merge_blk_id = TakeNextId();
    uint32_t write_blk_id = TakeNextId();
    std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
    std::unique_ptr<Instruction> write_label(NewLabel(write_blk_id));
    (void)builder.AddConditionalBranch(obuf_safe_inst->result_id(),
                                       write_blk_id, merge_blk_id, merge_blk_id,
                                       SpvSelectionControlMaskNone);
    // Close safety test block and gen write block
    new_blk_ptr->SetParent(&*output_func);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    new_blk_ptr = MakeUnique<BasicBlock>(std::move(write_label));
    builder.SetInsertPoint(&*new_blk_ptr);
    // Generate common and stage-specific debug record members
    GenCommonStreamWriteCode(obuf_record_sz, param_vec[kInstCommonParamInstIdx],
                             stage_idx, obuf_curr_sz_id, &builder);
    GenStageStreamWriteCode(stage_idx, obuf_curr_sz_id, &builder);
    // Gen writes of validation specific data
    for (uint32_t i = 0; i < val_spec_param_cnt; ++i) {
      GenDebugOutputFieldCode(obuf_curr_sz_id, val_spec_offset + i,
                              param_vec[kInstCommonParamCnt + i], &builder);
    }
    // Close write block and gen merge block
    (void)builder.AddBranch(merge_blk_id);
    new_blk_ptr->SetParent(&*output_func);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    new_blk_ptr = MakeUnique<BasicBlock>(std::move(merge_label));
    builder.SetInsertPoint(&*new_blk_ptr);
    // Close merge block and function and add function to module
    (void)builder.AddNullaryOp(0, SpvOpReturn);
    new_blk_ptr->SetParent(&*output_func);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    std::unique_ptr<Instruction> func_end_inst(
        new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
    output_func->SetFunctionEnd(std::move(func_end_inst));
    context()->AddFunction(std::move(output_func));
    output_func_param_cnt_ = param_cnt;
  }
  assert(param_cnt == output_func_param_cnt_ && "bad arg count");
  return output_func_id_;
}
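
// Shape of the generated stream-write function (a sketch; ids and labels are
// placeholders):
//   %fn = OpFunction %void None %fn_ty        ; one uint parameter per record word
//   ; test block: atomically bump the written-size word and bounds-check
//   %sz = OpAtomicIAdd %uint %size_ptr %scope %semantics %uint_record_sz
//   %ok = OpULessThanEqual %bool %new_sz %data_bound
//         OpSelectionMerge %merge None
//         OpBranchConditional %ok %write %merge
//   ; write block: store common, stage-specific and validation-specific words
//   ; merge block: OpReturn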

uint32_t InstrumentPass::GetDirectReadFunctionId(uint32_t param_cnt) {
  uint32_t func_id = param2input_func_id_[param_cnt];
  if (func_id != 0) return func_id;
  // Create input function for param_cnt.
  func_id = TakeNextId();
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  std::vector<const analysis::Type*> param_types;
  for (uint32_t c = 0; c < param_cnt; ++c)
    param_types.push_back(type_mgr->GetType(GetUintId()));
  uint32_t ibuf_type_id = GetInputBufferTypeId();
  analysis::Function func_ty(type_mgr->GetType(ibuf_type_id), param_types);
  analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
  std::unique_ptr<Instruction> func_inst(new Instruction(
      get_module()->context(), SpvOpFunction, ibuf_type_id, func_id,
      {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
        {SpvFunctionControlMaskNone}},
       {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
        {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
  std::unique_ptr<Function> input_func =
      MakeUnique<Function>(std::move(func_inst));
  // Add parameters
  std::vector<uint32_t> param_vec;
  for (uint32_t c = 0; c < param_cnt; ++c) {
    uint32_t pid = TakeNextId();
    param_vec.push_back(pid);
    std::unique_ptr<Instruction> param_inst(new Instruction(
        get_module()->context(), SpvOpFunctionParameter, GetUintId(), pid, {}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
    input_func->AddParameter(std::move(param_inst));
  }
  // Create block
  uint32_t blk_id = TakeNextId();
  std::unique_ptr<Instruction> blk_label(NewLabel(blk_id));
  std::unique_ptr<BasicBlock> new_blk_ptr =
      MakeUnique<BasicBlock>(std::move(blk_label));
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // For each offset parameter, generate new offset with parameter, adding last
  // loaded value if it exists, and load value from input buffer at new offset.
  // Return last loaded value.
  uint32_t buf_id = GetInputBufferId();
  uint32_t buf_ptr_id = GetInputBufferPtrId();
  uint32_t last_value_id = 0;
  for (uint32_t p = 0; p < param_cnt; ++p) {
    uint32_t offset_id;
    if (p == 0) {
      offset_id = param_vec[0];
    } else {
      if (ibuf_type_id != GetUintId()) {
        Instruction* ucvt_inst =
            builder.AddUnaryOp(GetUintId(), SpvOpUConvert, last_value_id);
        last_value_id = ucvt_inst->result_id();
      }
      Instruction* offset_inst = builder.AddBinaryOp(
          GetUintId(), SpvOpIAdd, last_value_id, param_vec[p]);
      offset_id = offset_inst->result_id();
    }
    Instruction* ac_inst = builder.AddTernaryOp(
        buf_ptr_id, SpvOpAccessChain, buf_id,
        builder.GetUintConstantId(kDebugInputDataOffset), offset_id);
    Instruction* load_inst =
        builder.AddUnaryOp(ibuf_type_id, SpvOpLoad, ac_inst->result_id());
    last_value_id = load_inst->result_id();
  }
  (void)builder.AddInstruction(MakeUnique<Instruction>(
      context(), SpvOpReturnValue, 0, 0,
      std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {last_value_id}}}));
  // Close block and function and add function to module
  new_blk_ptr->SetParent(&*input_func);
  input_func->AddBasicBlock(std::move(new_blk_ptr));
  std::unique_ptr<Instruction> func_end_inst(
      new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
  input_func->SetFunctionEnd(std::move(func_end_inst));
  context()->AddFunction(std::move(input_func));
  param2input_func_id_[param_cnt] = func_id;
  return func_id;
}
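
// Shape of the generated direct-read function for two offset parameters
// (a sketch; ids are placeholders):
//   %p0   = OpFunctionParameter %uint
//   %p1   = OpFunctionParameter %uint
//   %ac0  = OpAccessChain %buf_ptr %input_buffer %uint_kDebugInputDataOffset %p0
//   %v0   = OpLoad %ibuf_type %ac0
//   %off1 = OpIAdd %uint %v0 %p1   ; preceded by OpUConvert when the buffer
//                                  ; element type is 64-bit
//   %ac1  = OpAccessChain %buf_ptr %input_buffer %uint_kDebugInputDataOffset %off1
//   %v1   = OpLoad %ibuf_type %ac1
//           OpReturnValue %v1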

bool InstrumentPass::InstrumentFunction(Function* func, uint32_t stage_idx,
                                        InstProcessFunction& pfn) {
  bool modified = false;
  // Compute function index
  uint32_t function_idx = 0;
  for (auto fii = get_module()->begin(); fii != get_module()->end(); ++fii) {
    if (&*fii == func) break;
    ++function_idx;
  }
  std::vector<std::unique_ptr<BasicBlock>> new_blks;
  // Using block iterators here because of block erasures and insertions.
  for (auto bi = func->begin(); bi != func->end(); ++bi) {
    for (auto ii = bi->begin(); ii != bi->end();) {
      // Generate instrumentation if warranted
      pfn(ii, bi, stage_idx, &new_blks);
      if (new_blks.size() == 0) {
        ++ii;
        continue;
      }
      // Add new blocks to label id map
      for (auto& blk : new_blks) id2block_[blk->id()] = &*blk;
      // If there are new blocks we know there will always be two or
      // more, so update succeeding phis with label of new last block.
      size_t newBlocksSize = new_blks.size();
      assert(newBlocksSize > 1);
      UpdateSucceedingPhis(new_blks);
      // Replace original block with new block(s)
      bi = bi.Erase();
      for (auto& bb : new_blks) {
        bb->SetParent(func);
      }
      bi = bi.InsertBefore(&new_blks);
      // Reset block iterator to last new block
      for (size_t i = 0; i < newBlocksSize - 1; i++) ++bi;
      modified = true;
      // Restart instrumenting at beginning of last new block,
      // but skip over any new phi or copy instruction.
      ii = bi->begin();
      if (ii->opcode() == SpvOpPhi || ii->opcode() == SpvOpCopyObject) ++ii;
      new_blks.clear();
    }
  }
  return modified;
}

bool InstrumentPass::InstProcessCallTreeFromRoots(InstProcessFunction& pfn,
                                                  std::queue<uint32_t>* roots,
                                                  uint32_t stage_idx) {
  bool modified = false;
  std::unordered_set<uint32_t> done;
  // Don't process input and output functions
  for (auto& ifn : param2input_func_id_) done.insert(ifn.second);
  if (output_func_id_ != 0) done.insert(output_func_id_);
  // Process all functions from roots
  while (!roots->empty()) {
    const uint32_t fi = roots->front();
    roots->pop();
    if (done.insert(fi).second) {
      Function* fn = id2function_.at(fi);
      // Add calls first so we don't add new output function
      context()->AddCalls(fn, roots);
      modified = InstrumentFunction(fn, stage_idx, pfn) || modified;
    }
  }
  return modified;
}

bool InstrumentPass::InstProcessEntryPointCallTree(InstProcessFunction& pfn) {
  // Check that format version 2 requested
  if (version_ != 2u) {
    if (consumer()) {
      std::string message = "Unsupported instrumentation format requested";
      consumer()(SPV_MSG_ERROR, 0, {0, 0, 0}, message.c_str());
    }
    return false;
  }
  // Make sure all entry points have the same execution model. Do not
  // instrument if they do not.
  // TODO(greg-lunarg): Handle mixed stages. Technically, a shader module
  // can contain entry points with different execution models, although
  // such modules will likely be rare as GLSL and HLSL are geared toward
  // one model per module. In such cases we will need
  // to clone any functions which are in the call trees of entrypoints
  // with differing execution models.
  uint32_t ecnt = 0;
  uint32_t stage = SpvExecutionModelMax;
  for (auto& e : get_module()->entry_points()) {
    if (ecnt == 0)
      stage = e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx);
    else if (e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx) !=
             stage) {
      if (consumer()) {
        std::string message = "Mixed stage shader module not supported";
        consumer()(SPV_MSG_ERROR, 0, {0, 0, 0}, message.c_str());
      }
      return false;
    }
    ++ecnt;
  }
  // Check for supported stages
  if (stage != SpvExecutionModelVertex && stage != SpvExecutionModelFragment &&
      stage != SpvExecutionModelGeometry &&
      stage != SpvExecutionModelGLCompute &&
      stage != SpvExecutionModelTessellationControl &&
      stage != SpvExecutionModelTessellationEvaluation &&
      stage != SpvExecutionModelRayGenerationNV &&
      stage != SpvExecutionModelIntersectionNV &&
      stage != SpvExecutionModelAnyHitNV &&
      stage != SpvExecutionModelClosestHitNV &&
      stage != SpvExecutionModelMissNV &&
      stage != SpvExecutionModelCallableNV) {
    if (consumer()) {
      std::string message = "Stage not supported by instrumentation";
      consumer()(SPV_MSG_ERROR, 0, {0, 0, 0}, message.c_str());
    }
    return false;
  }
  // Add together the roots of all entry points
  std::queue<uint32_t> roots;
  for (auto& e : get_module()->entry_points()) {
    roots.push(e.GetSingleWordInOperand(kEntryPointFunctionIdInIdx));
  }
  bool modified = InstProcessCallTreeFromRoots(pfn, &roots, stage);
  return modified;
}

void InstrumentPass::InitializeInstrument() {
  output_buffer_id_ = 0;
  output_buffer_ptr_id_ = 0;
  input_buffer_ptr_id_ = 0;
  output_func_id_ = 0;
  output_func_param_cnt_ = 0;
  input_buffer_id_ = 0;
  v4float_id_ = 0;
  uint_id_ = 0;
  uint64_id_ = 0;
  v4uint_id_ = 0;
  v3uint_id_ = 0;
  bool_id_ = 0;
  void_id_ = 0;
  storage_buffer_ext_defined_ = false;
  uint32_rarr_ty_ = nullptr;
  uint64_rarr_ty_ = nullptr;

  // clear collections
  id2function_.clear();
  id2block_.clear();

  // Initialize function and block maps.
  for (auto& fn : *get_module()) {
    id2function_[fn.result_id()] = &fn;
    for (auto& blk : fn) {
      id2block_[blk.id()] = &blk;
    }
  }

  // Remember original instruction offsets
  uint32_t module_offset = 0;
  Module* module = get_module();
  for (auto& i : context()->capabilities()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->extensions()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->ext_inst_imports()) {
    (void)i;
    ++module_offset;
  }
  ++module_offset;  // memory_model
  for (auto& i : module->entry_points()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->execution_modes()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs1()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs2()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs3()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->ext_inst_debuginfo()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->annotations()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->types_values()) {
    module_offset += 1;
    module_offset += static_cast<uint32_t>(i.dbg_line_insts().size());
  }

  auto curr_fn = get_module()->begin();
  for (; curr_fn != get_module()->end(); ++curr_fn) {
    // Count function instruction
    module_offset += 1;
    curr_fn->ForEachParam(
        [&module_offset](const Instruction*) { module_offset += 1; }, true);
    for (auto& blk : *curr_fn) {
      // Count label
      module_offset += 1;
      for (auto& inst : blk) {
        module_offset += static_cast<uint32_t>(inst.dbg_line_insts().size());
        uid2offset_[inst.unique_id()] = module_offset;
        module_offset += 1;
      }
    }
    // Count function end instruction
    module_offset += 1;
  }
}

}  // namespace opt
}  // namespace spvtools