// Copyright (c) 2018 The Khronos Group Inc.
// Copyright (c) 2018 Valve Corporation
// Copyright (c) 2018 LunarG Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "instrument_pass.h"

#include "source/cfa.h"
#include "source/spirv_constant.h"

namespace {

// Common Parameter Positions
static const int kInstCommonParamInstIdx = 0;
static const int kInstCommonParamCnt = 1;

// Indices of operands in SPIR-V instructions
static const int kEntryPointExecutionModelInIdx = 0;
static const int kEntryPointFunctionIdInIdx = 1;

}  // anonymous namespace

namespace spvtools {
namespace opt {

void InstrumentPass::MovePreludeCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr,
    std::unique_ptr<BasicBlock>* new_blk_ptr) {
  same_block_pre_.clear();
  same_block_post_.clear();
  // Initialize new block. Reuse label from original block.
  new_blk_ptr->reset(new BasicBlock(std::move(ref_block_itr->GetLabel())));
  // Move contents of original ref block up to ref instruction.
  for (auto cii = ref_block_itr->begin(); cii != ref_inst_itr;
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    std::unique_ptr<Instruction> mv_ptr(inst);
    // Remember same-block ops for possible regeneration.
    if (IsSameBlockOp(&*mv_ptr)) {
      auto* sb_inst_ptr = mv_ptr.get();
      same_block_pre_[mv_ptr->result_id()] = sb_inst_ptr;
    }
    (*new_blk_ptr)->AddInstruction(std::move(mv_ptr));
  }
}

void InstrumentPass::MovePostludeCode(
    UptrVectorIterator<BasicBlock> ref_block_itr, BasicBlock* new_blk_ptr) {
  // new_blk_ptr->reset(new BasicBlock(NewLabel(ref_block_itr->id())));
  // Move contents of original ref block.
  for (auto cii = ref_block_itr->begin(); cii != ref_block_itr->end();
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    std::unique_ptr<Instruction> mv_inst(inst);
    // Regenerate any same-block instruction that has not been seen in the
    // current block.
    if (same_block_pre_.size() > 0) {
      CloneSameBlockOps(&mv_inst, &same_block_post_, &same_block_pre_,
                        new_blk_ptr);
      // Remember same-block ops in this block.
      if (IsSameBlockOp(&*mv_inst)) {
        const uint32_t rid = mv_inst->result_id();
        same_block_post_[rid] = rid;
      }
    }
    new_blk_ptr->AddInstruction(std::move(mv_inst));
  }
}
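
// Taken together, MovePreludeCode and MovePostludeCode split an existing
// block around an instrumented instruction: the prelude block keeps the
// original label and everything before the reference instruction, and the
// postlude block receives the rest. Same-block ops (OpSampledImage, OpImage)
// moved into the prelude are remembered so they can be re-cloned into the
// postlude where needed, since SPIR-V requires their results to be consumed
// in the same basic block that defines them.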

std::unique_ptr<Instruction> InstrumentPass::NewLabel(uint32_t label_id) {
  std::unique_ptr<Instruction> newLabel(
      new Instruction(context(), SpvOpLabel, 0, label_id, {}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*newLabel);
  return newLabel;
}

uint32_t InstrumentPass::Gen32BitCvtCode(uint32_t val_id,
                                         InstructionBuilder* builder) {
  // Convert integer value to 32-bit if necessary
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  uint32_t val_ty_id = get_def_use_mgr()->GetDef(val_id)->type_id();
  analysis::Integer* val_ty = type_mgr->GetType(val_ty_id)->AsInteger();
  if (val_ty->width() == 32) return val_id;
  bool is_signed = val_ty->IsSigned();
  analysis::Integer val_32b_ty(32, is_signed);
  analysis::Type* val_32b_reg_ty = type_mgr->GetRegisteredType(&val_32b_ty);
  uint32_t val_32b_reg_ty_id = type_mgr->GetId(val_32b_reg_ty);
  if (is_signed)
    return builder->AddUnaryOp(val_32b_reg_ty_id, SpvOpSConvert, val_id)
        ->result_id();
  else
    return builder->AddUnaryOp(val_32b_reg_ty_id, SpvOpUConvert, val_id)
        ->result_id();
}

uint32_t InstrumentPass::GenUintCastCode(uint32_t val_id,
                                         InstructionBuilder* builder) {
  // Convert value to 32-bit if necessary
  uint32_t val_32b_id = Gen32BitCvtCode(val_id, builder);
  // Cast value to unsigned if necessary
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  uint32_t val_ty_id = get_def_use_mgr()->GetDef(val_32b_id)->type_id();
  analysis::Integer* val_ty = type_mgr->GetType(val_ty_id)->AsInteger();
  if (!val_ty->IsSigned()) return val_32b_id;
  return builder->AddUnaryOp(GetUintId(), SpvOpBitcast, val_32b_id)
      ->result_id();
}

void InstrumentPass::GenDebugOutputFieldCode(uint32_t base_offset_id,
                                             uint32_t field_offset,
                                             uint32_t field_value_id,
                                             InstructionBuilder* builder) {
  // Cast value to 32-bit unsigned if necessary
  uint32_t val_id = GenUintCastCode(field_value_id, builder);
  // Store value
  Instruction* data_idx_inst =
      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, base_offset_id,
                           builder->GetUintConstantId(field_offset));
  uint32_t buf_id = GetOutputBufferId();
  uint32_t buf_uint_ptr_id = GetOutputBufferPtrId();
  Instruction* achain_inst =
      builder->AddTernaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
                            builder->GetUintConstantId(kDebugOutputDataOffset),
                            data_idx_inst->result_id());
  (void)builder->AddBinaryOp(0, SpvOpStore, achain_inst->result_id(), val_id);
}
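
// For a single field, the code generated above is roughly the following
// SPIR-V (illustrative; the ids are made up):
//   %idx = OpIAdd %uint %base_offset %field_offset
//   %ptr = OpAccessChain %_ptr_StorageBuffer_uint %output_buffer
//          %data_member_idx %idx
//          OpStore %ptr %value
// where %data_member_idx is the constant kDebugOutputDataOffset selecting
// the data array member of the output buffer block.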

void InstrumentPass::GenCommonStreamWriteCode(uint32_t record_sz,
                                              uint32_t inst_id,
                                              uint32_t stage_idx,
                                              uint32_t base_offset_id,
                                              InstructionBuilder* builder) {
  // Store record size
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutSize,
                          builder->GetUintConstantId(record_sz), builder);
  // Store Shader Id
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutShaderId,
                          builder->GetUintConstantId(shader_id_), builder);
  // Store Instruction Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutInstructionIdx, inst_id,
                          builder);
  // Store Stage Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutStageIdx,
                          builder->GetUintConstantId(stage_idx), builder);
}
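
// The common header written above occupies the first words of every debug
// record, roughly:
//   [kInstCommonOutSize]           total record size in 32-bit words
//   [kInstCommonOutShaderId]       shader id assigned at instrumentation time
//   [kInstCommonOutInstructionIdx] index of the instrumented instruction
//   [kInstCommonOutStageIdx]       execution model (stage) of the entry point
// Stage-specific words follow; see GenStageStreamWriteCode.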

void InstrumentPass::GenFragCoordEltDebugOutputCode(
    uint32_t base_offset_id, uint32_t uint_frag_coord_id, uint32_t element,
    InstructionBuilder* builder) {
  Instruction* element_val_inst = builder->AddIdLiteralOp(
      GetUintId(), SpvOpCompositeExtract, uint_frag_coord_id, element);
  GenDebugOutputFieldCode(base_offset_id, kInstFragOutFragCoordX + element,
                          element_val_inst->result_id(), builder);
}

uint32_t InstrumentPass::GenVarLoad(uint32_t var_id,
                                    InstructionBuilder* builder) {
  Instruction* var_inst = get_def_use_mgr()->GetDef(var_id);
  uint32_t type_id = GetPointeeTypeId(var_inst);
  Instruction* load_inst = builder->AddUnaryOp(type_id, SpvOpLoad, var_id);
  return load_inst->result_id();
}

void InstrumentPass::GenBuiltinOutputCode(uint32_t builtin_id,
                                          uint32_t builtin_off,
                                          uint32_t base_offset_id,
                                          InstructionBuilder* builder) {
  // Load and store builtin
  uint32_t load_id = GenVarLoad(builtin_id, builder);
  GenDebugOutputFieldCode(base_offset_id, builtin_off, load_id, builder);
}

void InstrumentPass::GenStageStreamWriteCode(uint32_t stage_idx,
                                             uint32_t base_offset_id,
                                             InstructionBuilder* builder) {
  // TODO(greg-lunarg): Add support for all stages
  switch (stage_idx) {
    case SpvExecutionModelVertex: {
      // Load and store VertexId and InstanceId
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInVertexIndex),
          kInstVertOutVertexIndex, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInstanceIndex),
          kInstVertOutInstanceIndex, base_offset_id, builder);
    } break;
    case SpvExecutionModelGLCompute:
    case SpvExecutionModelTaskNV:
    case SpvExecutionModelMeshNV: {
      // Load and store GlobalInvocationId.
      uint32_t load_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInGlobalInvocationId),
          builder);
      Instruction* x_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 0);
      Instruction* y_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 1);
      Instruction* z_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 2);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdX,
                              x_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdY,
                              y_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdZ,
                              z_inst->result_id(), builder);
    } break;
    case SpvExecutionModelGeometry: {
      // Load and store PrimitiveId and InvocationId.
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstGeomOutPrimitiveId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstGeomOutInvocationId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationControl: {
      // Load and store InvocationId and PrimitiveId
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstTessCtlOutInvocationId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstTessCtlOutPrimitiveId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationEvaluation: {
      // Load and store PrimitiveId and TessCoord.uv
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstTessEvalOutPrimitiveId, base_offset_id, builder);
      uint32_t load_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInTessCoord), builder);
      Instruction* uvec3_cast_inst =
          builder->AddUnaryOp(GetVec3UintId(), SpvOpBitcast, load_id);
      uint32_t uvec3_cast_id = uvec3_cast_inst->result_id();
      Instruction* u_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 0);
      Instruction* v_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 1);
      GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordU,
                              u_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordV,
                              v_inst->result_id(), builder);
    } break;
    case SpvExecutionModelFragment: {
      // Load FragCoord and convert to Uint
      Instruction* frag_coord_inst = builder->AddUnaryOp(
          GetVec4FloatId(), SpvOpLoad,
          context()->GetBuiltinInputVarId(SpvBuiltInFragCoord));
      Instruction* uint_frag_coord_inst = builder->AddUnaryOp(
          GetVec4UintId(), SpvOpBitcast, frag_coord_inst->result_id());
      for (uint32_t u = 0; u < 2u; ++u)
        GenFragCoordEltDebugOutputCode(
            base_offset_id, uint_frag_coord_inst->result_id(), u, builder);
    } break;
    case SpvExecutionModelRayGenerationNV:
    case SpvExecutionModelIntersectionNV:
    case SpvExecutionModelAnyHitNV:
    case SpvExecutionModelClosestHitNV:
    case SpvExecutionModelMissNV:
    case SpvExecutionModelCallableNV: {
      // Load and store LaunchIdNV.
      uint32_t launch_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInLaunchIdNV), builder);
      Instruction* x_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 0);
      Instruction* y_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 1);
      Instruction* z_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 2);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdX,
                              x_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdY,
                              y_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdZ,
                              z_launch_inst->result_id(), builder);
    } break;
    default: { assert(false && "unsupported stage"); } break;
  }
}

void InstrumentPass::GenDebugStreamWrite(
    uint32_t instruction_idx, uint32_t stage_idx,
    const std::vector<uint32_t>& validation_ids, InstructionBuilder* builder) {
  // Call debug output function. Pass func_idx, instruction_idx and
  // validation ids as args.
  uint32_t val_id_cnt = static_cast<uint32_t>(validation_ids.size());
  uint32_t output_func_id = GetStreamWriteFunctionId(stage_idx, val_id_cnt);
  std::vector<uint32_t> args = {output_func_id,
                                builder->GetUintConstantId(instruction_idx)};
  (void)args.insert(args.end(), validation_ids.begin(), validation_ids.end());
  (void)builder->AddNaryOp(GetVoidId(), SpvOpFunctionCall, args);
}
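
// The code generated by GenDebugStreamWrite is just a call to the per-stage
// stream-write helper, roughly (illustrative ids):
//   %unused = OpFunctionCall %void %output_func %inst_idx_const %val0 ... %valN
// where %output_func is created on demand by GetStreamWriteFunctionId for
// the given number of validation-specific ids.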

bool InstrumentPass::AllConstant(const std::vector<uint32_t>& ids) {
  for (auto& id : ids) {
    Instruction* id_inst = context()->get_def_use_mgr()->GetDef(id);
    if (!spvOpcodeIsConstant(id_inst->opcode())) return false;
  }
  return true;
}

uint32_t InstrumentPass::GenDebugDirectRead(
    const std::vector<uint32_t>& offset_ids, InstructionBuilder* ref_builder) {
  // Call debug input function. Pass func_idx and offset ids as args.
  uint32_t off_id_cnt = static_cast<uint32_t>(offset_ids.size());
  uint32_t input_func_id = GetDirectReadFunctionId(off_id_cnt);
  std::vector<uint32_t> args = {input_func_id};
  (void)args.insert(args.end(), offset_ids.begin(), offset_ids.end());
  // If optimizing direct reads and the call has already been generated,
  // use its result.
  if (opt_direct_reads_) {
    uint32_t res_id = call2id_[args];
    if (res_id != 0) return res_id;
  }
  // If the offsets are all constants, the call can be moved to the first block
  // of the function where its result can be reused. One example where this is
  // profitable is for uniform buffer references, of which there are often many.
  InstructionBuilder builder(ref_builder->GetContext(),
                             &*ref_builder->GetInsertPoint(),
                             ref_builder->GetPreservedAnalysis());
  bool insert_in_first_block = opt_direct_reads_ && AllConstant(offset_ids);
  if (insert_in_first_block) {
    Instruction* insert_before = &*curr_func_->begin()->tail();
    builder.SetInsertPoint(insert_before);
  }
  uint32_t res_id =
      builder.AddNaryOp(GetUintId(), SpvOpFunctionCall, args)->result_id();
  if (insert_in_first_block) call2id_[args] = res_id;
  return res_id;
}
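
// When opt_direct_reads_ is enabled and every offset is a constant, the call
// generated here is inserted at the end of the function's first block and
// memoized in call2id_, so later requests for the same read reuse the result
// instead of re-loading the input buffer; uniform buffer references, of which
// there are often many, are a typical beneficiary.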

bool InstrumentPass::IsSameBlockOp(const Instruction* inst) const {
  return inst->opcode() == SpvOpSampledImage || inst->opcode() == SpvOpImage;
}

void InstrumentPass::CloneSameBlockOps(
    std::unique_ptr<Instruction>* inst,
    std::unordered_map<uint32_t, uint32_t>* same_blk_post,
    std::unordered_map<uint32_t, Instruction*>* same_blk_pre,
    BasicBlock* block_ptr) {
  bool changed = false;
  (*inst)->ForEachInId([&same_blk_post, &same_blk_pre, &block_ptr, &changed,
                        this](uint32_t* iid) {
    const auto map_itr = (*same_blk_post).find(*iid);
    if (map_itr == (*same_blk_post).end()) {
      const auto map_itr2 = (*same_blk_pre).find(*iid);
      if (map_itr2 != (*same_blk_pre).end()) {
        // Clone pre-call same-block ops, map result id.
        const Instruction* in_inst = map_itr2->second;
        std::unique_ptr<Instruction> sb_inst(in_inst->Clone(context()));
        const uint32_t rid = sb_inst->result_id();
        const uint32_t nid = this->TakeNextId();
        get_decoration_mgr()->CloneDecorations(rid, nid);
        sb_inst->SetResultId(nid);
        get_def_use_mgr()->AnalyzeInstDefUse(&*sb_inst);
        (*same_blk_post)[rid] = nid;
        *iid = nid;
        changed = true;
        CloneSameBlockOps(&sb_inst, same_blk_post, same_blk_pre, block_ptr);
        block_ptr->AddInstruction(std::move(sb_inst));
      }
    } else {
      // Reset same-block op operand if necessary
      if (*iid != map_itr->second) {
        *iid = map_itr->second;
        changed = true;
      }
    }
  });
  if (changed) get_def_use_mgr()->AnalyzeInstUse(&**inst);
}
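
// Example of the fixup performed above (illustrative ids): if the original
// block contained
//   %si = OpSampledImage %sampled_image_ty %image %sampler
//   ...                                <- block is split here
//   %r  = OpImageSampleImplicitLod %v4float %si %coord
// then %si is cloned into the post-split block under a fresh result id and
// the sampling instruction is rewritten to use the clone, keeping the
// definition and its use in the same basic block.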

void InstrumentPass::UpdateSucceedingPhis(
    std::vector<std::unique_ptr<BasicBlock>>& new_blocks) {
  const auto first_blk = new_blocks.begin();
  const auto last_blk = new_blocks.end() - 1;
  const uint32_t first_id = (*first_blk)->id();
  const uint32_t last_id = (*last_blk)->id();
  const BasicBlock& const_last_block = *last_blk->get();
  const_last_block.ForEachSuccessorLabel(
      [&first_id, &last_id, this](const uint32_t succ) {
        BasicBlock* sbp = this->id2block_[succ];
        sbp->ForEachPhiInst([&first_id, &last_id, this](Instruction* phi) {
          bool changed = false;
          phi->ForEachInId([&first_id, &last_id, &changed](uint32_t* id) {
            if (*id == first_id) {
              *id = last_id;
              changed = true;
            }
          });
          if (changed) get_def_use_mgr()->AnalyzeInstUse(phi);
        });
      });
}

uint32_t InstrumentPass::GetOutputBufferPtrId() {
  if (output_buffer_ptr_id_ == 0) {
    output_buffer_ptr_id_ = context()->get_type_mgr()->FindPointerToType(
        GetUintId(), SpvStorageClassStorageBuffer);
  }
  return output_buffer_ptr_id_;
}

uint32_t InstrumentPass::GetInputBufferTypeId() {
  return (validation_id_ == kInstValidationIdBuffAddr) ? GetUint64Id()
                                                       : GetUintId();
}

uint32_t InstrumentPass::GetInputBufferPtrId() {
  if (input_buffer_ptr_id_ == 0) {
    input_buffer_ptr_id_ = context()->get_type_mgr()->FindPointerToType(
        GetInputBufferTypeId(), SpvStorageClassStorageBuffer);
  }
  return input_buffer_ptr_id_;
}

uint32_t InstrumentPass::GetOutputBufferBinding() {
  switch (validation_id_) {
    case kInstValidationIdBindless:
      return kDebugOutputBindingStream;
    case kInstValidationIdBuffAddr:
      return kDebugOutputBindingStream;
    case kInstValidationIdDebugPrintf:
      return kDebugOutputPrintfStream;
    default:
      assert(false && "unexpected validation id");
  }
  return 0;
}

uint32_t InstrumentPass::GetInputBufferBinding() {
  switch (validation_id_) {
    case kInstValidationIdBindless:
      return kDebugInputBindingBindless;
    case kInstValidationIdBuffAddr:
      return kDebugInputBindingBuffAddr;
    default:
      assert(false && "unexpected validation id");
  }
  return 0;
}

analysis::Type* InstrumentPass::GetUintXRuntimeArrayType(
    uint32_t width, analysis::Type** rarr_ty) {
  if (*rarr_ty == nullptr) {
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint_ty(width, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    analysis::RuntimeArray uint_rarr_ty_tmp(reg_uint_ty);
    *rarr_ty = type_mgr->GetRegisteredType(&uint_rarr_ty_tmp);
    uint32_t uint_arr_ty_id = type_mgr->GetTypeInstruction(*rarr_ty);
    // By the Vulkan spec, a pre-existing RuntimeArray of uint must be part of
    // a block, and will therefore be decorated with an ArrayStride. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(uint_arr_ty_id) == 0 &&
           "used RuntimeArray type returned");
    deco_mgr->AddDecorationVal(uint_arr_ty_id, SpvDecorationArrayStride,
                               width / 8u);
  }
  return *rarr_ty;
}

analysis::Type* InstrumentPass::GetUintRuntimeArrayType(uint32_t width) {
  analysis::Type** rarr_ty =
      (width == 64) ? &uint64_rarr_ty_ : &uint32_rarr_ty_;
  return GetUintXRuntimeArrayType(width, rarr_ty);
}

void InstrumentPass::AddStorageBufferExt() {
  if (storage_buffer_ext_defined_) return;
  if (!get_feature_mgr()->HasExtension(kSPV_KHR_storage_buffer_storage_class)) {
    context()->AddExtension("SPV_KHR_storage_buffer_storage_class");
  }
  storage_buffer_ext_defined_ = true;
}

// Return id for output buffer
uint32_t InstrumentPass::GetOutputBufferId() {
  if (output_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Type* reg_uint_rarr_ty = GetUintRuntimeArrayType(32);
    analysis::Integer uint_ty(32, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    analysis::Struct buf_ty({reg_uint_ty, reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t obufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(obufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(obufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputSizeOffset,
                                  SpvDecorationOffset, 0);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputDataOffset,
                                  SpvDecorationOffset, 4);
    uint32_t obufTyPtrId_ =
        type_mgr->FindPointerToType(obufTyId, SpvStorageClassStorageBuffer);
    output_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, obufTyPtrId_, output_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationBinding,
                               GetOutputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {output_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return output_buffer_id_;
}
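
// The buffer created above corresponds roughly to the following GLSL
// (illustrative; the actual set comes from desc_set_ and the binding from
// GetOutputBufferBinding()):
//   layout(set = S, binding = B) buffer OutputBuffer {
//     uint written_size;  // member kDebugOutputSizeOffset, offset 0
//     uint data[];        // member kDebugOutputDataOffset, offset 4
//   };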

uint32_t InstrumentPass::GetInputBufferId() {
  if (input_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    uint32_t width = (validation_id_ == kInstValidationIdBuffAddr) ? 64u : 32u;
    analysis::Type* reg_uint_rarr_ty = GetUintRuntimeArrayType(width);
    analysis::Struct buf_ty({reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t ibufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(ibufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(ibufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(ibufTyId, 0, SpvDecorationOffset, 0);
    uint32_t ibufTyPtrId_ =
        type_mgr->FindPointerToType(ibufTyId, SpvStorageClassStorageBuffer);
    input_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, ibufTyPtrId_, input_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationBinding,
                               GetInputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {input_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return input_buffer_id_;
}

uint32_t InstrumentPass::GetFloatId() {
  if (float_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Float float_ty(32);
    analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
    float_id_ = type_mgr->GetTypeInstruction(reg_float_ty);
  }
  return float_id_;
}

uint32_t InstrumentPass::GetVec4FloatId() {
  if (v4float_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Float float_ty(32);
    analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
    analysis::Vector v4float_ty(reg_float_ty, 4);
    analysis::Type* reg_v4float_ty = type_mgr->GetRegisteredType(&v4float_ty);
    v4float_id_ = type_mgr->GetTypeInstruction(reg_v4float_ty);
  }
  return v4float_id_;
}

uint32_t InstrumentPass::GetUintId() {
  if (uint_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint_ty(32, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    uint_id_ = type_mgr->GetTypeInstruction(reg_uint_ty);
  }
  return uint_id_;
}

uint32_t InstrumentPass::GetUint64Id() {
  if (uint64_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint64_ty(64, false);
    analysis::Type* reg_uint64_ty = type_mgr->GetRegisteredType(&uint64_ty);
    uint64_id_ = type_mgr->GetTypeInstruction(reg_uint64_ty);
  }
  return uint64_id_;
}

uint32_t InstrumentPass::GetUint8Id() {
  if (uint8_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint8_ty(8, false);
    analysis::Type* reg_uint8_ty = type_mgr->GetRegisteredType(&uint8_ty);
    uint8_id_ = type_mgr->GetTypeInstruction(reg_uint8_ty);
  }
  return uint8_id_;
}

uint32_t InstrumentPass::GetVecUintId(uint32_t len) {
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  analysis::Integer uint_ty(32, false);
  analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
  analysis::Vector v_uint_ty(reg_uint_ty, len);
  analysis::Type* reg_v_uint_ty = type_mgr->GetRegisteredType(&v_uint_ty);
  uint32_t v_uint_id = type_mgr->GetTypeInstruction(reg_v_uint_ty);
  return v_uint_id;
}

uint32_t InstrumentPass::GetVec4UintId() {
  if (v4uint_id_ == 0) v4uint_id_ = GetVecUintId(4u);
  return v4uint_id_;
}

uint32_t InstrumentPass::GetVec3UintId() {
  if (v3uint_id_ == 0) v3uint_id_ = GetVecUintId(3u);
  return v3uint_id_;
}

uint32_t InstrumentPass::GetBoolId() {
  if (bool_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Bool bool_ty;
    analysis::Type* reg_bool_ty = type_mgr->GetRegisteredType(&bool_ty);
    bool_id_ = type_mgr->GetTypeInstruction(reg_bool_ty);
  }
  return bool_id_;
}

uint32_t InstrumentPass::GetVoidId() {
  if (void_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Void void_ty;
    analysis::Type* reg_void_ty = type_mgr->GetRegisteredType(&void_ty);
    void_id_ = type_mgr->GetTypeInstruction(reg_void_ty);
  }
  return void_id_;
}

uint32_t InstrumentPass::GetStreamWriteFunctionId(uint32_t stage_idx,
                                                  uint32_t val_spec_param_cnt) {
  // Total param count is common params plus validation-specific
  // params
  uint32_t param_cnt = kInstCommonParamCnt + val_spec_param_cnt;
  if (param2output_func_id_[param_cnt] == 0) {
    // Create function
    param2output_func_id_[param_cnt] = TakeNextId();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    std::vector<const analysis::Type*> param_types;
    for (uint32_t c = 0; c < param_cnt; ++c)
      param_types.push_back(type_mgr->GetType(GetUintId()));
    analysis::Function func_ty(type_mgr->GetType(GetVoidId()), param_types);
    analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
    std::unique_ptr<Instruction> func_inst(
        new Instruction(get_module()->context(), SpvOpFunction, GetVoidId(),
                        param2output_func_id_[param_cnt],
                        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
                          {SpvFunctionControlMaskNone}},
                         {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
                          {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
    std::unique_ptr<Function> output_func =
        MakeUnique<Function>(std::move(func_inst));
    // Add parameters
    std::vector<uint32_t> param_vec;
    for (uint32_t c = 0; c < param_cnt; ++c) {
      uint32_t pid = TakeNextId();
      param_vec.push_back(pid);
      std::unique_ptr<Instruction> param_inst(
          new Instruction(get_module()->context(), SpvOpFunctionParameter,
                          GetUintId(), pid, {}));
      get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
      output_func->AddParameter(std::move(param_inst));
    }
    // Create first block
    uint32_t test_blk_id = TakeNextId();
    std::unique_ptr<Instruction> test_label(NewLabel(test_blk_id));
    std::unique_ptr<BasicBlock> new_blk_ptr =
        MakeUnique<BasicBlock>(std::move(test_label));
    InstructionBuilder builder(
        context(), &*new_blk_ptr,
        IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
    // Gen test if debug output buffer size will not be exceeded.
    uint32_t val_spec_offset = kInstStageOutCnt;
    uint32_t obuf_record_sz = val_spec_offset + val_spec_param_cnt;
    uint32_t buf_id = GetOutputBufferId();
    uint32_t buf_uint_ptr_id = GetOutputBufferPtrId();
    Instruction* obuf_curr_sz_ac_inst =
        builder.AddBinaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
                            builder.GetUintConstantId(kDebugOutputSizeOffset));
    // Fetch the current debug buffer written size atomically, adding the
    // size of the record to be written.
    uint32_t obuf_record_sz_id = builder.GetUintConstantId(obuf_record_sz);
    uint32_t mask_none_id = builder.GetUintConstantId(SpvMemoryAccessMaskNone);
    uint32_t scope_invok_id = builder.GetUintConstantId(SpvScopeInvocation);
    Instruction* obuf_curr_sz_inst = builder.AddQuadOp(
        GetUintId(), SpvOpAtomicIAdd, obuf_curr_sz_ac_inst->result_id(),
        scope_invok_id, mask_none_id, obuf_record_sz_id);
    uint32_t obuf_curr_sz_id = obuf_curr_sz_inst->result_id();
    // Compute new written size
    Instruction* obuf_new_sz_inst =
        builder.AddBinaryOp(GetUintId(), SpvOpIAdd, obuf_curr_sz_id,
                            builder.GetUintConstantId(obuf_record_sz));
    // Fetch the data bound
    Instruction* obuf_bnd_inst =
        builder.AddIdLiteralOp(GetUintId(), SpvOpArrayLength,
                               GetOutputBufferId(), kDebugOutputDataOffset);
    // Test that new written size is less than or equal to debug output
    // data bound
    Instruction* obuf_safe_inst = builder.AddBinaryOp(
        GetBoolId(), SpvOpULessThanEqual, obuf_new_sz_inst->result_id(),
        obuf_bnd_inst->result_id());
    uint32_t merge_blk_id = TakeNextId();
    uint32_t write_blk_id = TakeNextId();
    std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
    std::unique_ptr<Instruction> write_label(NewLabel(write_blk_id));
    (void)builder.AddConditionalBranch(obuf_safe_inst->result_id(),
                                       write_blk_id, merge_blk_id, merge_blk_id,
                                       SpvSelectionControlMaskNone);
    // Close safety test block and gen write block
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    new_blk_ptr = MakeUnique<BasicBlock>(std::move(write_label));
    builder.SetInsertPoint(&*new_blk_ptr);
    // Generate common and stage-specific debug record members
    GenCommonStreamWriteCode(obuf_record_sz, param_vec[kInstCommonParamInstIdx],
                             stage_idx, obuf_curr_sz_id, &builder);
    GenStageStreamWriteCode(stage_idx, obuf_curr_sz_id, &builder);
    // Gen writes of validation specific data
    for (uint32_t i = 0; i < val_spec_param_cnt; ++i) {
      GenDebugOutputFieldCode(obuf_curr_sz_id, val_spec_offset + i,
                              param_vec[kInstCommonParamCnt + i], &builder);
    }
    // Close write block and gen merge block
    (void)builder.AddBranch(merge_blk_id);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    new_blk_ptr = MakeUnique<BasicBlock>(std::move(merge_label));
    builder.SetInsertPoint(&*new_blk_ptr);
    // Close merge block and function and add function to module
    (void)builder.AddNullaryOp(0, SpvOpReturn);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    std::unique_ptr<Instruction> func_end_inst(
        new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
    output_func->SetFunctionEnd(std::move(func_end_inst));
    context()->AddFunction(std::move(output_func));
  }
  return param2output_func_id_[param_cnt];
}
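
// The generated stream-write function has roughly this shape (pseudocode;
// names are for illustration only):
//   void OutputFunc(uint inst_idx, uint v0, ..., uint vN) {
//     uint offset = atomicAdd(output_buffer.size, record_size);
//     if (offset + record_size <= arrayLength(output_buffer.data)) {
//       // write common header, stage-specific words, then v0..vN at offset
//     }
//   }
// A record is written only if it fits entirely in the data array; the size
// member still advances, which allows a consumer to detect dropped records.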

uint32_t InstrumentPass::GetDirectReadFunctionId(uint32_t param_cnt) {
  uint32_t func_id = param2input_func_id_[param_cnt];
  if (func_id != 0) return func_id;
  // Create input function for param_cnt.
  func_id = TakeNextId();
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  std::vector<const analysis::Type*> param_types;
  for (uint32_t c = 0; c < param_cnt; ++c)
    param_types.push_back(type_mgr->GetType(GetUintId()));
  uint32_t ibuf_type_id = GetInputBufferTypeId();
  analysis::Function func_ty(type_mgr->GetType(ibuf_type_id), param_types);
  analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
  std::unique_ptr<Instruction> func_inst(new Instruction(
      get_module()->context(), SpvOpFunction, ibuf_type_id, func_id,
      {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
        {SpvFunctionControlMaskNone}},
       {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
        {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
  std::unique_ptr<Function> input_func =
      MakeUnique<Function>(std::move(func_inst));
  // Add parameters
  std::vector<uint32_t> param_vec;
  for (uint32_t c = 0; c < param_cnt; ++c) {
    uint32_t pid = TakeNextId();
    param_vec.push_back(pid);
    std::unique_ptr<Instruction> param_inst(new Instruction(
        get_module()->context(), SpvOpFunctionParameter, GetUintId(), pid, {}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
    input_func->AddParameter(std::move(param_inst));
  }
  // Create block
  uint32_t blk_id = TakeNextId();
  std::unique_ptr<Instruction> blk_label(NewLabel(blk_id));
  std::unique_ptr<BasicBlock> new_blk_ptr =
      MakeUnique<BasicBlock>(std::move(blk_label));
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // For each offset parameter, generate new offset with parameter, adding last
  // loaded value if it exists, and load value from input buffer at new offset.
  // Return last loaded value.
  uint32_t buf_id = GetInputBufferId();
  uint32_t buf_ptr_id = GetInputBufferPtrId();
  uint32_t last_value_id = 0;
  for (uint32_t p = 0; p < param_cnt; ++p) {
    uint32_t offset_id;
    if (p == 0) {
      offset_id = param_vec[0];
    } else {
      if (ibuf_type_id != GetUintId()) {
        Instruction* ucvt_inst =
            builder.AddUnaryOp(GetUintId(), SpvOpUConvert, last_value_id);
        last_value_id = ucvt_inst->result_id();
      }
      Instruction* offset_inst = builder.AddBinaryOp(
          GetUintId(), SpvOpIAdd, last_value_id, param_vec[p]);
      offset_id = offset_inst->result_id();
    }
    Instruction* ac_inst = builder.AddTernaryOp(
        buf_ptr_id, SpvOpAccessChain, buf_id,
        builder.GetUintConstantId(kDebugInputDataOffset), offset_id);
    Instruction* load_inst =
        builder.AddUnaryOp(ibuf_type_id, SpvOpLoad, ac_inst->result_id());
    last_value_id = load_inst->result_id();
  }
  (void)builder.AddInstruction(MakeUnique<Instruction>(
      context(), SpvOpReturnValue, 0, 0,
      std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {last_value_id}}}));
  // Close block and function and add function to module
  input_func->AddBasicBlock(std::move(new_blk_ptr));
  std::unique_ptr<Instruction> func_end_inst(
      new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
  input_func->SetFunctionEnd(std::move(func_end_inst));
  context()->AddFunction(std::move(input_func));
  param2input_func_id_[param_cnt] = func_id;
  return func_id;
}
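
// The generated direct-read helper chains its offset parameters through the
// input buffer, roughly (pseudocode; names are for illustration only):
//   uint InputFunc(uint p0, uint p1, ..., uint pN) {
//     uint v = input_buffer.data[p0];
//     v = input_buffer.data[v + p1];
//     ...
//     return input_buffer.data[... + pN];
//   }
// For buffer-address validation the element type is 64-bit, and each
// intermediate value is narrowed with OpUConvert before being used as the
// next offset.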
| 869 | |
Alexis Hetu | 3eb4dd8 | 2020-10-29 21:37:20 -0400 | [diff] [blame] | 870 | void InstrumentPass::SplitBlock( |
| 871 | BasicBlock::iterator inst_itr, UptrVectorIterator<BasicBlock> block_itr, |
| 872 | std::vector<std::unique_ptr<BasicBlock>>* new_blocks) { |
| 873 | // Make sure def/use analysis is done before we start moving instructions |
| 874 | // out of function |
| 875 | (void)get_def_use_mgr(); |
| 876 | // Move original block's preceding instructions into first new block |
| 877 | std::unique_ptr<BasicBlock> first_blk_ptr; |
| 878 | MovePreludeCode(inst_itr, block_itr, &first_blk_ptr); |
| 879 | InstructionBuilder builder( |
| 880 | context(), &*first_blk_ptr, |
| 881 | IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping); |
| 882 | uint32_t split_blk_id = TakeNextId(); |
| 883 | std::unique_ptr<Instruction> split_label(NewLabel(split_blk_id)); |
| 884 | (void)builder.AddBranch(split_blk_id); |
| 885 | new_blocks->push_back(std::move(first_blk_ptr)); |
| 886 | // Move remaining instructions into split block and add to new blocks |
| 887 | std::unique_ptr<BasicBlock> split_blk_ptr( |
| 888 | new BasicBlock(std::move(split_label))); |
| 889 | MovePostludeCode(block_itr, &*split_blk_ptr); |
| 890 | new_blocks->push_back(std::move(split_blk_ptr)); |
| 891 | } |
| 892 | |
Chris Forbes | cc5697f | 2019-01-30 11:54:08 -0800 | [diff] [blame] | 893 | bool InstrumentPass::InstrumentFunction(Function* func, uint32_t stage_idx, |
| 894 | InstProcessFunction& pfn) { |
Alexis Hetu | 3eb4dd8 | 2020-10-29 21:37:20 -0400 | [diff] [blame] | 895 | curr_func_ = func; |
| 896 | call2id_.clear(); |
| 897 | bool first_block_split = false; |
Chris Forbes | cc5697f | 2019-01-30 11:54:08 -0800 | [diff] [blame] | 898 | bool modified = false; |
Alexis Hetu | 3eb4dd8 | 2020-10-29 21:37:20 -0400 | [diff] [blame] | 899 | // Apply instrumentation function to each instruction. |
Chris Forbes | cc5697f | 2019-01-30 11:54:08 -0800 | [diff] [blame] | 900 | // Using block iterators here because of block erasures and insertions. |
Alexis Hetu | 3eb4dd8 | 2020-10-29 21:37:20 -0400 | [diff] [blame] | 901 | std::vector<std::unique_ptr<BasicBlock>> new_blks; |
Chris Forbes | cc5697f | 2019-01-30 11:54:08 -0800 | [diff] [blame] | 902 | for (auto bi = func->begin(); bi != func->end(); ++bi) { |
Ben Clayton | b73b760 | 2019-07-29 13:56:13 +0100 | [diff] [blame] | 903 | for (auto ii = bi->begin(); ii != bi->end();) { |
Alexis Hetu | 3eb4dd8 | 2020-10-29 21:37:20 -0400 | [diff] [blame] | 904 | // Split all executable instructions out of first block into a following |
| 905 | // block. This will allow function calls to be inserted into the first |
| 906 | // block without interfering with the instrumentation algorithm. |
| 907 | if (opt_direct_reads_ && !first_block_split) { |
| 908 | if (ii->opcode() != SpvOpVariable) { |
| 909 | SplitBlock(ii, bi, &new_blks); |
| 910 | first_block_split = true; |
| 911 | } |
| 912 | } else { |
| 913 | pfn(ii, bi, stage_idx, &new_blks); |
| 914 | } |
| 915 | // If no new code, continue |
Chris Forbes | cc5697f | 2019-01-30 11:54:08 -0800 | [diff] [blame] | 916 | if (new_blks.size() == 0) { |
| 917 | ++ii; |
| 918 | continue; |
| 919 | } |
Ben Clayton | b73b760 | 2019-07-29 13:56:13 +0100 | [diff] [blame] | 920 | // Add new blocks to label id map |
| 921 | for (auto& blk : new_blks) id2block_[blk->id()] = &*blk; |
Chris Forbes | cc5697f | 2019-01-30 11:54:08 -0800 | [diff] [blame] | 922 | // If there are new blocks we know there will always be two or |
| 923 | // more, so update succeeding phis with label of new last block. |
| 924 | size_t newBlocksSize = new_blks.size(); |
| 925 | assert(newBlocksSize > 1); |
| 926 | UpdateSucceedingPhis(new_blks); |
| 927 | // Replace original block with new block(s) |
| 928 | bi = bi.Erase(); |
| 929 | for (auto& bb : new_blks) { |
| 930 | bb->SetParent(func); |
| 931 | } |
| 932 | bi = bi.InsertBefore(&new_blks); |
| 933 | // Reset block iterator to last new block |
| 934 | for (size_t i = 0; i < newBlocksSize - 1; i++) ++bi; |
| 935 | modified = true; |
| 936 | // Restart instrumenting at beginning of last new block, |
| 937 | // but skip over any new phi or copy instruction. |
| 938 | ii = bi->begin(); |
| 939 | if (ii->opcode() == SpvOpPhi || ii->opcode() == SpvOpCopyObject) ++ii; |
| 940 | new_blks.clear(); |
| 941 | } |
| 942 | } |
| 943 | return modified; |
| 944 | } |
| 945 | |
| 946 | bool InstrumentPass::InstProcessCallTreeFromRoots(InstProcessFunction& pfn, |
| 947 | std::queue<uint32_t>* roots, |
| 948 | uint32_t stage_idx) { |
| 949 | bool modified = false; |
| 950 | std::unordered_set<uint32_t> done; |
Ben Clayton | b73b760 | 2019-07-29 13:56:13 +0100 | [diff] [blame] | 951 | // Don't process input and output functions |
| 952 | for (auto& ifn : param2input_func_id_) done.insert(ifn.second); |
Alexis Hetu | b8a7746 | 2020-03-27 07:59:09 -0400 | [diff] [blame] | 953 | for (auto& ofn : param2output_func_id_) done.insert(ofn.second); |
Chris Forbes | cc5697f | 2019-01-30 11:54:08 -0800 | [diff] [blame] | 954 | // Process all functions from roots |
| 955 | while (!roots->empty()) { |
| 956 | const uint32_t fi = roots->front(); |
| 957 | roots->pop(); |
| 958 | if (done.insert(fi).second) { |
| 959 | Function* fn = id2function_.at(fi); |
| 960 | // Add calls first so we don't add new output function |
| 961 | context()->AddCalls(fn, roots); |
| 962 | modified = InstrumentFunction(fn, stage_idx, pfn) || modified; |
| 963 | } |
| 964 | } |
| 965 | return modified; |
| 966 | } |
| 967 | |
bool InstrumentPass::InstProcessEntryPointCallTree(InstProcessFunction& pfn) {
  // Make sure all entry points have the same execution model. Do not
  // instrument if they do not.
  // TODO(greg-lunarg): Handle mixed stages. Technically, a shader module
  // can contain entry points with different execution models, although
  // such modules will likely be rare as GLSL and HLSL are geared toward
  // one model per module. In such cases we will need to clone any
  // functions which are in the call trees of entry points with differing
  // execution models.
  uint32_t ecnt = 0;
  uint32_t stage = SpvExecutionModelMax;
  for (auto& e : get_module()->entry_points()) {
    if (ecnt == 0)
      stage = e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx);
    else if (e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx) !=
             stage) {
      if (consumer()) {
        std::string message = "Mixed stage shader module not supported";
        consumer()(SPV_MSG_ERROR, 0, {0, 0, 0}, message.c_str());
      }
      return false;
    }
    ++ecnt;
  }
  // Check for supported stages
  if (stage != SpvExecutionModelVertex && stage != SpvExecutionModelFragment &&
      stage != SpvExecutionModelGeometry &&
      stage != SpvExecutionModelGLCompute &&
      stage != SpvExecutionModelTessellationControl &&
      stage != SpvExecutionModelTessellationEvaluation &&
      stage != SpvExecutionModelTaskNV && stage != SpvExecutionModelMeshNV &&
      stage != SpvExecutionModelRayGenerationNV &&
      stage != SpvExecutionModelIntersectionNV &&
      stage != SpvExecutionModelAnyHitNV &&
      stage != SpvExecutionModelClosestHitNV &&
      stage != SpvExecutionModelMissNV &&
      stage != SpvExecutionModelCallableNV) {
    if (consumer()) {
      std::string message = "Stage not supported by instrumentation";
      consumer()(SPV_MSG_ERROR, 0, {0, 0, 0}, message.c_str());
    }
    return false;
  }
  // Gather the roots of all entry points
  std::queue<uint32_t> roots;
  for (auto& e : get_module()->entry_points()) {
    roots.push(e.GetSingleWordInOperand(kEntryPointFunctionIdInIdx));
  }
  bool modified = InstProcessCallTreeFromRoots(pfn, &roots, stage);
  return modified;
}

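// Illustrative sketch (not part of the pass): a derived pass typically wraps
// its per-instruction code generator in an InstProcessFunction and hands it
// to InstProcessEntryPointCallTree from its Process() method. The name
// GenMyCheckCode below is a placeholder for the derived pass's generator:
//
//   InstProcessFunction pfn =
//       [this](BasicBlock::iterator ref_inst_itr,
//              UptrVectorIterator<BasicBlock> ref_block_itr,
//              uint32_t stage_idx,
//              std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
//         GenMyCheckCode(ref_inst_itr, ref_block_itr, stage_idx, new_blocks);
//       };
//   bool modified = InstProcessEntryPointCallTree(pfn);
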
void InstrumentPass::InitializeInstrument() {
  output_buffer_id_ = 0;
  output_buffer_ptr_id_ = 0;
  input_buffer_ptr_id_ = 0;
  input_buffer_id_ = 0;
  float_id_ = 0;
  v4float_id_ = 0;
  uint_id_ = 0;
  uint64_id_ = 0;
  uint8_id_ = 0;
  v4uint_id_ = 0;
  v3uint_id_ = 0;
  bool_id_ = 0;
  void_id_ = 0;
  storage_buffer_ext_defined_ = false;
  uint32_rarr_ty_ = nullptr;
  uint64_rarr_ty_ = nullptr;

  // Clear collections
  id2function_.clear();
  id2block_.clear();

  // Clear maps
  param2input_func_id_.clear();
  param2output_func_id_.clear();

  // Initialize function and block maps.
  for (auto& fn : *get_module()) {
    id2function_[fn.result_id()] = &fn;
    for (auto& blk : fn) {
      id2block_[blk.id()] = &blk;
    }
  }

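  // Note: the walk below mirrors the logical section order of a SPIR-V
  // module (capabilities, extensions, ext inst imports, memory model,
  // entry points, execution modes, debug and annotation instructions,
  // types/constants/variables, then function bodies), so uid2offset_
  // records each instruction's index in the original module.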
  // Remember original instruction offsets
  uint32_t module_offset = 0;
  Module* module = get_module();
  for (auto& i : context()->capabilities()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->extensions()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->ext_inst_imports()) {
    (void)i;
    ++module_offset;
  }
  ++module_offset;  // memory_model
  for (auto& i : module->entry_points()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->execution_modes()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs1()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs2()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs3()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->ext_inst_debuginfo()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->annotations()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->types_values()) {
    module_offset += 1;
    module_offset += static_cast<uint32_t>(i.dbg_line_insts().size());
  }

  auto curr_fn = get_module()->begin();
  for (; curr_fn != get_module()->end(); ++curr_fn) {
    // Count function instruction
    module_offset += 1;
    curr_fn->ForEachParam(
        [&module_offset](const Instruction*) { module_offset += 1; }, true);
    for (auto& blk : *curr_fn) {
      // Count label
      module_offset += 1;
      for (auto& inst : blk) {
        module_offset += static_cast<uint32_t>(inst.dbg_line_insts().size());
        uid2offset_[inst.unique_id()] = module_offset;
        module_offset += 1;
      }
    }
    // Count function end instruction
    module_offset += 1;
  }
}

}  // namespace opt
}  // namespace spvtools