// Copyright (c) 2018 The Khronos Group Inc.
// Copyright (c) 2018 Valve Corporation
// Copyright (c) 2018 LunarG Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "instrument_pass.h"

#include "source/cfa.h"
#include "source/spirv_constant.h"

namespace {

// Common Parameter Positions
static const int kInstCommonParamInstIdx = 0;
static const int kInstCommonParamCnt = 1;

// Indices of operands in SPIR-V instructions
static const int kEntryPointExecutionModelInIdx = 0;
static const int kEntryPointFunctionIdInIdx = 1;

}  // anonymous namespace

namespace spvtools {
namespace opt {

void InstrumentPass::MovePreludeCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr,
    std::unique_ptr<BasicBlock>* new_blk_ptr) {
  same_block_pre_.clear();
  same_block_post_.clear();
  // Initialize new block. Reuse label from original block.
  new_blk_ptr->reset(new BasicBlock(std::move(ref_block_itr->GetLabel())));
  // Move contents of original ref block up to ref instruction.
  for (auto cii = ref_block_itr->begin(); cii != ref_inst_itr;
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    std::unique_ptr<Instruction> mv_ptr(inst);
    // Remember same-block ops for possible regeneration.
    if (IsSameBlockOp(&*mv_ptr)) {
      auto* sb_inst_ptr = mv_ptr.get();
      same_block_pre_[mv_ptr->result_id()] = sb_inst_ptr;
    }
    (*new_blk_ptr)->AddInstruction(std::move(mv_ptr));
  }
}

void InstrumentPass::MovePostludeCode(
    UptrVectorIterator<BasicBlock> ref_block_itr, BasicBlock* new_blk_ptr) {
  // new_blk_ptr->reset(new BasicBlock(NewLabel(ref_block_itr->id())));
  // Move contents of original ref block.
  for (auto cii = ref_block_itr->begin(); cii != ref_block_itr->end();
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    std::unique_ptr<Instruction> mv_inst(inst);
    // Regenerate any same-block instruction that has not been seen in the
    // current block.
    if (same_block_pre_.size() > 0) {
      CloneSameBlockOps(&mv_inst, &same_block_post_, &same_block_pre_,
                        new_blk_ptr);
      // Remember same-block ops in this block.
      if (IsSameBlockOp(&*mv_inst)) {
        const uint32_t rid = mv_inst->result_id();
        same_block_post_[rid] = rid;
      }
    }
    new_blk_ptr->AddInstruction(std::move(mv_inst));
  }
}

std::unique_ptr<Instruction> InstrumentPass::NewLabel(uint32_t label_id) {
  std::unique_ptr<Instruction> newLabel(
      new Instruction(context(), SpvOpLabel, 0, label_id, {}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*newLabel);
  return newLabel;
}

uint32_t InstrumentPass::GenUintCastCode(uint32_t val_id,
                                         InstructionBuilder* builder) {
  // Cast value to 32-bit unsigned if necessary
  if (get_def_use_mgr()->GetDef(val_id)->type_id() == GetUintId())
    return val_id;
  return builder->AddUnaryOp(GetUintId(), SpvOpBitcast, val_id)->result_id();
}

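// For illustration, the store emitted by GenDebugOutputFieldCode is roughly
// equivalent to the following GLSL-like sketch (names are illustrative only;
// the value is bitcast to uint if it is not already a 32-bit unsigned int):
//
//   output_buffer.data[base_offset + field_offset] = uint(field_value);
//
// where |output_buffer| is the variable returned by GetOutputBufferId() and
// |data| is its runtime array member at kDebugOutputDataOffset.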
void InstrumentPass::GenDebugOutputFieldCode(uint32_t base_offset_id,
                                             uint32_t field_offset,
                                             uint32_t field_value_id,
                                             InstructionBuilder* builder) {
  // Cast value to 32-bit unsigned if necessary
  uint32_t val_id = GenUintCastCode(field_value_id, builder);
  // Store value
  Instruction* data_idx_inst =
      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, base_offset_id,
                           builder->GetUintConstantId(field_offset));
  uint32_t buf_id = GetOutputBufferId();
  uint32_t buf_uint_ptr_id = GetOutputBufferPtrId();
  Instruction* achain_inst =
      builder->AddTernaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
                            builder->GetUintConstantId(kDebugOutputDataOffset),
                            data_idx_inst->result_id());
  (void)builder->AddBinaryOp(0, SpvOpStore, achain_inst->result_id(), val_id);
}

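// Every debug record written to the output buffer begins with a common
// header, written below in this order:
//
//   kInstCommonOutSize            record size in 32-bit words
//   kInstCommonOutShaderId        the shader_id_ given to this pass
//   kInstCommonOutInstructionIdx  offset of the instrumented instruction
//   kInstCommonOutStageIdx        execution model of the entry point
//
// Stage-specific words follow (see GenStageStreamWriteCode), and any
// validation-specific words begin at offset kInstStageOutCnt.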
void InstrumentPass::GenCommonStreamWriteCode(uint32_t record_sz,
                                              uint32_t inst_id,
                                              uint32_t stage_idx,
                                              uint32_t base_offset_id,
                                              InstructionBuilder* builder) {
  // Store record size
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutSize,
                          builder->GetUintConstantId(record_sz), builder);
  // Store Shader Id
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutShaderId,
                          builder->GetUintConstantId(shader_id_), builder);
  // Store Instruction Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutInstructionIdx, inst_id,
                          builder);
  // Store Stage Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutStageIdx,
                          builder->GetUintConstantId(stage_idx), builder);
}

void InstrumentPass::GenFragCoordEltDebugOutputCode(
    uint32_t base_offset_id, uint32_t uint_frag_coord_id, uint32_t element,
    InstructionBuilder* builder) {
  Instruction* element_val_inst = builder->AddIdLiteralOp(
      GetUintId(), SpvOpCompositeExtract, uint_frag_coord_id, element);
  GenDebugOutputFieldCode(base_offset_id, kInstFragOutFragCoordX + element,
                          element_val_inst->result_id(), builder);
}

uint32_t InstrumentPass::GenVarLoad(uint32_t var_id,
                                    InstructionBuilder* builder) {
  Instruction* var_inst = get_def_use_mgr()->GetDef(var_id);
  uint32_t type_id = GetPointeeTypeId(var_inst);
  Instruction* load_inst = builder->AddUnaryOp(type_id, SpvOpLoad, var_id);
  return load_inst->result_id();
}

void InstrumentPass::GenBuiltinOutputCode(uint32_t builtin_id,
                                          uint32_t builtin_off,
                                          uint32_t base_offset_id,
                                          InstructionBuilder* builder) {
  // Load and store builtin
  uint32_t load_id = GenVarLoad(builtin_id, builder);
  GenDebugOutputFieldCode(base_offset_id, builtin_off, load_id, builder);
}

void InstrumentPass::GenStageStreamWriteCode(uint32_t stage_idx,
                                             uint32_t base_offset_id,
                                             InstructionBuilder* builder) {
  // TODO(greg-lunarg): Add support for all stages
  switch (stage_idx) {
    case SpvExecutionModelVertex: {
      // Load and store VertexId and InstanceId
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInVertexIndex),
          kInstVertOutVertexIndex, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInstanceIndex),
          kInstVertOutInstanceIndex, base_offset_id, builder);
    } break;
    case SpvExecutionModelGLCompute:
    case SpvExecutionModelTaskNV:
    case SpvExecutionModelMeshNV: {
      // Load and store GlobalInvocationId.
      uint32_t load_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInGlobalInvocationId),
          builder);
      Instruction* x_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 0);
      Instruction* y_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 1);
      Instruction* z_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 2);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdX,
                              x_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdY,
                              y_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdZ,
                              z_inst->result_id(), builder);
    } break;
    case SpvExecutionModelGeometry: {
      // Load and store PrimitiveId and InvocationId.
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstGeomOutPrimitiveId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstGeomOutInvocationId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationControl: {
      // Load and store InvocationId and PrimitiveId
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstTessCtlOutInvocationId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstTessCtlOutPrimitiveId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationEvaluation: {
      // Load and store PrimitiveId and TessCoord.uv
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstTessEvalOutPrimitiveId, base_offset_id, builder);
      uint32_t load_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInTessCoord), builder);
      Instruction* uvec3_cast_inst =
          builder->AddUnaryOp(GetVec3UintId(), SpvOpBitcast, load_id);
      uint32_t uvec3_cast_id = uvec3_cast_inst->result_id();
      Instruction* u_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 0);
      Instruction* v_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 1);
      GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordU,
                              u_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordV,
                              v_inst->result_id(), builder);
    } break;
    case SpvExecutionModelFragment: {
      // Load FragCoord and convert to Uint
      Instruction* frag_coord_inst = builder->AddUnaryOp(
          GetVec4FloatId(), SpvOpLoad,
          context()->GetBuiltinInputVarId(SpvBuiltInFragCoord));
      Instruction* uint_frag_coord_inst = builder->AddUnaryOp(
          GetVec4UintId(), SpvOpBitcast, frag_coord_inst->result_id());
      for (uint32_t u = 0; u < 2u; ++u)
        GenFragCoordEltDebugOutputCode(
            base_offset_id, uint_frag_coord_inst->result_id(), u, builder);
    } break;
    case SpvExecutionModelRayGenerationNV:
    case SpvExecutionModelIntersectionNV:
    case SpvExecutionModelAnyHitNV:
    case SpvExecutionModelClosestHitNV:
    case SpvExecutionModelMissNV:
    case SpvExecutionModelCallableNV: {
      // Load and store LaunchIdNV.
      uint32_t launch_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInLaunchIdNV), builder);
      Instruction* x_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 0);
      Instruction* y_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 1);
      Instruction* z_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 2);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdX,
                              x_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdY,
                              y_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdZ,
                              z_launch_inst->result_id(), builder);
    } break;
    default: { assert(false && "unsupported stage"); } break;
  }
}

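// A sketch of what GenDebugStreamWrite inserts at the instrumentation point,
// assuming two validation ids (all ids below are illustrative, not taken
// from a real module):
//
//   %r = OpFunctionCall %void %stream_write_fn %inst_idx %val0 %val1
//
// where %stream_write_fn is the function returned by
// GetStreamWriteFunctionId() and %inst_idx is the uint constant for
// instruction_idx.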
void InstrumentPass::GenDebugStreamWrite(
    uint32_t instruction_idx, uint32_t stage_idx,
    const std::vector<uint32_t>& validation_ids, InstructionBuilder* builder) {
  // Call debug output function. Pass the output function id, instruction_idx
  // and validation ids as args.
  uint32_t val_id_cnt = static_cast<uint32_t>(validation_ids.size());
  uint32_t output_func_id = GetStreamWriteFunctionId(stage_idx, val_id_cnt);
  std::vector<uint32_t> args = {output_func_id,
                                builder->GetUintConstantId(instruction_idx)};
  (void)args.insert(args.end(), validation_ids.begin(), validation_ids.end());
  (void)builder->AddNaryOp(GetVoidId(), SpvOpFunctionCall, args);
}

uint32_t InstrumentPass::GenDebugDirectRead(
    const std::vector<uint32_t>& offset_ids, InstructionBuilder* builder) {
  // Call debug input function. Pass the input function id and offset ids as
  // args.
  uint32_t off_id_cnt = static_cast<uint32_t>(offset_ids.size());
  uint32_t input_func_id = GetDirectReadFunctionId(off_id_cnt);
  std::vector<uint32_t> args = {input_func_id};
  (void)args.insert(args.end(), offset_ids.begin(), offset_ids.end());
  return builder->AddNaryOp(GetUintId(), SpvOpFunctionCall, args)->result_id();
}

bool InstrumentPass::IsSameBlockOp(const Instruction* inst) const {
  return inst->opcode() == SpvOpSampledImage || inst->opcode() == SpvOpImage;
}

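// Example of why same-block ops must be cloned: the result of OpSampledImage
// may only be used by instructions in the same block. If the original block
//
//   %si = OpSampledImage %type %img %samp
//   %r  = OpImageSampleImplicitLod %v4float %si %coord
//
// is split between %si and %r, the sample instruction moved to the new block
// would reference an OpSampledImage defined in another block. CloneSameBlockOps
// re-creates %si with a fresh result id in the new block and rewrites the use.
// This sketch is illustrative; the ids and types are not taken from any
// particular module.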
void InstrumentPass::CloneSameBlockOps(
    std::unique_ptr<Instruction>* inst,
    std::unordered_map<uint32_t, uint32_t>* same_blk_post,
    std::unordered_map<uint32_t, Instruction*>* same_blk_pre,
    BasicBlock* block_ptr) {
  bool changed = false;
  (*inst)->ForEachInId([&same_blk_post, &same_blk_pre, &block_ptr, &changed,
                        this](uint32_t* iid) {
    const auto map_itr = (*same_blk_post).find(*iid);
    if (map_itr == (*same_blk_post).end()) {
      const auto map_itr2 = (*same_blk_pre).find(*iid);
      if (map_itr2 != (*same_blk_pre).end()) {
        // Clone pre-call same-block ops, map result id.
        const Instruction* in_inst = map_itr2->second;
        std::unique_ptr<Instruction> sb_inst(in_inst->Clone(context()));
        const uint32_t rid = sb_inst->result_id();
        const uint32_t nid = this->TakeNextId();
        get_decoration_mgr()->CloneDecorations(rid, nid);
        sb_inst->SetResultId(nid);
        get_def_use_mgr()->AnalyzeInstDefUse(&*sb_inst);
        (*same_blk_post)[rid] = nid;
        *iid = nid;
        changed = true;
        CloneSameBlockOps(&sb_inst, same_blk_post, same_blk_pre, block_ptr);
        block_ptr->AddInstruction(std::move(sb_inst));
      }
    } else {
      // Reset same-block op operand if necessary
      if (*iid != map_itr->second) {
        *iid = map_itr->second;
        changed = true;
      }
    }
  });
  if (changed) get_def_use_mgr()->AnalyzeInstUse(&**inst);
}

void InstrumentPass::UpdateSucceedingPhis(
    std::vector<std::unique_ptr<BasicBlock>>& new_blocks) {
  const auto first_blk = new_blocks.begin();
  const auto last_blk = new_blocks.end() - 1;
  const uint32_t first_id = (*first_blk)->id();
  const uint32_t last_id = (*last_blk)->id();
  const BasicBlock& const_last_block = *last_blk->get();
  const_last_block.ForEachSuccessorLabel(
      [&first_id, &last_id, this](const uint32_t succ) {
        BasicBlock* sbp = this->id2block_[succ];
        sbp->ForEachPhiInst([&first_id, &last_id, this](Instruction* phi) {
          bool changed = false;
          phi->ForEachInId([&first_id, &last_id, &changed](uint32_t* id) {
            if (*id == first_id) {
              *id = last_id;
              changed = true;
            }
          });
          if (changed) get_def_use_mgr()->AnalyzeInstUse(phi);
        });
      });
}

uint32_t InstrumentPass::GetOutputBufferPtrId() {
  if (output_buffer_ptr_id_ == 0) {
    output_buffer_ptr_id_ = context()->get_type_mgr()->FindPointerToType(
        GetUintId(), SpvStorageClassStorageBuffer);
  }
  return output_buffer_ptr_id_;
}

uint32_t InstrumentPass::GetInputBufferTypeId() {
  return (validation_id_ == kInstValidationIdBuffAddr) ? GetUint64Id()
                                                       : GetUintId();
}

uint32_t InstrumentPass::GetInputBufferPtrId() {
  if (input_buffer_ptr_id_ == 0) {
    input_buffer_ptr_id_ = context()->get_type_mgr()->FindPointerToType(
        GetInputBufferTypeId(), SpvStorageClassStorageBuffer);
  }
  return input_buffer_ptr_id_;
}

uint32_t InstrumentPass::GetOutputBufferBinding() {
  switch (validation_id_) {
    case kInstValidationIdBindless:
      return kDebugOutputBindingStream;
    case kInstValidationIdBuffAddr:
      return kDebugOutputBindingStream;
    case kInstValidationIdDebugPrintf:
      return kDebugOutputPrintfStream;
    default:
      assert(false && "unexpected validation id");
  }
  return 0;
}

uint32_t InstrumentPass::GetInputBufferBinding() {
  switch (validation_id_) {
    case kInstValidationIdBindless:
      return kDebugInputBindingBindless;
    case kInstValidationIdBuffAddr:
      return kDebugInputBindingBuffAddr;
    default:
      assert(false && "unexpected validation id");
  }
  return 0;
}

analysis::Type* InstrumentPass::GetUintXRuntimeArrayType(
    uint32_t width, analysis::Type** rarr_ty) {
  if (*rarr_ty == nullptr) {
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint_ty(width, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    analysis::RuntimeArray uint_rarr_ty_tmp(reg_uint_ty);
    *rarr_ty = type_mgr->GetRegisteredType(&uint_rarr_ty_tmp);
    uint32_t uint_arr_ty_id = type_mgr->GetTypeInstruction(*rarr_ty);
    // By the Vulkan spec, a pre-existing RuntimeArray of uint must be part of
    // a block, and will therefore be decorated with an ArrayStride. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(uint_arr_ty_id) == 0 &&
           "used RuntimeArray type returned");
    deco_mgr->AddDecorationVal(uint_arr_ty_id, SpvDecorationArrayStride,
                               width / 8u);
  }
  return *rarr_ty;
}

analysis::Type* InstrumentPass::GetUintRuntimeArrayType(uint32_t width) {
  analysis::Type** rarr_ty =
      (width == 64) ? &uint64_rarr_ty_ : &uint32_rarr_ty_;
  return GetUintXRuntimeArrayType(width, rarr_ty);
}

void InstrumentPass::AddStorageBufferExt() {
  if (storage_buffer_ext_defined_) return;
  if (!get_feature_mgr()->HasExtension(kSPV_KHR_storage_buffer_storage_class)) {
    context()->AddExtension("SPV_KHR_storage_buffer_storage_class");
  }
  storage_buffer_ext_defined_ = true;
}

// Return id for output buffer
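// The buffer created below corresponds roughly to this GLSL-style declaration
// (illustrative only; the pass emits the SPIR-V types and decorations
// directly):
//
//   layout(set = desc_set_, binding = GetOutputBufferBinding()) buffer
//   OutputBuffer {
//     uint written_size;  // member kDebugOutputSizeOffset, offset 0
//     uint data[];        // member kDebugOutputDataOffset, offset 4
//   };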
uint32_t InstrumentPass::GetOutputBufferId() {
  if (output_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Type* reg_uint_rarr_ty = GetUintRuntimeArrayType(32);
    analysis::Integer uint_ty(32, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    analysis::Struct buf_ty({reg_uint_ty, reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t obufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(obufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(obufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputSizeOffset,
                                  SpvDecorationOffset, 0);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputDataOffset,
                                  SpvDecorationOffset, 4);
    uint32_t obufTyPtrId_ =
        type_mgr->FindPointerToType(obufTyId, SpvStorageClassStorageBuffer);
    output_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, obufTyPtrId_, output_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationBinding,
                               GetOutputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {output_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return output_buffer_id_;
}

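// For reference, the input buffer created below corresponds roughly to this
// GLSL-style declaration (illustrative only; the element type is 64-bit for
// buffer-address validation and 32-bit otherwise):
//
//   layout(set = desc_set_, binding = GetInputBufferBinding()) buffer
//   InputBuffer {
//     uint data[];  // member 0, offset 0
//   };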
uint32_t InstrumentPass::GetInputBufferId() {
  if (input_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    uint32_t width = (validation_id_ == kInstValidationIdBuffAddr) ? 64u : 32u;
    analysis::Type* reg_uint_rarr_ty = GetUintRuntimeArrayType(width);
    analysis::Struct buf_ty({reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t ibufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(ibufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(ibufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(ibufTyId, 0, SpvDecorationOffset, 0);
    uint32_t ibufTyPtrId_ =
        type_mgr->FindPointerToType(ibufTyId, SpvStorageClassStorageBuffer);
    input_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, ibufTyPtrId_, input_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationBinding,
                               GetInputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {input_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return input_buffer_id_;
}

uint32_t InstrumentPass::GetFloatId() {
  if (float_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Float float_ty(32);
    analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
    float_id_ = type_mgr->GetTypeInstruction(reg_float_ty);
  }
  return float_id_;
}

uint32_t InstrumentPass::GetVec4FloatId() {
  if (v4float_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Float float_ty(32);
    analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
    analysis::Vector v4float_ty(reg_float_ty, 4);
    analysis::Type* reg_v4float_ty = type_mgr->GetRegisteredType(&v4float_ty);
    v4float_id_ = type_mgr->GetTypeInstruction(reg_v4float_ty);
  }
  return v4float_id_;
}

uint32_t InstrumentPass::GetUintId() {
  if (uint_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint_ty(32, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    uint_id_ = type_mgr->GetTypeInstruction(reg_uint_ty);
  }
  return uint_id_;
}

uint32_t InstrumentPass::GetUint64Id() {
  if (uint64_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint64_ty(64, false);
    analysis::Type* reg_uint64_ty = type_mgr->GetRegisteredType(&uint64_ty);
    uint64_id_ = type_mgr->GetTypeInstruction(reg_uint64_ty);
  }
  return uint64_id_;
}

uint32_t InstrumentPass::GetUint8Id() {
  if (uint8_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Integer uint8_ty(8, false);
    analysis::Type* reg_uint8_ty = type_mgr->GetRegisteredType(&uint8_ty);
    uint8_id_ = type_mgr->GetTypeInstruction(reg_uint8_ty);
  }
  return uint8_id_;
}

uint32_t InstrumentPass::GetVecUintId(uint32_t len) {
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  analysis::Integer uint_ty(32, false);
  analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
  analysis::Vector v_uint_ty(reg_uint_ty, len);
  analysis::Type* reg_v_uint_ty = type_mgr->GetRegisteredType(&v_uint_ty);
  uint32_t v_uint_id = type_mgr->GetTypeInstruction(reg_v_uint_ty);
  return v_uint_id;
}

uint32_t InstrumentPass::GetVec4UintId() {
  if (v4uint_id_ == 0) v4uint_id_ = GetVecUintId(4u);
  return v4uint_id_;
}

uint32_t InstrumentPass::GetVec3UintId() {
  if (v3uint_id_ == 0) v3uint_id_ = GetVecUintId(3u);
  return v3uint_id_;
}

uint32_t InstrumentPass::GetBoolId() {
  if (bool_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Bool bool_ty;
    analysis::Type* reg_bool_ty = type_mgr->GetRegisteredType(&bool_ty);
    bool_id_ = type_mgr->GetTypeInstruction(reg_bool_ty);
  }
  return bool_id_;
}

uint32_t InstrumentPass::GetVoidId() {
  if (void_id_ == 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Void void_ty;
    analysis::Type* reg_void_ty = type_mgr->GetRegisteredType(&void_ty);
    void_id_ = type_mgr->GetTypeInstruction(reg_void_ty);
  }
  return void_id_;
}

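// The write function generated below behaves roughly like the following
// GLSL-like sketch (a simplification of the SPIR-V actually built with
// InstructionBuilder; names are illustrative):
//
//   void stream_write_N(uint inst_idx, uint val_0, ..., uint val_N_minus_1) {
//     uint offset = atomicAdd(output_buffer.written_size, record_size);
//     if (offset + record_size <= output_buffer.data.length()) {
//       // write common header, stage-specific words, then val_0..val_N-1
//       // starting at output_buffer.data[offset]
//     }
//   }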
uint32_t InstrumentPass::GetStreamWriteFunctionId(uint32_t stage_idx,
                                                  uint32_t val_spec_param_cnt) {
  // Total param count is common params plus validation-specific
  // params
  uint32_t param_cnt = kInstCommonParamCnt + val_spec_param_cnt;
  if (param2output_func_id_[param_cnt] == 0) {
    // Create function
    param2output_func_id_[param_cnt] = TakeNextId();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    std::vector<const analysis::Type*> param_types;
    for (uint32_t c = 0; c < param_cnt; ++c)
      param_types.push_back(type_mgr->GetType(GetUintId()));
    analysis::Function func_ty(type_mgr->GetType(GetVoidId()), param_types);
    analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
    std::unique_ptr<Instruction> func_inst(
        new Instruction(get_module()->context(), SpvOpFunction, GetVoidId(),
                        param2output_func_id_[param_cnt],
                        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
                          {SpvFunctionControlMaskNone}},
                         {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
                          {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
    std::unique_ptr<Function> output_func =
        MakeUnique<Function>(std::move(func_inst));
    // Add parameters
    std::vector<uint32_t> param_vec;
    for (uint32_t c = 0; c < param_cnt; ++c) {
      uint32_t pid = TakeNextId();
      param_vec.push_back(pid);
      std::unique_ptr<Instruction> param_inst(
          new Instruction(get_module()->context(), SpvOpFunctionParameter,
                          GetUintId(), pid, {}));
      get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
      output_func->AddParameter(std::move(param_inst));
    }
    // Create first block
    uint32_t test_blk_id = TakeNextId();
    std::unique_ptr<Instruction> test_label(NewLabel(test_blk_id));
    std::unique_ptr<BasicBlock> new_blk_ptr =
        MakeUnique<BasicBlock>(std::move(test_label));
    InstructionBuilder builder(
        context(), &*new_blk_ptr,
        IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
    // Gen test if debug output buffer size will not be exceeded.
    uint32_t val_spec_offset = kInstStageOutCnt;
    uint32_t obuf_record_sz = val_spec_offset + val_spec_param_cnt;
    uint32_t buf_id = GetOutputBufferId();
    uint32_t buf_uint_ptr_id = GetOutputBufferPtrId();
    Instruction* obuf_curr_sz_ac_inst =
        builder.AddBinaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
                            builder.GetUintConstantId(kDebugOutputSizeOffset));
    // Fetch the current debug buffer written size atomically, adding the
    // size of the record to be written.
    uint32_t obuf_record_sz_id = builder.GetUintConstantId(obuf_record_sz);
    uint32_t mask_none_id = builder.GetUintConstantId(SpvMemoryAccessMaskNone);
    uint32_t scope_invok_id = builder.GetUintConstantId(SpvScopeInvocation);
    Instruction* obuf_curr_sz_inst = builder.AddQuadOp(
        GetUintId(), SpvOpAtomicIAdd, obuf_curr_sz_ac_inst->result_id(),
        scope_invok_id, mask_none_id, obuf_record_sz_id);
    uint32_t obuf_curr_sz_id = obuf_curr_sz_inst->result_id();
    // Compute new written size
    Instruction* obuf_new_sz_inst =
        builder.AddBinaryOp(GetUintId(), SpvOpIAdd, obuf_curr_sz_id,
                            builder.GetUintConstantId(obuf_record_sz));
    // Fetch the data bound
    Instruction* obuf_bnd_inst =
        builder.AddIdLiteralOp(GetUintId(), SpvOpArrayLength,
                               GetOutputBufferId(), kDebugOutputDataOffset);
    // Test that new written size is less than or equal to debug output
    // data bound
    Instruction* obuf_safe_inst = builder.AddBinaryOp(
        GetBoolId(), SpvOpULessThanEqual, obuf_new_sz_inst->result_id(),
        obuf_bnd_inst->result_id());
    uint32_t merge_blk_id = TakeNextId();
    uint32_t write_blk_id = TakeNextId();
    std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
    std::unique_ptr<Instruction> write_label(NewLabel(write_blk_id));
    (void)builder.AddConditionalBranch(obuf_safe_inst->result_id(),
                                       write_blk_id, merge_blk_id, merge_blk_id,
                                       SpvSelectionControlMaskNone);
    // Close safety test block and gen write block
    new_blk_ptr->SetParent(&*output_func);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    new_blk_ptr = MakeUnique<BasicBlock>(std::move(write_label));
    builder.SetInsertPoint(&*new_blk_ptr);
    // Generate common and stage-specific debug record members
    GenCommonStreamWriteCode(obuf_record_sz, param_vec[kInstCommonParamInstIdx],
                             stage_idx, obuf_curr_sz_id, &builder);
    GenStageStreamWriteCode(stage_idx, obuf_curr_sz_id, &builder);
    // Gen writes of validation specific data
    for (uint32_t i = 0; i < val_spec_param_cnt; ++i) {
      GenDebugOutputFieldCode(obuf_curr_sz_id, val_spec_offset + i,
                              param_vec[kInstCommonParamCnt + i], &builder);
    }
    // Close write block and gen merge block
    (void)builder.AddBranch(merge_blk_id);
    new_blk_ptr->SetParent(&*output_func);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    new_blk_ptr = MakeUnique<BasicBlock>(std::move(merge_label));
    builder.SetInsertPoint(&*new_blk_ptr);
    // Close merge block and function and add function to module
    (void)builder.AddNullaryOp(0, SpvOpReturn);
    new_blk_ptr->SetParent(&*output_func);
    output_func->AddBasicBlock(std::move(new_blk_ptr));
    std::unique_ptr<Instruction> func_end_inst(
        new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
    output_func->SetFunctionEnd(std::move(func_end_inst));
    context()->AddFunction(std::move(output_func));
  }
  return param2output_func_id_[param_cnt];
}

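// Sketch of the read function generated below for two offset parameters
// (GLSL-like, illustrative only):
//
//   uint direct_read_2(uint off0, uint off1) {
//     uint v0 = input_buffer.data[off0];
//     uint v1 = input_buffer.data[v0 + off1];
//     return v1;
//   }
//
// When the input buffer element type is 64-bit (buffer-address validation),
// each loaded value is first narrowed with OpUConvert before being added to
// the next offset parameter.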
uint32_t InstrumentPass::GetDirectReadFunctionId(uint32_t param_cnt) {
  uint32_t func_id = param2input_func_id_[param_cnt];
  if (func_id != 0) return func_id;
  // Create input function for param_cnt.
  func_id = TakeNextId();
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  std::vector<const analysis::Type*> param_types;
  for (uint32_t c = 0; c < param_cnt; ++c)
    param_types.push_back(type_mgr->GetType(GetUintId()));
  uint32_t ibuf_type_id = GetInputBufferTypeId();
  analysis::Function func_ty(type_mgr->GetType(ibuf_type_id), param_types);
  analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
  std::unique_ptr<Instruction> func_inst(new Instruction(
      get_module()->context(), SpvOpFunction, ibuf_type_id, func_id,
      {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
        {SpvFunctionControlMaskNone}},
       {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
        {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
  std::unique_ptr<Function> input_func =
      MakeUnique<Function>(std::move(func_inst));
  // Add parameters
  std::vector<uint32_t> param_vec;
  for (uint32_t c = 0; c < param_cnt; ++c) {
    uint32_t pid = TakeNextId();
    param_vec.push_back(pid);
    std::unique_ptr<Instruction> param_inst(new Instruction(
        get_module()->context(), SpvOpFunctionParameter, GetUintId(), pid, {}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
    input_func->AddParameter(std::move(param_inst));
  }
  // Create block
  uint32_t blk_id = TakeNextId();
  std::unique_ptr<Instruction> blk_label(NewLabel(blk_id));
  std::unique_ptr<BasicBlock> new_blk_ptr =
      MakeUnique<BasicBlock>(std::move(blk_label));
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // For each offset parameter, generate new offset with parameter, adding last
  // loaded value if it exists, and load value from input buffer at new offset.
  // Return last loaded value.
  uint32_t buf_id = GetInputBufferId();
  uint32_t buf_ptr_id = GetInputBufferPtrId();
  uint32_t last_value_id = 0;
  for (uint32_t p = 0; p < param_cnt; ++p) {
    uint32_t offset_id;
    if (p == 0) {
      offset_id = param_vec[0];
    } else {
      if (ibuf_type_id != GetUintId()) {
        Instruction* ucvt_inst =
            builder.AddUnaryOp(GetUintId(), SpvOpUConvert, last_value_id);
        last_value_id = ucvt_inst->result_id();
      }
      Instruction* offset_inst = builder.AddBinaryOp(
          GetUintId(), SpvOpIAdd, last_value_id, param_vec[p]);
      offset_id = offset_inst->result_id();
    }
    Instruction* ac_inst = builder.AddTernaryOp(
        buf_ptr_id, SpvOpAccessChain, buf_id,
        builder.GetUintConstantId(kDebugInputDataOffset), offset_id);
    Instruction* load_inst =
        builder.AddUnaryOp(ibuf_type_id, SpvOpLoad, ac_inst->result_id());
    last_value_id = load_inst->result_id();
  }
  (void)builder.AddInstruction(MakeUnique<Instruction>(
      context(), SpvOpReturnValue, 0, 0,
      std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {last_value_id}}}));
  // Close block and function and add function to module
  new_blk_ptr->SetParent(&*input_func);
  input_func->AddBasicBlock(std::move(new_blk_ptr));
  std::unique_ptr<Instruction> func_end_inst(
      new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
  get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
  input_func->SetFunctionEnd(std::move(func_end_inst));
  context()->AddFunction(std::move(input_func));
  param2input_func_id_[param_cnt] = func_id;
  return func_id;
}

bool InstrumentPass::InstrumentFunction(Function* func, uint32_t stage_idx,
                                        InstProcessFunction& pfn) {
  bool modified = false;
  // Compute function index
  uint32_t function_idx = 0;
  for (auto fii = get_module()->begin(); fii != get_module()->end(); ++fii) {
    if (&*fii == func) break;
    ++function_idx;
  }
  std::vector<std::unique_ptr<BasicBlock>> new_blks;
  // Using block iterators here because of block erasures and insertions.
  for (auto bi = func->begin(); bi != func->end(); ++bi) {
    for (auto ii = bi->begin(); ii != bi->end();) {
      // Generate instrumentation if warranted
      pfn(ii, bi, stage_idx, &new_blks);
      if (new_blks.size() == 0) {
        ++ii;
        continue;
      }
      // Add new blocks to label id map
      for (auto& blk : new_blks) id2block_[blk->id()] = &*blk;
      // If there are new blocks we know there will always be two or
      // more, so update succeeding phis with label of new last block.
      size_t newBlocksSize = new_blks.size();
      assert(newBlocksSize > 1);
      UpdateSucceedingPhis(new_blks);
      // Replace original block with new block(s)
      bi = bi.Erase();
      for (auto& bb : new_blks) {
        bb->SetParent(func);
      }
      bi = bi.InsertBefore(&new_blks);
      // Reset block iterator to last new block
      for (size_t i = 0; i < newBlocksSize - 1; i++) ++bi;
      modified = true;
      // Restart instrumenting at beginning of last new block,
      // but skip over any new phi or copy instruction.
      ii = bi->begin();
      if (ii->opcode() == SpvOpPhi || ii->opcode() == SpvOpCopyObject) ++ii;
      new_blks.clear();
    }
  }
  return modified;
}

bool InstrumentPass::InstProcessCallTreeFromRoots(InstProcessFunction& pfn,
                                                  std::queue<uint32_t>* roots,
                                                  uint32_t stage_idx) {
  bool modified = false;
  std::unordered_set<uint32_t> done;
  // Don't process input and output functions
  for (auto& ifn : param2input_func_id_) done.insert(ifn.second);
  for (auto& ofn : param2output_func_id_) done.insert(ofn.second);
  // Process all functions from roots
  while (!roots->empty()) {
    const uint32_t fi = roots->front();
    roots->pop();
    if (done.insert(fi).second) {
      Function* fn = id2function_.at(fi);
      // Add calls first so we don't add new output function
      context()->AddCalls(fn, roots);
      modified = InstrumentFunction(fn, stage_idx, pfn) || modified;
    }
  }
  return modified;
}

bool InstrumentPass::InstProcessEntryPointCallTree(InstProcessFunction& pfn) {
  // Make sure all entry points have the same execution model. Do not
  // instrument if they do not.
  // TODO(greg-lunarg): Handle mixed stages. Technically, a shader module
  // can contain entry points with different execution models, although
  // such modules will likely be rare as GLSL and HLSL are geared toward
  // one model per module. In such cases we will need
  // to clone any functions which are in the call trees of entrypoints
  // with differing execution models.
  uint32_t ecnt = 0;
  uint32_t stage = SpvExecutionModelMax;
  for (auto& e : get_module()->entry_points()) {
    if (ecnt == 0)
      stage = e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx);
    else if (e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx) !=
             stage) {
      if (consumer()) {
        std::string message = "Mixed stage shader module not supported";
        consumer()(SPV_MSG_ERROR, 0, {0, 0, 0}, message.c_str());
      }
      return false;
    }
    ++ecnt;
  }
  // Check for supported stages
  if (stage != SpvExecutionModelVertex && stage != SpvExecutionModelFragment &&
      stage != SpvExecutionModelGeometry &&
      stage != SpvExecutionModelGLCompute &&
      stage != SpvExecutionModelTessellationControl &&
      stage != SpvExecutionModelTessellationEvaluation &&
      stage != SpvExecutionModelTaskNV && stage != SpvExecutionModelMeshNV &&
      stage != SpvExecutionModelRayGenerationNV &&
      stage != SpvExecutionModelIntersectionNV &&
      stage != SpvExecutionModelAnyHitNV &&
      stage != SpvExecutionModelClosestHitNV &&
      stage != SpvExecutionModelMissNV &&
      stage != SpvExecutionModelCallableNV) {
    if (consumer()) {
      std::string message = "Stage not supported by instrumentation";
      consumer()(SPV_MSG_ERROR, 0, {0, 0, 0}, message.c_str());
    }
    return false;
  }
  // Add together the roots of all entry points
  std::queue<uint32_t> roots;
  for (auto& e : get_module()->entry_points()) {
    roots.push(e.GetSingleWordInOperand(kEntryPointFunctionIdInIdx));
  }
  bool modified = InstProcessCallTreeFromRoots(pfn, &roots, stage);
  return modified;
}

void InstrumentPass::InitializeInstrument() {
  output_buffer_id_ = 0;
  output_buffer_ptr_id_ = 0;
  input_buffer_ptr_id_ = 0;
  input_buffer_id_ = 0;
  float_id_ = 0;
  v4float_id_ = 0;
  uint_id_ = 0;
  uint64_id_ = 0;
  uint8_id_ = 0;
  v4uint_id_ = 0;
  v3uint_id_ = 0;
  bool_id_ = 0;
  void_id_ = 0;
  storage_buffer_ext_defined_ = false;
  uint32_rarr_ty_ = nullptr;
  uint64_rarr_ty_ = nullptr;

  // clear collections
  id2function_.clear();
  id2block_.clear();

  // clear maps
  param2input_func_id_.clear();
  param2output_func_id_.clear();

  // Initialize function and block maps.
  for (auto& fn : *get_module()) {
    id2function_[fn.result_id()] = &fn;
    for (auto& blk : fn) {
      id2block_[blk.id()] = &blk;
    }
  }

  // Remember original instruction offsets
  uint32_t module_offset = 0;
  Module* module = get_module();
  for (auto& i : context()->capabilities()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->extensions()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->ext_inst_imports()) {
    (void)i;
    ++module_offset;
  }
  ++module_offset;  // memory_model
  for (auto& i : module->entry_points()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->execution_modes()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs1()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs2()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->debugs3()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->ext_inst_debuginfo()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->annotations()) {
    (void)i;
    ++module_offset;
  }
  for (auto& i : module->types_values()) {
    module_offset += 1;
    module_offset += static_cast<uint32_t>(i.dbg_line_insts().size());
  }

  auto curr_fn = get_module()->begin();
  for (; curr_fn != get_module()->end(); ++curr_fn) {
    // Count function instruction
    module_offset += 1;
    curr_fn->ForEachParam(
        [&module_offset](const Instruction*) { module_offset += 1; }, true);
    for (auto& blk : *curr_fn) {
      // Count label
      module_offset += 1;
      for (auto& inst : blk) {
        module_offset += static_cast<uint32_t>(inst.dbg_line_insts().size());
        uid2offset_[inst.unique_id()] = module_offset;
        module_offset += 1;
      }
    }
    // Count function end instruction
    module_offset += 1;
  }
}

}  // namespace opt
}  // namespace spvtools