// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() { return Type; };
  uint32_t getNumID() { return LiteralNum[0]; };
  std::string getLiteralStr() { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }
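  // Illustrative note (added, not in the original source): a LITERAL_STRING
  // operand always reserves room for its terminating null, so "abc" packs
  // into (3 + 4) / 4 = 1 word, while "abcd" needs (4 + 4) / 4 = 2 words.
  // A NUMBERID operand, such as a result or type ID, is always one word.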

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
      : contents_(init.begin(), init.end()) {}
  operator ArrayRef<SPIRVOperand *>() { return contents_; }
  void push_back(SPIRVOperand *op) { contents_.push_back(op); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  SPIRVOperand *&operator[](size_t i) { return contents_[i]; }

  const SmallVector<SPIRVOperand *, 8> &getOperands() const {
    return contents_;
  }

private:
  SmallVector<SPIRVOperand *, 8> contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
  list.push_back(elem);
  return list;
}

SPIRVOperand *MkNum(uint32_t num) {
  return new SPIRVOperand(LITERAL_INTEGER, num);
}
SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_INTEGER, num_vec);
}
SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_FLOAT, num_vec);
}
SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
SPIRVOperand *MkString(StringRef str) {
  return new SPIRVOperand(LITERAL_STRING, str);
}
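
// Illustrative sketch (added, not in the original source): operand lists are
// typically assembled with the stream operator and the Mk* helpers above,
// e.g.
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkNum(2) << MkString("main");
// where result_type_id is a hypothetical, previously allocated SPIR-V ID.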

struct SPIRVInstruction {
  // Create an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
        Operands(Ops.begin(), Ops.end()) {
    for (auto *operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
  }
  // Create an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                            ArrayRef<SPIRVOperand *> Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
        Operands(Ops.begin(), Ops.end()) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    for (auto *operand : Ops) {
      WordCount += operand->GetNumWords();
    }
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<SPIRVOperand *, 4> Operands;
};
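
// Note (added for clarity, not in the original): when serialized, each
// SPIRVInstruction becomes WordCount 32-bit words, and the SPIR-V spec packs
// the first word as (WordCount << 16) | Opcode. That packing is presumably
// what WriteWordCountAndOpcode() emits before the result ID and operands.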

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, uint32_t> SamplerMapIndexToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv
575bool SPIRVProducerPass::runOnModule(Module &module) {
David Neto0676e6f2017-07-11 18:47:44 -0400576 binaryOut = outputCInitList ? &binaryTempOut : &out;
577
Alan Bakerfcda9482018-10-02 17:09:59 -0400578 PopulateUBOTypeMaps(module);
alan-baker06cad652019-12-03 17:56:47 -0500579 PopulateStructuredCFGMaps(module);
Alan Bakerfcda9482018-10-02 17:09:59 -0400580
David Neto22f144c2017-06-12 14:26:21 -0400581 // SPIR-V always begins with its header information
582 outputHeader();
583
David Netoc6f3ab22018-04-06 18:02:31 -0400584 const DataLayout &DL = module.getDataLayout();
585
David Neto22f144c2017-06-12 14:26:21 -0400586 // Gather information from the LLVM IR that we require.
David Netoc6f3ab22018-04-06 18:02:31 -0400587 GenerateLLVMIRInfo(module, DL);
David Neto22f144c2017-06-12 14:26:21 -0400588
David Neto22f144c2017-06-12 14:26:21 -0400589 // Collect information on global variables too.
590 for (GlobalVariable &GV : module.globals()) {
591 // If the GV is one of our special __spirv_* variables, remove the
592 // initializer as it was only placed there to force LLVM to not throw the
593 // value away.
594 if (GV.getName().startswith("__spirv_")) {
595 GV.setInitializer(nullptr);
596 }
597
598 // Collect types' information from global variable.
599 FindTypePerGlobalVar(GV);
600
601 // Collect constant information from global variable.
602 FindConstantPerGlobalVar(GV);
603
604 // If the variable is an input, entry points need to know about it.
605 if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
David Netofb9a7972017-08-25 17:08:24 -0400606 getEntryPointInterfacesVec().insert(&GV);
David Neto22f144c2017-06-12 14:26:21 -0400607 }
608 }
609
610 // If there are extended instructions, generate OpExtInstImport.
611 if (FindExtInst(module)) {
612 GenerateExtInstImport();
613 }
614
615 // Generate SPIRV instructions for types.
Alan Bakerfcda9482018-10-02 17:09:59 -0400616 GenerateSPIRVTypes(module.getContext(), module);
David Neto22f144c2017-06-12 14:26:21 -0400617
618 // Generate SPIRV constants.
619 GenerateSPIRVConstants();
620
621 // If we have a sampler map, we might have literal samplers to generate.
622 if (0 < getSamplerMap().size()) {
623 GenerateSamplers(module);
624 }
625
626 // Generate SPIRV variables.
627 for (GlobalVariable &GV : module.globals()) {
628 GenerateGlobalVar(GV);
629 }
David Neto862b7d82018-06-14 18:48:37 -0400630 GenerateResourceVars(module);
David Netoc6f3ab22018-04-06 18:02:31 -0400631 GenerateWorkgroupVars();
David Neto22f144c2017-06-12 14:26:21 -0400632
633 // Generate SPIRV instructions for each function.
634 for (Function &F : module) {
635 if (F.isDeclaration()) {
636 continue;
637 }
638
David Neto862b7d82018-06-14 18:48:37 -0400639 GenerateDescriptorMapInfo(DL, F);
640
David Neto22f144c2017-06-12 14:26:21 -0400641 // Generate Function Prologue.
642 GenerateFuncPrologue(F);
643
644 // Generate SPIRV instructions for function body.
645 GenerateFuncBody(F);
646
647 // Generate Function Epilogue.
648 GenerateFuncEpilogue();
649 }
650
651 HandleDeferredInstruction();
David Neto1a1a0582017-07-07 12:01:44 -0400652 HandleDeferredDecorations(DL);
David Neto22f144c2017-06-12 14:26:21 -0400653
654 // Generate SPIRV module information.
David Neto5c22a252018-03-15 16:07:41 -0400655 GenerateModuleInfo(module);
David Neto22f144c2017-06-12 14:26:21 -0400656
alan-baker00e7a582019-06-07 12:54:21 -0400657 WriteSPIRVBinary();
David Neto22f144c2017-06-12 14:26:21 -0400658
659 // We need to patch the SPIR-V header to set bound correctly.
660 patchHeader();
David Neto0676e6f2017-07-11 18:47:44 -0400661
662 if (outputCInitList) {
663 bool first = true;
David Neto0676e6f2017-07-11 18:47:44 -0400664 std::ostringstream os;
665
David Neto57fb0b92017-08-04 15:35:09 -0400666 auto emit_word = [&os, &first](uint32_t word) {
David Neto0676e6f2017-07-11 18:47:44 -0400667 if (!first)
David Neto57fb0b92017-08-04 15:35:09 -0400668 os << ",\n";
669 os << word;
David Neto0676e6f2017-07-11 18:47:44 -0400670 first = false;
671 };
672
673 os << "{";
David Neto57fb0b92017-08-04 15:35:09 -0400674 const std::string str(binaryTempOut.str());
675 for (unsigned i = 0; i < str.size(); i += 4) {
676 const uint32_t a = static_cast<unsigned char>(str[i]);
677 const uint32_t b = static_cast<unsigned char>(str[i + 1]);
678 const uint32_t c = static_cast<unsigned char>(str[i + 2]);
679 const uint32_t d = static_cast<unsigned char>(str[i + 3]);
680 emit_word(a | (b << 8) | (c << 16) | (d << 24));
David Neto0676e6f2017-07-11 18:47:44 -0400681 }
682 os << "}\n";
683 out << os.str();
684 }
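  // Note (added for clarity): the loop above reassembles each 32-bit SPIR-V
  // word from four consecutive bytes in little-endian order; for example the
  // leading bytes 0x03 0x02 0x23 0x07 of a SPIR-V binary become the word
  // 0x07230203, the SPIR-V magic number.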

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
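
// Note (added for clarity, not in the original): the five header words
// emitted above are, in order: the magic number, the version, the generator
// word (Google's vendor ID 21 in the high 16 bits), the ID bound placeholder
// that patchHeader() later overwrites, and the reserved schema word (0).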
709
710void SPIRVProducerPass::patchHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400711 // for a binary we just write the value of nextID over bound
712 binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
713 patchBoundOffset);
David Neto22f144c2017-06-12 14:26:21 -0400714}
715
David Netoc6f3ab22018-04-06 18:02:31 -0400716void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
David Neto22f144c2017-06-12 14:26:21 -0400717 // This function generates LLVM IR for function such as global variable for
718 // argument, constant and pointer type for argument access. These information
719 // is artificial one because we need Vulkan SPIR-V output. This function is
720 // executed ahead of FindType and FindConstant.
David Neto22f144c2017-06-12 14:26:21 -0400721 LLVMContext &Context = M.getContext();
722
David Neto862b7d82018-06-14 18:48:37 -0400723 FindGlobalConstVars(M, DL);
David Neto5c22a252018-03-15 16:07:41 -0400724
David Neto862b7d82018-06-14 18:48:37 -0400725 FindResourceVars(M, DL);
David Neto22f144c2017-06-12 14:26:21 -0400726
727 bool HasWorkGroupBuiltin = false;
728 for (GlobalVariable &GV : M.globals()) {
729 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
730 if (spv::BuiltInWorkgroupSize == BuiltinType) {
731 HasWorkGroupBuiltin = true;
732 }
733 }
734
David Neto862b7d82018-06-14 18:48:37 -0400735 FindTypesForSamplerMap(M);
736 FindTypesForResourceVars(M);
Alan Baker202c8c72018-08-13 13:47:44 -0400737 FindWorkgroupVars(M);
David Neto22f144c2017-06-12 14:26:21 -0400738
739 for (Function &F : M) {
Kévin Petitabef4522019-03-27 13:08:01 +0000740 if (F.isDeclaration()) {
David Neto22f144c2017-06-12 14:26:21 -0400741 continue;
742 }
743
744 for (BasicBlock &BB : F) {
745 for (Instruction &I : BB) {
746 if (I.getOpcode() == Instruction::ZExt ||
747 I.getOpcode() == Instruction::SExt ||
748 I.getOpcode() == Instruction::UIToFP) {
749 // If there is zext with i1 type, it will be changed to OpSelect. The
750 // OpSelect needs constant 0 and 1 so the constants are added here.
751
752 auto OpTy = I.getOperand(0)->getType();
753
Kévin Petit24272b62018-10-18 19:16:12 +0000754 if (OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -0400755 if (I.getOpcode() == Instruction::ZExt) {
David Neto22f144c2017-06-12 14:26:21 -0400756 FindConstant(Constant::getNullValue(I.getType()));
Kévin Petit7bfb8992019-02-26 13:45:08 +0000757 FindConstant(ConstantInt::get(I.getType(), 1));
David Neto22f144c2017-06-12 14:26:21 -0400758 } else if (I.getOpcode() == Instruction::SExt) {
David Neto22f144c2017-06-12 14:26:21 -0400759 FindConstant(Constant::getNullValue(I.getType()));
Kévin Petit7bfb8992019-02-26 13:45:08 +0000760 FindConstant(ConstantInt::getSigned(I.getType(), -1));
David Neto22f144c2017-06-12 14:26:21 -0400761 } else {
762 FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
763 FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
764 }
765 }
766 } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
David Neto862b7d82018-06-14 18:48:37 -0400767 StringRef callee_name = Call->getCalledFunction()->getName();
David Neto22f144c2017-06-12 14:26:21 -0400768
769 // Handle image type specially.
alan-bakerf67468c2019-11-25 15:51:49 -0500770 if (clspv::IsSampledImageRead(callee_name)) {
David Neto22f144c2017-06-12 14:26:21 -0400771 TypeMapType &OpImageTypeMap = getImageTypeMap();
772 Type *ImageTy =
773 Call->getArgOperand(0)->getType()->getPointerElementType();
774 OpImageTypeMap[ImageTy] = 0;
alan-bakerabd82722019-12-03 17:14:51 -0500775 getImageTypeList().insert(ImageTy);
David Neto22f144c2017-06-12 14:26:21 -0400776
alan-bakerf67468c2019-11-25 15:51:49 -0500777 // All sampled reads need a floating point 0 for the Lod operand.
David Neto22f144c2017-06-12 14:26:21 -0400778 FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
779 }
David Neto5c22a252018-03-15 16:07:41 -0400780
alan-bakerce179f12019-12-06 19:02:22 -0500781 if (clspv::IsImageQuery(callee_name)) {
782 Type *ImageTy = Call->getOperand(0)->getType();
783 const uint32_t dim = ImageDimensionality(ImageTy);
784 uint32_t components = dim;
785 if (components > 1) {
786 // OpImageQuerySize* return |components| components.
787 FindType(VectorType::get(Type::getInt32Ty(Context), components));
788 if (dim == 3 && IsGetImageDim(callee_name)) {
789 // get_image_dim for 3D images returns an int4.
790 FindType(
791 VectorType::get(Type::getInt32Ty(Context), components + 1));
792 }
793 }
794
795 if (clspv::IsSampledImageType(ImageTy)) {
796 // All sampled image queries need a integer 0 for the Lod
797 // operand.
798 FindConstant(ConstantInt::get(Context, APInt(32, 0)));
799 }
David Neto5c22a252018-03-15 16:07:41 -0400800 }
David Neto22f144c2017-06-12 14:26:21 -0400801 }
802 }
803 }
804
Kévin Petitabef4522019-03-27 13:08:01 +0000805 // More things to do on kernel functions
806 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
807 if (const MDNode *MD =
808 dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
809 // We generate constants if the WorkgroupSize builtin is being used.
810 if (HasWorkGroupBuiltin) {
811 // Collect constant information for work group size.
812 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
813 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
814 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -0400815 }
816 }
817 }
818
alan-bakerf67468c2019-11-25 15:51:49 -0500819 // TODO(alan-baker): make this better.
820 if (M.getTypeByName("opencl.image2d_ro_t.float") ||
821 M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
822 M.getTypeByName("opencl.image2d_wo_t.float") ||
823 M.getTypeByName("opencl.image3d_ro_t.float") ||
824 M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
825 M.getTypeByName("opencl.image3d_wo_t.float")) {
826 FindType(Type::getFloatTy(Context));
827 } else if (M.getTypeByName("opencl.image2d_ro_t.uint") ||
828 M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
829 M.getTypeByName("opencl.image2d_wo_t.uint") ||
830 M.getTypeByName("opencl.image3d_ro_t.uint") ||
831 M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
832 M.getTypeByName("opencl.image3d_wo_t.uint")) {
833 FindType(Type::getInt32Ty(Context));
834 } else if (M.getTypeByName("opencl.image2d_ro_t.int") ||
835 M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
836 M.getTypeByName("opencl.image2d_wo_t.int") ||
837 M.getTypeByName("opencl.image3d_ro_t.int") ||
838 M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
839 M.getTypeByName("opencl.image3d_wo_t.int")) {
840 // Nothing for now...
841 } else {
842 // This was likely an UndefValue.
David Neto22f144c2017-06-12 14:26:21 -0400843 FindType(Type::getFloatTy(Context));
844 }
845
846 // Collect types' information from function.
847 FindTypePerFunc(F);
848
849 // Collect constant information from function.
850 FindConstantPerFunc(F);
851 }
852}
853
David Neto862b7d82018-06-14 18:48:37 -0400854void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
alan-baker56f7aff2019-05-22 08:06:42 -0400855 clspv::NormalizeGlobalVariables(M);
856
David Neto862b7d82018-06-14 18:48:37 -0400857 SmallVector<GlobalVariable *, 8> GVList;
858 SmallVector<GlobalVariable *, 8> DeadGVList;
859 for (GlobalVariable &GV : M.globals()) {
860 if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
861 if (GV.use_empty()) {
862 DeadGVList.push_back(&GV);
863 } else {
864 GVList.push_back(&GV);
865 }
866 }
867 }
868
869 // Remove dead global __constant variables.
870 for (auto GV : DeadGVList) {
871 GV->eraseFromParent();
872 }
873 DeadGVList.clear();
874
875 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
876 // For now, we only support a single storage buffer.
877 if (GVList.size() > 0) {
878 assert(GVList.size() == 1);
879 const auto *GV = GVList[0];
880 const auto constants_byte_size =
Alan Bakerfcda9482018-10-02 17:09:59 -0400881 (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
David Neto862b7d82018-06-14 18:48:37 -0400882 const size_t kConstantMaxSize = 65536;
883 if (constants_byte_size > kConstantMaxSize) {
884 outs() << "Max __constant capacity of " << kConstantMaxSize
885 << " bytes exceeded: " << constants_byte_size << " bytes used\n";
886 llvm_unreachable("Max __constant capacity exceeded");
887 }
888 }
889 } else {
890 // Change global constant variable's address space to ModuleScopePrivate.
891 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
892 for (auto GV : GVList) {
893 // Create new gv with ModuleScopePrivate address space.
894 Type *NewGVTy = GV->getType()->getPointerElementType();
895 GlobalVariable *NewGV = new GlobalVariable(
896 M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
897 nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
898 NewGV->takeName(GV);
899
900 const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
901 SmallVector<User *, 8> CandidateUsers;
902
903 auto record_called_function_type_as_user =
904 [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
905 // Find argument index.
906 unsigned index = 0;
907 for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
908 if (gv == call->getOperand(i)) {
909 // TODO(dneto): Should we break here?
910 index = i;
911 }
912 }
913
914 // Record function type with global constant.
915 GlobalConstFuncTyMap[call->getFunctionType()] =
916 std::make_pair(call->getFunctionType(), index);
917 };
918
919 for (User *GVU : GVUsers) {
920 if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
921 record_called_function_type_as_user(GV, Call);
922 } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
923 // Check GEP users.
924 for (User *GEPU : GEP->users()) {
925 if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
926 record_called_function_type_as_user(GEP, GEPCall);
927 }
928 }
929 }
930
931 CandidateUsers.push_back(GVU);
932 }
933
934 for (User *U : CandidateUsers) {
935 // Update users of gv with new gv.
alan-bakered80f572019-02-11 17:28:26 -0500936 if (!isa<Constant>(U)) {
937 // #254: Can't change operands of a constant, but this shouldn't be
938 // something that sticks around in the module.
939 U->replaceUsesOfWith(GV, NewGV);
940 }
David Neto862b7d82018-06-14 18:48:37 -0400941 }
942
943 // Delete original gv.
944 GV->eraseFromParent();
945 }
946 }
947}
948
Radek Szymanskibe4b0c42018-10-04 22:20:53 +0100949void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
David Neto862b7d82018-06-14 18:48:37 -0400950 ResourceVarInfoList.clear();
951 FunctionToResourceVarsMap.clear();
952 ModuleOrderedResourceVars.reset();
953 // Normally, there is one resource variable per clspv.resource.var.*
954 // function, since that is unique'd by arg type and index. By design,
955 // we can share these resource variables across kernels because all
956 // kernels use the same descriptor set.
957 //
958 // But if the user requested distinct descriptor sets per kernel, then
959 // the descriptor allocator has made different (set,binding) pairs for
960 // the same (type,arg_index) pair. Since we can decorate a resource
961 // variable with only exactly one DescriptorSet and Binding, we are
962 // forced in this case to make distinct resource variables whenever
963 // the same clspv.reource.var.X function is seen with disintct
964 // (set,binding) values.
965 const bool always_distinct_sets =
966 clspv::Option::DistinctKernelDescriptorSets();
967 for (Function &F : M) {
968 // Rely on the fact the resource var functions have a stable ordering
969 // in the module.
Alan Baker202c8c72018-08-13 13:47:44 -0400970 if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -0400971 // Find all calls to this function with distinct set and binding pairs.
972 // Save them in ResourceVarInfoList.
973
974 // Determine uniqueness of the (set,binding) pairs only withing this
975 // one resource-var builtin function.
976 using SetAndBinding = std::pair<unsigned, unsigned>;
977 // Maps set and binding to the resource var info.
978 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
979 bool first_use = true;
980 for (auto &U : F.uses()) {
981 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
982 const auto set = unsigned(
983 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
984 const auto binding = unsigned(
985 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
986 const auto arg_kind = clspv::ArgKind(
987 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
988 const auto arg_index = unsigned(
989 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -0400990 const auto coherent = unsigned(
991 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -0400992
993 // Find or make the resource var info for this combination.
994 ResourceVarInfo *rv = nullptr;
995 if (always_distinct_sets) {
996 // Make a new resource var any time we see a different
997 // (set,binding) pair.
998 SetAndBinding key{set, binding};
999 auto where = set_and_binding_map.find(key);
1000 if (where == set_and_binding_map.end()) {
1001 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001002 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001003 ResourceVarInfoList.emplace_back(rv);
1004 set_and_binding_map[key] = rv;
1005 } else {
1006 rv = where->second;
1007 }
1008 } else {
1009 // The default is to make exactly one resource for each
1010 // clspv.resource.var.* function.
1011 if (first_use) {
1012 first_use = false;
1013 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001014 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001015 ResourceVarInfoList.emplace_back(rv);
1016 } else {
1017 rv = ResourceVarInfoList.back().get();
1018 }
1019 }
1020
1021 // Now populate FunctionToResourceVarsMap.
1022 auto &mapping =
1023 FunctionToResourceVarsMap[call->getParent()->getParent()];
1024 while (mapping.size() <= arg_index) {
1025 mapping.push_back(nullptr);
1026 }
1027 mapping[arg_index] = rv;
1028 }
1029 }
1030 }
1031 }
1032
1033 // Populate ModuleOrderedResourceVars.
1034 for (Function &F : M) {
1035 auto where = FunctionToResourceVarsMap.find(&F);
1036 if (where != FunctionToResourceVarsMap.end()) {
1037 for (auto &rv : where->second) {
1038 if (rv != nullptr) {
1039 ModuleOrderedResourceVars.insert(rv);
1040 }
1041 }
1042 }
1043 }
1044 if (ShowResourceVars) {
1045 for (auto *info : ModuleOrderedResourceVars) {
1046 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1047 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1048 << "\n";
1049 }
1050 }
1051}
1052
David Neto22f144c2017-06-12 14:26:21 -04001053bool SPIRVProducerPass::FindExtInst(Module &M) {
1054 LLVMContext &Context = M.getContext();
1055 bool HasExtInst = false;
1056
1057 for (Function &F : M) {
1058 for (BasicBlock &BB : F) {
1059 for (Instruction &I : BB) {
1060 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1061 Function *Callee = Call->getCalledFunction();
1062 // Check whether this call is for extend instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001063 auto callee_name = Callee->getName();
1064 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1065 const glsl::ExtInst IndirectEInst =
1066 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001067
David Neto3fbb4072017-10-16 11:28:14 -04001068 HasExtInst |=
1069 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1070
1071 if (IndirectEInst) {
1072 // Register extra constants if needed.
1073
1074 // Registers a type and constant for computing the result of the
1075 // given instruction. If the result of the instruction is a vector,
1076 // then make a splat vector constant with the same number of
1077 // elements.
1078 auto register_constant = [this, &I](Constant *constant) {
1079 FindType(constant->getType());
1080 FindConstant(constant);
1081 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1082 // Register the splat vector of the value with the same
1083 // width as the result of the instruction.
1084 auto *vec_constant = ConstantVector::getSplat(
1085 static_cast<unsigned>(vectorTy->getNumElements()),
1086 constant);
1087 FindConstant(vec_constant);
1088 FindType(vec_constant->getType());
1089 }
1090 };
1091 switch (IndirectEInst) {
1092 case glsl::ExtInstFindUMsb:
1093 // clz needs OpExtInst and OpISub with constant 31, or splat
1094 // vector of 31. Add it to the constant list here.
1095 register_constant(
1096 ConstantInt::get(Type::getInt32Ty(Context), 31));
1097 break;
1098 case glsl::ExtInstAcos:
1099 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001100 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001101 case glsl::ExtInstAtan2:
1102 // We need 1/pi for acospi, asinpi, atan2pi.
1103 register_constant(
1104 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1105 break;
1106 default:
1107 assert(false && "internally inconsistent");
1108 }
David Neto22f144c2017-06-12 14:26:21 -04001109 }
1110 }
1111 }
1112 }
1113 }
1114
1115 return HasExtInst;
1116}
1117
1118void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1119 // Investigate global variable's type.
1120 FindType(GV.getType());
1121}
1122
1123void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1124 // Investigate function's type.
1125 FunctionType *FTy = F.getFunctionType();
1126
1127 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1128 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001129 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001130 if (GlobalConstFuncTyMap.count(FTy)) {
1131 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1132 SmallVector<Type *, 4> NewFuncParamTys;
1133 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1134 Type *ParamTy = FTy->getParamType(i);
1135 if (i == GVCstArgIdx) {
1136 Type *EleTy = ParamTy->getPointerElementType();
1137 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1138 }
1139
1140 NewFuncParamTys.push_back(ParamTy);
1141 }
1142
1143 FunctionType *NewFTy =
1144 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1145 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1146 FTy = NewFTy;
1147 }
1148
1149 FindType(FTy);
1150 } else {
1151 // As kernel functions do not have parameters, create new function type and
1152 // add it to type map.
1153 SmallVector<Type *, 4> NewFuncParamTys;
1154 FunctionType *NewFTy =
1155 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1156 FindType(NewFTy);
1157 }
1158
1159  // Investigate the types used by instructions in the function body.
1160 for (BasicBlock &BB : F) {
1161 for (Instruction &I : BB) {
1162 if (isa<ShuffleVectorInst>(I)) {
1163 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1164 // Ignore type for mask of shuffle vector instruction.
1165 if (i == 2) {
1166 continue;
1167 }
1168
1169 Value *Op = I.getOperand(i);
1170 if (!isa<MetadataAsValue>(Op)) {
1171 FindType(Op->getType());
1172 }
1173 }
1174
1175 FindType(I.getType());
1176 continue;
1177 }
1178
David Neto862b7d82018-06-14 18:48:37 -04001179 CallInst *Call = dyn_cast<CallInst>(&I);
1180
1181 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001182 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001183 // This is a fake call representing access to a resource variable.
1184 // We handle that elsewhere.
1185 continue;
1186 }
1187
Alan Baker202c8c72018-08-13 13:47:44 -04001188 if (Call && Call->getCalledFunction()->getName().startswith(
1189 clspv::WorkgroupAccessorFunction())) {
1190 // This is a fake call representing access to a workgroup variable.
1191 // We handle that elsewhere.
1192 continue;
1193 }
1194
David Neto22f144c2017-06-12 14:26:21 -04001195 // Work through the operands of the instruction.
1196 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1197 Value *const Op = I.getOperand(i);
1198 // If any of the operands is a constant, find the type!
1199 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1200 FindType(Op->getType());
1201 }
1202 }
1203
1204 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001205 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001206          // Do not examine a call instruction's operand types here.
1207 break;
1208 }
Alan Baker202c8c72018-08-13 13:47:44 -04001209 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1210 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1211 clspv::WorkgroupAccessorFunction())) {
1212 // This is a fake call representing access to a workgroup variable.
1213 // We handle that elsewhere.
1214 continue;
1215 }
1216 }
David Neto22f144c2017-06-12 14:26:21 -04001217 if (!isa<MetadataAsValue>(&Op)) {
1218 FindType(Op->getType());
1219 continue;
1220 }
1221 }
1222
David Neto22f144c2017-06-12 14:26:21 -04001223 // We don't want to track the type of this call as we are going to replace
1224 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001225 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001226 Call->getCalledFunction()->getName())) {
1227 continue;
1228 }
1229
1230 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1231        // If the GEP's base operand is in the ModuleScopePrivate address space,
1232        // then the GEP result also uses the ModuleScopePrivate address space.
1233 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1234 // Add pointer type with private address space for global constant to
1235 // type list.
1236 Type *EleTy = I.getType()->getPointerElementType();
1237 Type *NewPTy =
1238 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1239
1240 FindType(NewPTy);
1241 continue;
1242 }
1243 }
1244
1245 FindType(I.getType());
1246 }
1247 }
1248}
1249
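// FindTypesForSamplerMap registers the SPIR-V sampler type when literal
// samplers are in use, either because the module calls the literal-sampler
// helper function or because a sampler map was supplied. The sampler is
// modelled as a pointer to the opaque "opencl.sampler_t" struct in the
// UniformConstant address space.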
David Neto862b7d82018-06-14 18:48:37 -04001250void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1251 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001252 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001253 0 < getSamplerMap().size()) {
1254 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1255 if (!SamplerStructTy) {
1256 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1257 }
1258
1259 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1260
1261 FindType(SamplerTy);
1262 }
1263}
1264
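// FindTypesForResourceVars walks the module-ordered resource variables
// (buffers, UBOs, POD arguments, images, samplers) and records the types
// their OpVariables will need, noting which struct types must be decorated
// as Block and which nested arrays and structs will need explicit layout
// (Offset/ArrayStride) decorations later.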
1265void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1266 // Record types so they are generated.
1267 TypesNeedingLayout.reset();
1268 StructTypesNeedingBlock.reset();
1269
1270 // To match older clspv codegen, generate the float type first if required
1271 // for images.
1272 for (const auto *info : ModuleOrderedResourceVars) {
1273 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1274 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001275 if (IsIntImageType(info->var_fn->getReturnType())) {
1276 // Nothing for now...
1277 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1278 FindType(Type::getInt32Ty(M.getContext()));
1279 }
1280
1281 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001282 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001283 }
1284 }
1285
1286 for (const auto *info : ModuleOrderedResourceVars) {
1287 Type *type = info->var_fn->getReturnType();
1288
1289 switch (info->arg_kind) {
1290 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001291 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001292 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1293 StructTypesNeedingBlock.insert(sty);
1294 } else {
1295 errs() << *type << "\n";
1296 llvm_unreachable("Buffer arguments must map to structures!");
1297 }
1298 break;
1299 case clspv::ArgKind::Pod:
1300 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1301 StructTypesNeedingBlock.insert(sty);
1302 } else {
1303 errs() << *type << "\n";
1304 llvm_unreachable("POD arguments must map to structures!");
1305 }
1306 break;
1307 case clspv::ArgKind::ReadOnlyImage:
1308 case clspv::ArgKind::WriteOnlyImage:
1309 case clspv::ArgKind::Sampler:
1310 // Sampler and image types map to the pointee type but
1311 // in the uniform constant address space.
1312 type = PointerType::get(type->getPointerElementType(),
1313 clspv::AddressSpace::UniformConstant);
1314 break;
1315 default:
1316 break;
1317 }
1318
1319 // The converted type is the type of the OpVariable we will generate.
1320 // If the pointee type is an array of size zero, FindType will convert it
1321 // to a runtime array.
1322 FindType(type);
1323 }
1324
alan-bakerdcd97412019-09-16 15:32:30 -04001325 // If module constants are clustered in a storage buffer then that struct
1326 // needs layout decorations.
1327 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1328 for (GlobalVariable &GV : M.globals()) {
1329 PointerType *PTy = cast<PointerType>(GV.getType());
1330 const auto AS = PTy->getAddressSpace();
1331 const bool module_scope_constant_external_init =
1332 (AS == AddressSpace::Constant) && GV.hasInitializer();
1333 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1334 if (module_scope_constant_external_init &&
1335 spv::BuiltInMax == BuiltinType) {
1336 StructTypesNeedingBlock.insert(
1337 cast<StructType>(PTy->getPointerElementType()));
1338 }
1339 }
1340 }
1341
David Neto862b7d82018-06-14 18:48:37 -04001342 // Traverse the arrays and structures underneath each Block, and
1343 // mark them as needing layout.
1344 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1345 StructTypesNeedingBlock.end());
1346 while (!work_list.empty()) {
1347 Type *type = work_list.back();
1348 work_list.pop_back();
1349 TypesNeedingLayout.insert(type);
1350 switch (type->getTypeID()) {
1351 case Type::ArrayTyID:
1352 work_list.push_back(type->getArrayElementType());
1353 if (!Hack_generate_runtime_array_stride_early) {
1354 // Remember this array type for deferred decoration.
1355 TypesNeedingArrayStride.insert(type);
1356 }
1357 break;
1358 case Type::StructTyID:
1359 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1360 work_list.push_back(elem_ty);
1361 }
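      // Falls through to the shared default break: the struct itself was
      // already added to TypesNeedingLayout above; only its element types
      // still need to be visited.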
1362 default:
1363 // This type and its contained types don't get layout.
1364 break;
1365 }
1366 }
1367}
1368
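// FindWorkgroupVars translates the module-level metadata that records the
// SpecId assigned to each pointer-to-local kernel argument into per-argument
// bookkeeping, reserving SPIR-V ids for the variable, the array-size spec
// constant, the array type, and the pointer type of each distinct SpecId.
// Illustrative sketch of the metadata shape this loop expects (hedged; the
// exact metadata name comes from clspv::LocalSpecIdMetadataName()):
//   !<local-spec-id metadata> = !{!0}
//   !0 = !{<kernel function>, i32 <argument index>, i32 <spec id>}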
Alan Baker202c8c72018-08-13 13:47:44 -04001369void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1370 // The SpecId assignment for pointer-to-local arguments is recorded in
1371 // module-level metadata. Translate that information into local argument
1372 // information.
1373 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001374 if (!nmd)
1375 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001376 for (auto operand : nmd->operands()) {
1377 MDTuple *tuple = cast<MDTuple>(operand);
1378 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1379 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001380 ConstantAsMetadata *arg_index_md =
1381 cast<ConstantAsMetadata>(tuple->getOperand(1));
1382 int arg_index = static_cast<int>(
1383 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1384 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001385
1386 ConstantAsMetadata *spec_id_md =
1387 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001388 int spec_id = static_cast<int>(
1389 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001390
1391 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1392 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001393 if (LocalSpecIdInfoMap.count(spec_id))
1394 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001395
1396 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1397 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1398 nextID + 1, nextID + 2,
1399 nextID + 3, spec_id};
1400 LocalSpecIdInfoMap[spec_id] = info;
1401 nextID += 4;
1402
1403 // Ensure the types necessary for this argument get generated.
1404 Type *IdxTy = Type::getInt32Ty(M.getContext());
1405 FindConstant(ConstantInt::get(IdxTy, 0));
1406 FindType(IdxTy);
1407 FindType(arg->getType());
1408 }
1409}
1410
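// FindType adds a type, and recursively its subtypes, to the ordered type
// list consumed later by GenerateSPIRVTypes. Pointers to opaque structs in
// the constant or global address spaces (images, samplers) are redirected to
// the UniformConstant address space, and non-zero-length arrays also pull in
// i32 because their length is emitted as an OpConstant.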
David Neto22f144c2017-06-12 14:26:21 -04001411void SPIRVProducerPass::FindType(Type *Ty) {
1412 TypeList &TyList = getTypeList();
1413
1414 if (0 != TyList.idFor(Ty)) {
1415 return;
1416 }
1417
1418 if (Ty->isPointerTy()) {
1419 auto AddrSpace = Ty->getPointerAddressSpace();
1420 if ((AddressSpace::Constant == AddrSpace) ||
1421 (AddressSpace::Global == AddrSpace)) {
1422 auto PointeeTy = Ty->getPointerElementType();
1423
1424 if (PointeeTy->isStructTy() &&
1425 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1426 FindType(PointeeTy);
1427 auto ActualPointerTy =
1428 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1429 FindType(ActualPointerTy);
1430 return;
1431 }
1432 }
1433 }
1434
David Neto862b7d82018-06-14 18:48:37 -04001435  // By convention, an LLVM array type with 0 elements maps to
1436  // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which has a
1437  // constant number of elements, so we also need the type of that length
1438  // constant (i32).
1439 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1440 if (arrayTy->getNumElements() > 0) {
1441 LLVMContext &Context = Ty->getContext();
1442 FindType(Type::getInt32Ty(Context));
1443 }
David Neto22f144c2017-06-12 14:26:21 -04001444 }
1445
1446 for (Type *SubTy : Ty->subtypes()) {
1447 FindType(SubTy);
1448 }
1449
1450 TyList.insert(Ty);
1451}
1452
1453void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1454  // If the global variable has a (non-undef) initializer.
1455 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001456 // Generate the constant if it's not the initializer to a module scope
1457 // constant that we will expect in a storage buffer.
1458 const bool module_scope_constant_external_init =
1459 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1460 clspv::Option::ModuleConstantsInStorageBuffer();
1461 if (!module_scope_constant_external_init) {
1462 FindConstant(GV.getInitializer());
1463 }
David Neto22f144c2017-06-12 14:26:21 -04001464 }
1465}
1466
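// FindConstantPerFunc scans each instruction for constant operands that will
// need SPIR-V constants, skipping clspv helper calls, and adds the extra
// masks and shift amounts required to emulate <4 x i8> insert/extract on a
// packed i32.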
1467void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1468 // Investigate constants in function body.
1469 for (BasicBlock &BB : F) {
1470 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001471 if (auto *call = dyn_cast<CallInst>(&I)) {
1472 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001473 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001474 // We've handled these constants elsewhere, so skip it.
1475 continue;
1476 }
Alan Baker202c8c72018-08-13 13:47:44 -04001477 if (name.startswith(clspv::ResourceAccessorFunction())) {
1478 continue;
1479 }
1480 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001481 continue;
1482 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001483 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1484 // Skip the first operand that has the SPIR-V Opcode
1485 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1486 if (isa<Constant>(I.getOperand(i)) &&
1487 !isa<GlobalValue>(I.getOperand(i))) {
1488 FindConstant(I.getOperand(i));
1489 }
1490 }
1491 continue;
1492 }
David Neto22f144c2017-06-12 14:26:21 -04001493 }
1494
1495 if (isa<AllocaInst>(I)) {
1496        // An alloca has a constant for its number of elements. Ignore it.
1497 continue;
1498 } else if (isa<ShuffleVectorInst>(I)) {
1499 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1500 // Ignore constant for mask of shuffle vector instruction.
1501 if (i == 2) {
1502 continue;
1503 }
1504
1505 if (isa<Constant>(I.getOperand(i)) &&
1506 !isa<GlobalValue>(I.getOperand(i))) {
1507 FindConstant(I.getOperand(i));
1508 }
1509 }
1510
1511 continue;
1512 } else if (isa<InsertElementInst>(I)) {
1513 // Handle InsertElement with <4 x i8> specially.
1514 Type *CompositeTy = I.getOperand(0)->getType();
1515 if (is4xi8vec(CompositeTy)) {
1516 LLVMContext &Context = CompositeTy->getContext();
1517 if (isa<Constant>(I.getOperand(0))) {
1518 FindConstant(I.getOperand(0));
1519 }
1520
1521 if (isa<Constant>(I.getOperand(1))) {
1522 FindConstant(I.getOperand(1));
1523 }
1524
1525 // Add mask constant 0xFF.
1526 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1527 FindConstant(CstFF);
1528
1529 // Add shift amount constant.
1530 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1531 uint64_t Idx = CI->getZExtValue();
1532 Constant *CstShiftAmount =
1533 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1534 FindConstant(CstShiftAmount);
1535 }
1536
1537 continue;
1538 }
1539
1540 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1541 // Ignore constant for index of InsertElement instruction.
1542 if (i == 2) {
1543 continue;
1544 }
1545
1546 if (isa<Constant>(I.getOperand(i)) &&
1547 !isa<GlobalValue>(I.getOperand(i))) {
1548 FindConstant(I.getOperand(i));
1549 }
1550 }
1551
1552 continue;
1553 } else if (isa<ExtractElementInst>(I)) {
1554 // Handle ExtractElement with <4 x i8> specially.
1555 Type *CompositeTy = I.getOperand(0)->getType();
1556 if (is4xi8vec(CompositeTy)) {
1557 LLVMContext &Context = CompositeTy->getContext();
1558 if (isa<Constant>(I.getOperand(0))) {
1559 FindConstant(I.getOperand(0));
1560 }
1561
1562 // Add mask constant 0xFF.
1563 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1564 FindConstant(CstFF);
1565
1566 // Add shift amount constant.
1567 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1568 uint64_t Idx = CI->getZExtValue();
1569 Constant *CstShiftAmount =
1570 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1571 FindConstant(CstShiftAmount);
1572 } else {
1573 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1574 FindConstant(Cst8);
1575 }
1576
1577 continue;
1578 }
1579
1580 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1581 // Ignore constant for index of ExtractElement instruction.
1582 if (i == 1) {
1583 continue;
1584 }
1585
1586 if (isa<Constant>(I.getOperand(i)) &&
1587 !isa<GlobalValue>(I.getOperand(i))) {
1588 FindConstant(I.getOperand(i));
1589 }
1590 }
1591
1592 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001593 } else if ((Instruction::Xor == I.getOpcode()) &&
1594 I.getType()->isIntegerTy(1)) {
1595 // We special case for Xor where the type is i1 and one of the arguments
1596 // is a constant 1 (true), this is an OpLogicalNot in SPIR-V, and we
1597 // don't need the constant
David Neto22f144c2017-06-12 14:26:21 -04001598 bool foundConstantTrue = false;
1599 for (Use &Op : I.operands()) {
1600 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1601 auto CI = cast<ConstantInt>(Op);
1602
1603 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001604            // If we have already found the true constant, then (typically only
1605            // at -O0) this is an OpLogicalNot taking a constant argument, so
1606            // record that constant anyway.
David Neto22f144c2017-06-12 14:26:21 -04001607 FindConstant(Op);
1608 } else {
1609 foundConstantTrue = true;
1610 }
1611 }
1612 }
1613
1614 continue;
David Netod2de94a2017-08-28 17:27:47 -04001615 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001616        // Special case when i8 is not natively supported.
1617 if (!clspv::Option::Int8Support()) {
1618 // For truncation to i8 we mask against 255.
1619 Type *ToTy = I.getType();
1620 if (8u == ToTy->getPrimitiveSizeInBits()) {
1621 LLVMContext &Context = ToTy->getContext();
1622 Constant *Cst255 =
1623 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1624 FindConstant(Cst255);
1625 }
David Netod2de94a2017-08-28 17:27:47 -04001626 }
Neil Henning39672102017-09-29 14:33:13 +01001627 } else if (isa<AtomicRMWInst>(I)) {
1628 LLVMContext &Context = I.getContext();
1629
1630 FindConstant(
1631 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1632 FindConstant(ConstantInt::get(
1633 Type::getInt32Ty(Context),
1634 spv::MemorySemanticsUniformMemoryMask |
1635 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001636 }
1637
1638 for (Use &Op : I.operands()) {
1639 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1640 FindConstant(Op);
1641 }
1642 }
1643 }
1644 }
1645}
1646
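// FindConstant records a constant, together with its operands or elements,
// so that GenerateSPIRVConstants can emit it later. Global values are
// skipped when module-scope constants live in a storage buffer, and <4 x i8>
// values are tracked specially because they are packed into a single i32.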
1647void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001648 ValueList &CstList = getConstantList();
1649
David Netofb9a7972017-08-25 17:08:24 -04001650 // If V is already tracked, ignore it.
1651 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001652 return;
1653 }
1654
David Neto862b7d82018-06-14 18:48:37 -04001655 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1656 return;
1657 }
1658
David Neto22f144c2017-06-12 14:26:21 -04001659 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001660 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001661
1662 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001663 if (is4xi8vec(CstTy)) {
1664 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001665 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001666 }
1667 }
1668
1669 if (Cst->getNumOperands()) {
1670 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1671 ++I) {
1672 FindConstant(*I);
1673 }
1674
David Netofb9a7972017-08-25 17:08:24 -04001675 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001676 return;
1677 } else if (const ConstantDataSequential *CDS =
1678 dyn_cast<ConstantDataSequential>(Cst)) {
1679 // Add constants for each element to constant list.
1680 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1681 Constant *EleCst = CDS->getElementAsConstant(i);
1682 FindConstant(EleCst);
1683 }
1684 }
1685
1686 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001687 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001688 }
1689}
1690
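// GetStorageClass maps an OpenCL address space to the SPIR-V storage class
// used for Vulkan: Global maps to StorageBuffer, Constant to either Uniform
// or StorageBuffer depending on the constant-args option, Local to
// Workgroup, Private to Function, and so on.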
1691spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1692 switch (AddrSpace) {
1693 default:
1694 llvm_unreachable("Unsupported OpenCL address space");
1695 case AddressSpace::Private:
1696 return spv::StorageClassFunction;
1697 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001698 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001699 case AddressSpace::Constant:
1700 return clspv::Option::ConstantArgsInUniformBuffer()
1701 ? spv::StorageClassUniform
1702 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001703 case AddressSpace::Input:
1704 return spv::StorageClassInput;
1705 case AddressSpace::Local:
1706 return spv::StorageClassWorkgroup;
1707 case AddressSpace::UniformConstant:
1708 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001709 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001710 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001711 case AddressSpace::ModuleScopePrivate:
1712 return spv::StorageClassPrivate;
1713 }
1714}
1715
David Neto862b7d82018-06-14 18:48:37 -04001716spv::StorageClass
1717SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1718 switch (arg_kind) {
1719 case clspv::ArgKind::Buffer:
1720 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001721 case clspv::ArgKind::BufferUBO:
1722 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001723 case clspv::ArgKind::Pod:
1724 return clspv::Option::PodArgsInUniformBuffer()
1725 ? spv::StorageClassUniform
1726 : spv::StorageClassStorageBuffer;
1727 case clspv::ArgKind::Local:
1728 return spv::StorageClassWorkgroup;
1729 case clspv::ArgKind::ReadOnlyImage:
1730 case clspv::ArgKind::WriteOnlyImage:
1731 case clspv::ArgKind::Sampler:
1732 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001733 default:
1734 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001735 }
1736}
1737
David Neto22f144c2017-06-12 14:26:21 -04001738spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1739 return StringSwitch<spv::BuiltIn>(Name)
1740 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1741 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1742 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1743 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1744 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1745 .Default(spv::BuiltInMax);
1746}
1747
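// GenerateExtInstImport emits the single OpExtInstImport for the
// "GLSL.std.450" extended instruction set; its result id is reused by every
// OpExtInst emitted later in the module.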
1748void SPIRVProducerPass::GenerateExtInstImport() {
1749 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1750 uint32_t &ExtInstImportID = getOpExtInstImportID();
1751
1752 //
1753 // Generate OpExtInstImport.
1754 //
1755 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001756 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001757 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1758 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001759}
1760
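// GenerateSPIRVTypes emits one OpType* instruction per entry in the type
// list, together with the layout and Block decorations that accompany them.
// Illustrative sketch (hedged, assuming the usual 4-byte offsets; not a
// literal dump from this pass): a storage-buffer struct { float x; int y; }
// becomes roughly
//   %float = OpTypeFloat 32
//   %int   = OpTypeInt 32 0
//   %S     = OpTypeStruct %float %int
//            OpMemberDecorate %S 0 Offset 0
//            OpMemberDecorate %S 1 Offset 4
//            OpDecorate %S Block
//   %ptr   = OpTypePointer StorageBuffer %S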
alan-bakerb6b09dc2018-11-08 16:59:28 -05001761void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1762 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001763 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1764 ValueMapType &VMap = getValueMap();
1765 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001766 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001767
1768  // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
1769  // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1770 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1771
1772 for (Type *Ty : getTypeList()) {
1773 // Update TypeMap with nextID for reference later.
1774 TypeMap[Ty] = nextID;
1775
1776 switch (Ty->getTypeID()) {
1777 default: {
1778 Ty->print(errs());
1779 llvm_unreachable("Unsupported type???");
1780 break;
1781 }
1782 case Type::MetadataTyID:
1783 case Type::LabelTyID: {
1784 // Ignore these types.
1785 break;
1786 }
1787 case Type::PointerTyID: {
1788 PointerType *PTy = cast<PointerType>(Ty);
1789 unsigned AddrSpace = PTy->getAddressSpace();
1790
1791 // For the purposes of our Vulkan SPIR-V type system, constant and global
1792 // are conflated.
1793 bool UseExistingOpTypePointer = false;
1794 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001795 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1796 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001797        // Check whether we already created this type (for instance, if we had
1798        // both a constant <type>* and a global <type>*, the SPIR-V type is
1799        // created for whichever is seen first and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001800 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1801 if (0 < TypeMap.count(GlobalTy)) {
1802 TypeMap[PTy] = TypeMap[GlobalTy];
1803 UseExistingOpTypePointer = true;
1804 break;
1805 }
David Neto22f144c2017-06-12 14:26:21 -04001806 }
1807 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001808 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1809 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001810
alan-bakerb6b09dc2018-11-08 16:59:28 -05001811        // Check whether we already created this type (for instance, if we had
1812        // both a constant <type>* and a global <type>*, the SPIR-V type is
1813        // created for whichever is seen first and shared by both).
1814 auto ConstantTy =
1815 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001816 if (0 < TypeMap.count(ConstantTy)) {
1817 TypeMap[PTy] = TypeMap[ConstantTy];
1818 UseExistingOpTypePointer = true;
1819 }
David Neto22f144c2017-06-12 14:26:21 -04001820 }
1821 }
1822
David Neto862b7d82018-06-14 18:48:37 -04001823 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001824
David Neto862b7d82018-06-14 18:48:37 -04001825 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001826 //
1827 // Generate OpTypePointer.
1828 //
1829
1830 // OpTypePointer
1831 // Ops[0] = Storage Class
1832 // Ops[1] = Element Type ID
1833 SPIRVOperandList Ops;
1834
David Neto257c3892018-04-11 13:19:45 -04001835 Ops << MkNum(GetStorageClass(AddrSpace))
1836 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001837
David Neto87846742018-04-11 17:36:22 -04001838 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001839 SPIRVInstList.push_back(Inst);
1840 }
David Neto22f144c2017-06-12 14:26:21 -04001841 break;
1842 }
1843 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001844 StructType *STy = cast<StructType>(Ty);
1845
1846 // Handle sampler type.
1847 if (STy->isOpaque()) {
1848 if (STy->getName().equals("opencl.sampler_t")) {
1849 //
1850 // Generate OpTypeSampler
1851 //
1852 // Empty Ops.
1853 SPIRVOperandList Ops;
1854
David Neto87846742018-04-11 17:36:22 -04001855 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001856 SPIRVInstList.push_back(Inst);
1857 break;
alan-bakerf67468c2019-11-25 15:51:49 -05001858 } else if (STy->getName().startswith("opencl.image2d_ro_t") ||
1859 STy->getName().startswith("opencl.image2d_wo_t") ||
1860 STy->getName().startswith("opencl.image3d_ro_t") ||
1861 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001862 //
1863 // Generate OpTypeImage
1864 //
1865 // Ops[0] = Sampled Type ID
1866 // Ops[1] = Dim ID
1867 // Ops[2] = Depth (Literal Number)
1868 // Ops[3] = Arrayed (Literal Number)
1869 // Ops[4] = MS (Literal Number)
1870 // Ops[5] = Sampled (Literal Number)
1871 // Ops[6] = Image Format ID
1872 //
1873 SPIRVOperandList Ops;
1874
alan-bakerf67468c2019-11-25 15:51:49 -05001875 uint32_t ImageTyID = nextID++;
1876 uint32_t SampledTyID = 0;
1877 if (STy->getName().contains(".float")) {
1878 SampledTyID = lookupType(Type::getFloatTy(Context));
1879 } else if (STy->getName().contains(".uint")) {
1880 SampledTyID = lookupType(Type::getInt32Ty(Context));
1881 } else if (STy->getName().contains(".int")) {
1882 // Generate a signed 32-bit integer if necessary.
1883            // Generate a signed 32-bit integer type if necessary.
1884 int32ID = nextID++;
1885 SPIRVOperandList intOps;
1886 intOps << MkNum(32);
1887 intOps << MkNum(1);
1888 auto signed_int =
1889 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1890 SPIRVInstList.push_back(signed_int);
1891 }
1892 SampledTyID = int32ID;
1893
1894 // Generate a vec4 of the signed int if necessary.
1895 if (v4int32ID == 0) {
1896 v4int32ID = nextID++;
1897 SPIRVOperandList vecOps;
1898 vecOps << MkId(int32ID);
1899 vecOps << MkNum(4);
1900 auto int_vec =
1901 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
1902 SPIRVInstList.push_back(int_vec);
1903 }
1904 } else {
1905 // This was likely an UndefValue.
1906 SampledTyID = lookupType(Type::getFloatTy(Context));
1907 }
David Neto257c3892018-04-11 13:19:45 -04001908 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001909
1910 spv::Dim DimID = spv::Dim2D;
alan-bakerf67468c2019-11-25 15:51:49 -05001911 if (STy->getName().startswith("opencl.image3d_ro_t") ||
1912 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001913 DimID = spv::Dim3D;
1914 }
David Neto257c3892018-04-11 13:19:45 -04001915 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001916
1917 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001918 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001919
1920 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001921 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001922
1923 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001924 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001925
1926 // TODO: Set up Sampled.
1927 //
1928 // From Spec
1929 //
1930 // 0 indicates this is only known at run time, not at compile time
1931          // 1 indicates it will be used with a sampler
1932          // 2 indicates it will be used without a sampler (a storage image)
1933 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05001934 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04001935 Sampled = 2;
1936 }
David Neto257c3892018-04-11 13:19:45 -04001937 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04001938
1939 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04001940 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04001941
alan-bakerf67468c2019-11-25 15:51:49 -05001942 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001943 SPIRVInstList.push_back(Inst);
1944 break;
1945 }
1946 }
1947
1948 //
1949 // Generate OpTypeStruct
1950 //
1951 // Ops[0] ... Ops[n] = Member IDs
1952 SPIRVOperandList Ops;
1953
1954 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04001955 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04001956 }
1957
David Neto22f144c2017-06-12 14:26:21 -04001958 uint32_t STyID = nextID;
1959
alan-bakerb6b09dc2018-11-08 16:59:28 -05001960 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001961 SPIRVInstList.push_back(Inst);
1962
1963 // Generate OpMemberDecorate.
1964 auto DecoInsertPoint =
1965 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1966 [](SPIRVInstruction *Inst) -> bool {
1967 return Inst->getOpcode() != spv::OpDecorate &&
1968 Inst->getOpcode() != spv::OpMemberDecorate &&
1969 Inst->getOpcode() != spv::OpExtInstImport;
1970 });
1971
David Netoc463b372017-08-10 15:32:21 -04001972 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04001973 // Search for the correct offsets if this type was remapped.
1974 std::vector<uint32_t> *offsets = nullptr;
1975 auto iter = RemappedUBOTypeOffsets.find(STy);
1976 if (iter != RemappedUBOTypeOffsets.end()) {
1977 offsets = &iter->second;
1978 }
David Netoc463b372017-08-10 15:32:21 -04001979
David Neto862b7d82018-06-14 18:48:37 -04001980 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04001981 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1982 MemberIdx++) {
1983 // Ops[0] = Structure Type ID
1984 // Ops[1] = Member Index(Literal Number)
1985 // Ops[2] = Decoration (Offset)
1986 // Ops[3] = Byte Offset (Literal Number)
1987 Ops.clear();
1988
David Neto257c3892018-04-11 13:19:45 -04001989 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04001990
alan-bakerb6b09dc2018-11-08 16:59:28 -05001991 auto ByteOffset =
1992 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04001993 if (offsets) {
1994 ByteOffset = (*offsets)[MemberIdx];
1995 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05001996 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04001997 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04001998 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04001999
David Neto87846742018-04-11 17:36:22 -04002000 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002001 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002002 }
2003
2004 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002005 if (StructTypesNeedingBlock.idFor(STy)) {
2006 Ops.clear();
2007 // Use Block decorations with StorageBuffer storage class.
2008 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002009
David Neto862b7d82018-06-14 18:48:37 -04002010 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2011 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002012 }
2013 break;
2014 }
2015 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002016 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002017
2018 if (BitWidth == 1) {
David Neto87846742018-04-11 17:36:22 -04002019 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002020 SPIRVInstList.push_back(Inst);
2021 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002022 if (!clspv::Option::Int8Support()) {
2023          // Without native i8 support, i8 is recorded in TypeMap as i32.
2024          // Whichever of the two LLVM types is requested first creates the
2025          // SPIR-V type; the one requested second is aliased to that same
2026          // SPIR-V type.
2027 unsigned aliasToWidth = 0;
2028 if (BitWidth == 8) {
2029 aliasToWidth = 32;
2030 BitWidth = 32;
2031 } else if (BitWidth == 32) {
2032 aliasToWidth = 8;
2033 }
2034 if (aliasToWidth) {
2035 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2036 auto where = TypeMap.find(otherType);
2037 if (where == TypeMap.end()) {
2038 // Go ahead and make it, but also map the other type to it.
2039 TypeMap[otherType] = nextID;
2040 } else {
2041 // Alias this SPIR-V type the existing type.
2042 TypeMap[Ty] = where->second;
2043 break;
2044 }
David Neto391aeb12017-08-26 15:51:58 -04002045 }
David Neto22f144c2017-06-12 14:26:21 -04002046 }
2047
David Neto257c3892018-04-11 13:19:45 -04002048 SPIRVOperandList Ops;
2049 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002050
2051 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002052 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002053 }
2054 break;
2055 }
2056 case Type::HalfTyID:
2057 case Type::FloatTyID:
2058 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002059 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2060 SPIRVOperand *WidthOp =
2061 new SPIRVOperand(SPIRVOperandType::LITERAL_INTEGER, BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002062
2063 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002064 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04002065 break;
2066 }
2067 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002068 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002069 const uint64_t Length = ArrTy->getArrayNumElements();
2070 if (Length == 0) {
2071 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002072
David Neto862b7d82018-06-14 18:48:37 -04002073 // Only generate the type once.
2074 // TODO(dneto): Can it ever be generated more than once?
2075 // Doesn't LLVM type uniqueness guarantee we'll only see this
2076 // once?
2077 Type *EleTy = ArrTy->getArrayElementType();
2078 if (OpRuntimeTyMap.count(EleTy) == 0) {
2079 uint32_t OpTypeRuntimeArrayID = nextID;
2080 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002081
David Neto862b7d82018-06-14 18:48:37 -04002082 //
2083 // Generate OpTypeRuntimeArray.
2084 //
David Neto22f144c2017-06-12 14:26:21 -04002085
David Neto862b7d82018-06-14 18:48:37 -04002086 // OpTypeRuntimeArray
2087 // Ops[0] = Element Type ID
2088 SPIRVOperandList Ops;
2089 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002090
David Neto862b7d82018-06-14 18:48:37 -04002091 SPIRVInstList.push_back(
2092 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002093
David Neto862b7d82018-06-14 18:48:37 -04002094 if (Hack_generate_runtime_array_stride_early) {
2095 // Generate OpDecorate.
2096 auto DecoInsertPoint = std::find_if(
2097 SPIRVInstList.begin(), SPIRVInstList.end(),
2098 [](SPIRVInstruction *Inst) -> bool {
2099 return Inst->getOpcode() != spv::OpDecorate &&
2100 Inst->getOpcode() != spv::OpMemberDecorate &&
2101 Inst->getOpcode() != spv::OpExtInstImport;
2102 });
David Neto22f144c2017-06-12 14:26:21 -04002103
David Neto862b7d82018-06-14 18:48:37 -04002104 // Ops[0] = Target ID
2105 // Ops[1] = Decoration (ArrayStride)
2106 // Ops[2] = Stride Number(Literal Number)
2107 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002108
David Neto862b7d82018-06-14 18:48:37 -04002109 Ops << MkId(OpTypeRuntimeArrayID)
2110 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002111 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002112
David Neto862b7d82018-06-14 18:48:37 -04002113 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2114 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2115 }
2116 }
David Neto22f144c2017-06-12 14:26:21 -04002117
David Neto862b7d82018-06-14 18:48:37 -04002118 } else {
David Neto22f144c2017-06-12 14:26:21 -04002119
David Neto862b7d82018-06-14 18:48:37 -04002120 //
2121 // Generate OpConstant and OpTypeArray.
2122 //
2123
2124 //
2125 // Generate OpConstant for array length.
2126 //
2127 // Ops[0] = Result Type ID
2128 // Ops[1] .. Ops[n] = Values LiteralNumber
2129 SPIRVOperandList Ops;
2130
2131 Type *LengthTy = Type::getInt32Ty(Context);
2132 uint32_t ResTyID = lookupType(LengthTy);
2133 Ops << MkId(ResTyID);
2134
2135 assert(Length < UINT32_MAX);
2136 Ops << MkNum(static_cast<uint32_t>(Length));
2137
2138 // Add constant for length to constant list.
2139 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2140 AllocatedVMap[CstLength] = nextID;
2141 VMap[CstLength] = nextID;
2142 uint32_t LengthID = nextID;
2143
2144 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2145 SPIRVInstList.push_back(CstInst);
2146
2147        // Remember to generate the ArrayStride decoration later.
2148 getTypesNeedingArrayStride().insert(Ty);
2149
2150 //
2151 // Generate OpTypeArray.
2152 //
2153 // Ops[0] = Element Type ID
2154 // Ops[1] = Array Length Constant ID
2155 Ops.clear();
2156
2157 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2158 Ops << MkId(EleTyID) << MkId(LengthID);
2159
2160 // Update TypeMap with nextID.
2161 TypeMap[Ty] = nextID;
2162
2163 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2164 SPIRVInstList.push_back(ArrayInst);
2165 }
David Neto22f144c2017-06-12 14:26:21 -04002166 break;
2167 }
2168 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002169 // <4 x i8> is changed to i32 if i8 is not generally supported.
2170 if (!clspv::Option::Int8Support() &&
2171 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002172 if (Ty->getVectorNumElements() == 4) {
2173 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2174 break;
2175 } else {
2176 Ty->print(errs());
2177 llvm_unreachable("Support above i8 vector type");
2178 }
2179 }
2180
2181 // Ops[0] = Component Type ID
2182 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002183 SPIRVOperandList Ops;
2184 Ops << MkId(lookupType(Ty->getVectorElementType()))
2185 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002186
alan-bakerb6b09dc2018-11-08 16:59:28 -05002187 SPIRVInstruction *inst =
2188 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002189 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002190 break;
2191 }
2192 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002193 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002194 SPIRVInstList.push_back(Inst);
2195 break;
2196 }
2197 case Type::FunctionTyID: {
2198 // Generate SPIRV instruction for function type.
2199 FunctionType *FTy = cast<FunctionType>(Ty);
2200
2201 // Ops[0] = Return Type ID
2202 // Ops[1] ... Ops[n] = Parameter Type IDs
2203 SPIRVOperandList Ops;
2204
2205 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002206 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002207
2208 // Find SPIRV instructions for parameter types
2209 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2210 // Find SPIRV instruction for parameter type.
2211 auto ParamTy = FTy->getParamType(k);
2212 if (ParamTy->isPointerTy()) {
2213 auto PointeeTy = ParamTy->getPointerElementType();
2214 if (PointeeTy->isStructTy() &&
2215 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2216 ParamTy = PointeeTy;
2217 }
2218 }
2219
David Netoc6f3ab22018-04-06 18:02:31 -04002220 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002221 }
2222
David Neto87846742018-04-11 17:36:22 -04002223 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002224 SPIRVInstList.push_back(Inst);
2225 break;
2226 }
2227 }
2228 }
2229
2230 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002231 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002232 //
2233 // Generate OpTypeSampledImage.
2234 //
2235 // Ops[0] = Image Type ID
2236 //
2237 SPIRVOperandList Ops;
2238
David Netoc6f3ab22018-04-06 18:02:31 -04002239 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002240
alan-bakerabd82722019-12-03 17:14:51 -05002241 // Update the image type map.
2242 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002243
David Neto87846742018-04-11 17:36:22 -04002244 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002245 SPIRVInstList.push_back(Inst);
2246 }
David Netoc6f3ab22018-04-06 18:02:31 -04002247
2248 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002249 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2250 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002251 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002252
2253 // Generate the spec constant.
2254 SPIRVOperandList Ops;
2255 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002256 SPIRVInstList.push_back(
2257 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002258
2259 // Generate the array type.
2260 Ops.clear();
2261 // The element type must have been created.
2262 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2263 assert(elem_ty_id);
2264 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2265
2266 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002267 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002268
2269 Ops.clear();
2270 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002271 SPIRVInstList.push_back(new SPIRVInstruction(
2272 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002273 }
David Neto22f144c2017-06-12 14:26:21 -04002274}
2275
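// GenerateSPIRVConstants emits OpConstant* instructions for everything
// gathered by FindConstant. Note the <4 x i8> packing below: element 0 ends
// up in the most significant byte, so, as a worked example,
//   <4 x i8> <i8 1, i8 2, i8 3, i8 4>
// is emitted as the single i32 constant 0x01020304.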
2276void SPIRVProducerPass::GenerateSPIRVConstants() {
2277 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2278 ValueMapType &VMap = getValueMap();
2279 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2280 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002281 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002282
2283 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002284 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002285 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002286
2287 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002288 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002289 continue;
2290 }
2291
David Netofb9a7972017-08-25 17:08:24 -04002292 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002293 VMap[Cst] = nextID;
2294
2295 //
2296 // Generate OpConstant.
2297 //
2298
2299 // Ops[0] = Result Type ID
2300 // Ops[1] .. Ops[n] = Values LiteralNumber
2301 SPIRVOperandList Ops;
2302
David Neto257c3892018-04-11 13:19:45 -04002303 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002304
2305 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002306 spv::Op Opcode = spv::OpNop;
2307
2308 if (isa<UndefValue>(Cst)) {
2309 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002310 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002311 if (hack_undef && IsTypeNullable(Cst->getType())) {
2312 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002313 }
David Neto22f144c2017-06-12 14:26:21 -04002314 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2315 unsigned BitWidth = CI->getBitWidth();
2316 if (BitWidth == 1) {
2317 // If the bitwidth of constant is 1, generate OpConstantTrue or
2318 // OpConstantFalse.
2319 if (CI->getZExtValue()) {
2320 // Ops[0] = Result Type ID
2321 Opcode = spv::OpConstantTrue;
2322 } else {
2323 // Ops[0] = Result Type ID
2324 Opcode = spv::OpConstantFalse;
2325 }
David Neto22f144c2017-06-12 14:26:21 -04002326 } else {
2327 auto V = CI->getZExtValue();
2328 LiteralNum.push_back(V & 0xFFFFFFFF);
2329
2330 if (BitWidth > 32) {
2331 LiteralNum.push_back(V >> 32);
2332 }
2333
2334 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002335
David Neto257c3892018-04-11 13:19:45 -04002336 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002337 }
2338 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2339 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2340 Type *CFPTy = CFP->getType();
2341 if (CFPTy->isFloatTy()) {
2342 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002343 } else if (CFPTy->isDoubleTy()) {
2344 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2345 LiteralNum.push_back(FPVal >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002346 } else {
2347 CFPTy->print(errs());
2348 llvm_unreachable("Implement this ConstantFP Type");
2349 }
2350
2351 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002352
David Neto257c3892018-04-11 13:19:45 -04002353 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002354 } else if (isa<ConstantDataSequential>(Cst) &&
2355 cast<ConstantDataSequential>(Cst)->isString()) {
2356 Cst->print(errs());
2357 llvm_unreachable("Implement this Constant");
2358
2359 } else if (const ConstantDataSequential *CDS =
2360 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002361 // Let's convert <4 x i8> constant to int constant specially.
2362 // This case occurs when all the values are specified as constant
2363 // ints.
2364 Type *CstTy = Cst->getType();
2365 if (is4xi8vec(CstTy)) {
2366 LLVMContext &Context = CstTy->getContext();
2367
2368 //
2369 // Generate OpConstant with OpTypeInt 32 0.
2370 //
Neil Henning39672102017-09-29 14:33:13 +01002371 uint32_t IntValue = 0;
2372 for (unsigned k = 0; k < 4; k++) {
2373 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002374 IntValue = (IntValue << 8) | (Val & 0xffu);
2375 }
2376
2377 Type *i32 = Type::getInt32Ty(Context);
2378 Constant *CstInt = ConstantInt::get(i32, IntValue);
2379 // If this constant is already registered on VMap, use it.
2380 if (VMap.count(CstInt)) {
2381 uint32_t CstID = VMap[CstInt];
2382 VMap[Cst] = CstID;
2383 continue;
2384 }
2385
David Neto257c3892018-04-11 13:19:45 -04002386 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002387
David Neto87846742018-04-11 17:36:22 -04002388 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002389 SPIRVInstList.push_back(CstInst);
2390
2391 continue;
2392 }
2393
2394 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002395 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2396 Constant *EleCst = CDS->getElementAsConstant(k);
2397 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002398 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002399 }
2400
2401 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002402 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2403 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002404 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002405 Type *CstTy = Cst->getType();
2406 if (is4xi8vec(CstTy)) {
2407 LLVMContext &Context = CstTy->getContext();
2408
2409 //
2410 // Generate OpConstant with OpTypeInt 32 0.
2411 //
Neil Henning39672102017-09-29 14:33:13 +01002412 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002413 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2414 I != E; ++I) {
2415 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002416 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002417 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2418 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002419 }
David Neto49351ac2017-08-26 17:32:20 -04002420 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002421 }
2422
David Neto49351ac2017-08-26 17:32:20 -04002423 Type *i32 = Type::getInt32Ty(Context);
2424 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002425 // If this constant is already registered on VMap, use it.
2426 if (VMap.count(CstInt)) {
2427 uint32_t CstID = VMap[CstInt];
2428 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002429 continue;
David Neto22f144c2017-06-12 14:26:21 -04002430 }
2431
David Neto257c3892018-04-11 13:19:45 -04002432 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002433
David Neto87846742018-04-11 17:36:22 -04002434 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002435 SPIRVInstList.push_back(CstInst);
2436
David Neto19a1bad2017-08-25 15:01:41 -04002437 continue;
David Neto22f144c2017-06-12 14:26:21 -04002438 }
2439
2440 // We use a constant composite in SPIR-V for our constant aggregate in
2441 // LLVM.
2442 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002443
2444 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2445 // Look up the ID of the element of this aggregate (which we will
2446 // previously have created a constant for).
2447 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2448
2449 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002450 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002451 }
2452 } else if (Cst->isNullValue()) {
2453 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002454 } else {
2455 Cst->print(errs());
2456 llvm_unreachable("Unsupported Constant???");
2457 }
2458
alan-baker5b86ed72019-02-15 08:26:50 -05002459 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2460 // Null pointer requires variable pointers.
2461 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2462 }
2463
David Neto87846742018-04-11 17:36:22 -04002464 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002465 SPIRVInstList.push_back(CstInst);
2466 }
2467}
2468
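// GenerateSamplers creates one OpVariable of sampler type per sampler-map
// entry, assigns each a descriptor set and binding (descriptor set 0 and a
// fresh binding for entries that are never referenced), and records a
// descriptor map entry for each sampler that is actually used.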
2469void SPIRVProducerPass::GenerateSamplers(Module &M) {
2470 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002471
alan-bakerb6b09dc2018-11-08 16:59:28 -05002472 auto &sampler_map = getSamplerMap();
David Neto862b7d82018-06-14 18:48:37 -04002473 SamplerMapIndexToIDMap.clear();
David Neto22f144c2017-06-12 14:26:21 -04002474 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Neto862b7d82018-06-14 18:48:37 -04002475 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2476 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002477
David Neto862b7d82018-06-14 18:48:37 -04002478  // The sampler map may contain samplers that are never used in this
2479  // translation unit. We still need to allocate variables and bindings
2480  // for them.
2481 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002482
Kévin Petitdf71de32019-04-09 14:09:50 +01002483 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-bakerb6b09dc2018-11-08 16:59:28 -05002484 if (!var_fn)
2485 return;
David Neto862b7d82018-06-14 18:48:37 -04002486 for (auto user : var_fn->users()) {
2487 // Populate SamplerLiteralToDescriptorSetMap and
2488 // SamplerLiteralToBindingMap.
2489 //
2490 // Look for calls like
2491 // call %opencl.sampler_t addrspace(2)*
2492 // @clspv.sampler.var.literal(
2493 // i32 descriptor,
2494 // i32 binding,
2495 // i32 index-into-sampler-map)
alan-bakerb6b09dc2018-11-08 16:59:28 -05002496 if (auto *call = dyn_cast<CallInst>(user)) {
2497 const size_t index_into_sampler_map = static_cast<size_t>(
2498 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002499 if (index_into_sampler_map >= sampler_map.size()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002500 errs() << "Out of bounds index to sampler map: "
2501 << index_into_sampler_map;
David Neto862b7d82018-06-14 18:48:37 -04002502 llvm_unreachable("bad sampler init: out of bounds");
2503 }
2504
2505 auto sampler_value = sampler_map[index_into_sampler_map].first;
2506 const auto descriptor_set = static_cast<unsigned>(
2507 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2508 const auto binding = static_cast<unsigned>(
2509 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2510
2511 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2512 SamplerLiteralToBindingMap[sampler_value] = binding;
2513 used_bindings.insert(binding);
2514 }
2515 }
2516
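  // For each sampler-map entry we then emit, roughly (illustrative names,
  // not the exact generated ids):
  //   %sampler = OpVariable %_ptr_UniformConstant_sampler UniformConstant
  //   OpDecorate %sampler DescriptorSet <set>
  //   OpDecorate %sampler Binding <binding>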
2517 unsigned index = 0;
2518 for (auto SamplerLiteral : sampler_map) {
David Neto22f144c2017-06-12 14:26:21 -04002519 // Generate OpVariable.
2520 //
2521 // GIDOps[0] : Result Type ID
2522 // GIDOps[1] : Storage Class
2523 SPIRVOperandList Ops;
2524
David Neto257c3892018-04-11 13:19:45 -04002525 Ops << MkId(lookupType(SamplerTy))
2526 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002527
David Neto862b7d82018-06-14 18:48:37 -04002528 auto sampler_var_id = nextID++;
2529 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002530 SPIRVInstList.push_back(Inst);
2531
David Neto862b7d82018-06-14 18:48:37 -04002532 SamplerMapIndexToIDMap[index] = sampler_var_id;
2533 SamplerLiteralToIDMap[SamplerLiteral.first] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002534
2535 // Find Insert Point for OpDecorate.
2536 auto DecoInsertPoint =
2537 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2538 [](SPIRVInstruction *Inst) -> bool {
2539 return Inst->getOpcode() != spv::OpDecorate &&
2540 Inst->getOpcode() != spv::OpMemberDecorate &&
2541 Inst->getOpcode() != spv::OpExtInstImport;
2542 });
2543
2544 // Ops[0] = Target ID
2545 // Ops[1] = Decoration (DescriptorSet)
2546 // Ops[2] = LiteralNumber according to Decoration
2547 Ops.clear();
2548
David Neto862b7d82018-06-14 18:48:37 -04002549 unsigned descriptor_set;
2550 unsigned binding;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002551 if (SamplerLiteralToBindingMap.find(SamplerLiteral.first) ==
2552 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002553      // This sampler is not actually used. Assign it the next unused binding.
2554 for (binding = 0; used_bindings.count(binding); binding++)
2555 ;
2556 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2557 used_bindings.insert(binding);
2558 } else {
2559 descriptor_set = SamplerLiteralToDescriptorSetMap[SamplerLiteral.first];
2560 binding = SamplerLiteralToBindingMap[SamplerLiteral.first];
alan-bakercff80152019-06-15 00:38:00 -04002561
2562 version0::DescriptorMapEntry::SamplerData sampler_data = {
2563 SamplerLiteral.first};
2564 descriptorMapEntries->emplace_back(std::move(sampler_data),
2565 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002566 }
2567
2568 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2569 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002570
David Neto87846742018-04-11 17:36:22 -04002571 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002572 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2573
2574 // Ops[0] = Target ID
2575 // Ops[1] = Decoration (Binding)
2576 // Ops[2] = LiteralNumber according to Decoration
2577 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002578 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2579 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002580
David Neto87846742018-04-11 17:36:22 -04002581 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002582 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002583
2584 index++;
David Neto22f144c2017-06-12 14:26:21 -04002585 }
David Neto862b7d82018-06-14 18:48:37 -04002586}
David Neto22f144c2017-06-12 14:26:21 -04002587
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002588void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002589 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2590 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002591
David Neto862b7d82018-06-14 18:48:37 -04002592  // Generate variables. Make one for each resource var info object.
2593 for (auto *info : ModuleOrderedResourceVars) {
2594 Type *type = info->var_fn->getReturnType();
2595 // Remap the address space for opaque types.
2596 switch (info->arg_kind) {
2597 case clspv::ArgKind::Sampler:
2598 case clspv::ArgKind::ReadOnlyImage:
2599 case clspv::ArgKind::WriteOnlyImage:
2600 type = PointerType::get(type->getPointerElementType(),
2601 clspv::AddressSpace::UniformConstant);
2602 break;
2603 default:
2604 break;
2605 }
David Neto22f144c2017-06-12 14:26:21 -04002606
David Neto862b7d82018-06-14 18:48:37 -04002607 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002608
David Neto862b7d82018-06-14 18:48:37 -04002609 const auto type_id = lookupType(type);
2610 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2611 SPIRVOperandList Ops;
2612 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002613
David Neto862b7d82018-06-14 18:48:37 -04002614 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2615 SPIRVInstList.push_back(Inst);
2616
2617 // Map calls to the variable-builtin-function.
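    // Buffer and POD calls map directly to the variable id below; sampler and
    // image calls are instead recorded in ResourceVarDeferredLoadCalls so a
    // load from the variable can be emitted at each use site later.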
2618 for (auto &U : info->var_fn->uses()) {
2619 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2620 const auto set = unsigned(
2621 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2622 const auto binding = unsigned(
2623 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2624 if (set == info->descriptor_set && binding == info->binding) {
2625 switch (info->arg_kind) {
2626 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002627 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002628 case clspv::ArgKind::Pod:
2629 // The call maps to the variable directly.
2630 VMap[call] = info->var_id;
2631 break;
2632 case clspv::ArgKind::Sampler:
2633 case clspv::ArgKind::ReadOnlyImage:
2634 case clspv::ArgKind::WriteOnlyImage:
2635 // The call maps to a load we generate later.
2636 ResourceVarDeferredLoadCalls[call] = info->var_id;
2637 break;
2638 default:
2639 llvm_unreachable("Unhandled arg kind");
2640 }
2641 }
David Neto22f144c2017-06-12 14:26:21 -04002642 }
David Neto862b7d82018-06-14 18:48:37 -04002643 }
2644 }
David Neto22f144c2017-06-12 14:26:21 -04002645
David Neto862b7d82018-06-14 18:48:37 -04002646 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002647
David Neto862b7d82018-06-14 18:48:37 -04002648 // Find Insert Point for OpDecorate.
2649 auto DecoInsertPoint =
2650 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2651 [](SPIRVInstruction *Inst) -> bool {
2652 return Inst->getOpcode() != spv::OpDecorate &&
2653 Inst->getOpcode() != spv::OpMemberDecorate &&
2654 Inst->getOpcode() != spv::OpExtInstImport;
2655 });
2656
2657 SPIRVOperandList Ops;
2658 for (auto *info : ModuleOrderedResourceVars) {
2659 // Decorate with DescriptorSet and Binding.
2660 Ops.clear();
2661 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2662 << MkNum(info->descriptor_set);
2663 SPIRVInstList.insert(DecoInsertPoint,
2664 new SPIRVInstruction(spv::OpDecorate, Ops));
2665
2666 Ops.clear();
2667 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2668 << MkNum(info->binding);
2669 SPIRVInstList.insert(DecoInsertPoint,
2670 new SPIRVInstruction(spv::OpDecorate, Ops));
2671
alan-bakere9308012019-03-15 10:25:13 -04002672 if (info->coherent) {
2673 // Decorate with Coherent if required for the variable.
2674 Ops.clear();
2675 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2676 SPIRVInstList.insert(DecoInsertPoint,
2677 new SPIRVInstruction(spv::OpDecorate, Ops));
2678 }
2679
David Neto862b7d82018-06-14 18:48:37 -04002680 // Generate NonWritable and NonReadable
2681 switch (info->arg_kind) {
2682 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002683 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002684 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2685 clspv::AddressSpace::Constant) {
2686 Ops.clear();
2687 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2688 SPIRVInstList.insert(DecoInsertPoint,
2689 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002690 }
David Neto862b7d82018-06-14 18:48:37 -04002691 break;
David Neto862b7d82018-06-14 18:48:37 -04002692 case clspv::ArgKind::WriteOnlyImage:
2693 Ops.clear();
2694 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2695 SPIRVInstList.insert(DecoInsertPoint,
2696 new SPIRVInstruction(spv::OpDecorate, Ops));
2697 break;
2698 default:
2699 break;
David Neto22f144c2017-06-12 14:26:21 -04002700 }
2701 }
2702}
2703
2704void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002705 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002706 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2707 ValueMapType &VMap = getValueMap();
2708 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002709 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002710
2711 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2712 Type *Ty = GV.getType();
2713 PointerType *PTy = cast<PointerType>(Ty);
2714
2715 uint32_t InitializerID = 0;
2716
2717 // Workgroup size is handled differently (it goes into a constant)
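  // Roughly: when every kernel carries reqd_work_group_size metadata, the
  // builtin is initialized from an OpConstantComposite of those fixed sizes;
  // otherwise three OpSpecConstants (default 1) are combined with an
  // OpSpecConstantComposite and decorated with SpecIds 0/1/2 later so the
  // host can specialize the sizes.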
2718 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2719 std::vector<bool> HasMDVec;
2720 uint32_t PrevXDimCst = 0xFFFFFFFF;
2721 uint32_t PrevYDimCst = 0xFFFFFFFF;
2722 uint32_t PrevZDimCst = 0xFFFFFFFF;
2723 for (Function &Func : *GV.getParent()) {
2724 if (Func.isDeclaration()) {
2725 continue;
2726 }
2727
2728 // We only need to check kernels.
2729 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2730 continue;
2731 }
2732
2733 if (const MDNode *MD =
2734 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2735 uint32_t CurXDimCst = static_cast<uint32_t>(
2736 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2737 uint32_t CurYDimCst = static_cast<uint32_t>(
2738 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2739 uint32_t CurZDimCst = static_cast<uint32_t>(
2740 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2741
2742 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2743 PrevZDimCst == 0xFFFFFFFF) {
2744 PrevXDimCst = CurXDimCst;
2745 PrevYDimCst = CurYDimCst;
2746 PrevZDimCst = CurZDimCst;
2747 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2748 CurZDimCst != PrevZDimCst) {
2749 llvm_unreachable(
2750 "reqd_work_group_size must be the same across all kernels");
2751 } else {
2752 continue;
2753 }
2754
2755 //
2756 // Generate OpConstantComposite.
2757 //
2758 // Ops[0] : Result Type ID
2759 // Ops[1] : Constant size for x dimension.
2760 // Ops[2] : Constant size for y dimension.
2761 // Ops[3] : Constant size for z dimension.
2762 SPIRVOperandList Ops;
2763
2764 uint32_t XDimCstID =
2765 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2766 uint32_t YDimCstID =
2767 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2768 uint32_t ZDimCstID =
2769 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2770
2771 InitializerID = nextID;
2772
David Neto257c3892018-04-11 13:19:45 -04002773 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2774 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002775
David Neto87846742018-04-11 17:36:22 -04002776 auto *Inst =
2777 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002778 SPIRVInstList.push_back(Inst);
2779
2780 HasMDVec.push_back(true);
2781 } else {
2782 HasMDVec.push_back(false);
2783 }
2784 }
2785
2786 // Check all kernels have same definitions for work_group_size.
2787 bool HasMD = false;
2788 if (!HasMDVec.empty()) {
2789 HasMD = HasMDVec[0];
2790 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2791 if (HasMD != HasMDVec[i]) {
2792 llvm_unreachable(
2793 "Kernels should have consistent work group size definition");
2794 }
2795 }
2796 }
2797
2798    // If none of the kernels have reqd_work_group_size metadata, generate
2799 // OpSpecConstants for x/y/z dimension.
2800 if (!HasMD) {
2801 //
2802 // Generate OpSpecConstants for x/y/z dimension.
2803 //
2804 // Ops[0] : Result Type ID
2805 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2806 uint32_t XDimCstID = 0;
2807 uint32_t YDimCstID = 0;
2808 uint32_t ZDimCstID = 0;
2809
David Neto22f144c2017-06-12 14:26:21 -04002810 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002811 uint32_t result_type_id =
2812 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002813
David Neto257c3892018-04-11 13:19:45 -04002814 // X Dimension
2815 Ops << MkId(result_type_id) << MkNum(1);
2816 XDimCstID = nextID++;
2817 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002818 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002819
2820 // Y Dimension
2821 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002822 Ops << MkId(result_type_id) << MkNum(1);
2823 YDimCstID = nextID++;
2824 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002825 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002826
2827 // Z Dimension
2828 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002829 Ops << MkId(result_type_id) << MkNum(1);
2830 ZDimCstID = nextID++;
2831 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002832 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002833
David Neto257c3892018-04-11 13:19:45 -04002834 BuiltinDimVec.push_back(XDimCstID);
2835 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002836 BuiltinDimVec.push_back(ZDimCstID);
2837
David Neto22f144c2017-06-12 14:26:21 -04002838 //
2839 // Generate OpSpecConstantComposite.
2840 //
2841 // Ops[0] : Result Type ID
2842 // Ops[1] : Constant size for x dimension.
2843 // Ops[2] : Constant size for y dimension.
2844 // Ops[3] : Constant size for z dimension.
2845 InitializerID = nextID;
2846
2847 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002848 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2849 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002850
David Neto87846742018-04-11 17:36:22 -04002851 auto *Inst =
2852 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002853 SPIRVInstList.push_back(Inst);
2854 }
2855 }
2856
David Neto22f144c2017-06-12 14:26:21 -04002857 VMap[&GV] = nextID;
2858
2859 //
2860 // Generate OpVariable.
2861 //
2862 // GIDOps[0] : Result Type ID
2863 // GIDOps[1] : Storage Class
2864 SPIRVOperandList Ops;
2865
David Neto85082642018-03-24 06:55:20 -07002866 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002867 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002868
David Neto85082642018-03-24 06:55:20 -07002869 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002870 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002871 clspv::Option::ModuleConstantsInStorageBuffer();
2872
Kévin Petit23d5f182019-08-13 16:21:29 +01002873 if (GV.hasInitializer()) {
2874 auto GVInit = GV.getInitializer();
2875 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2876 assert(VMap.count(GVInit) == 1);
2877 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002878 }
2879 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002880
2881 if (0 != InitializerID) {
2882    // Emit the ID of the initializer as part of the variable definition.
2883 Ops << MkId(InitializerID);
2884 }
David Neto85082642018-03-24 06:55:20 -07002885 const uint32_t var_id = nextID++;
2886
David Neto87846742018-04-11 17:36:22 -04002887 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002888 SPIRVInstList.push_back(Inst);
2889
2890 // If we have a builtin.
2891 if (spv::BuiltInMax != BuiltinType) {
2892 // Find Insert Point for OpDecorate.
2893 auto DecoInsertPoint =
2894 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2895 [](SPIRVInstruction *Inst) -> bool {
2896 return Inst->getOpcode() != spv::OpDecorate &&
2897 Inst->getOpcode() != spv::OpMemberDecorate &&
2898 Inst->getOpcode() != spv::OpExtInstImport;
2899 });
2900 //
2901 // Generate OpDecorate.
2902 //
2903 // DOps[0] = Target ID
2904 // DOps[1] = Decoration (Builtin)
2905 // DOps[2] = BuiltIn ID
2906 uint32_t ResultID;
2907
2908    // WorkgroupSize is different: we decorate the constant composite that holds
2909 // its value, rather than the variable that we use to access the value.
2910 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2911 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002912 // Save both the value and variable IDs for later.
2913 WorkgroupSizeValueID = InitializerID;
2914 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002915 } else {
2916 ResultID = VMap[&GV];
2917 }
2918
2919 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002920 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2921 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002922
David Neto87846742018-04-11 17:36:22 -04002923 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002924 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002925 } else if (module_scope_constant_external_init) {
2926 // This module scope constant is initialized from a storage buffer with data
2927 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002928 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002929
David Neto862b7d82018-06-14 18:48:37 -04002930    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002931 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2932 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002933 std::string hexbytes;
2934 llvm::raw_string_ostream str(hexbytes);
2935 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002936 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2937 str.str()};
2938 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2939 0);
David Neto85082642018-03-24 06:55:20 -07002940
2941 // Find Insert Point for OpDecorate.
2942 auto DecoInsertPoint =
2943 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2944 [](SPIRVInstruction *Inst) -> bool {
2945 return Inst->getOpcode() != spv::OpDecorate &&
2946 Inst->getOpcode() != spv::OpMemberDecorate &&
2947 Inst->getOpcode() != spv::OpExtInstImport;
2948 });
2949
David Neto257c3892018-04-11 13:19:45 -04002950 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002951 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002952 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2953 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002954 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002955
2956 // OpDecorate %var DescriptorSet <descriptor_set>
2957 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002958 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2959 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002960 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04002961 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04002962 }
2963}
2964
David Netoc6f3ab22018-04-06 18:02:31 -04002965void SPIRVProducerPass::GenerateWorkgroupVars() {
2966 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04002967 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2968 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002969 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002970
2971 // Generate OpVariable.
2972 //
2973 // GIDOps[0] : Result Type ID
2974 // GIDOps[1] : Storage Class
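    // Sketch of the result (illustrative): %wg = OpVariable
    // %_ptr_Workgroup_arr Workgroup, where the array length is a spec
    // constant associated with this spec_id and reported through the
    // descriptor map.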
2975 SPIRVOperandList Ops;
2976 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
2977
2978 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002979 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002980 }
2981}
2982
David Neto862b7d82018-06-14 18:48:37 -04002983void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
2984 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04002985 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
2986 return;
2987 }
David Neto862b7d82018-06-14 18:48:37 -04002988 // Gather the list of resources that are used by this function's arguments.
2989 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
2990
alan-bakerf5e5f692018-11-27 08:33:24 -05002991 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
2992 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04002993 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05002994 std::string kind =
2995 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
2996 ? "pod_ubo"
2997 : argKind;
2998 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04002999 };
3000
3001 auto *fty = F.getType()->getPointerElementType();
3002 auto *func_ty = dyn_cast<FunctionType>(fty);
3003
alan-baker038e9242019-04-19 22:14:41 -04003004 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003005 // If an argument maps to a resource variable, then get descriptor set and
3006  // binding from the resource variable. Other info comes from the metadata.
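  // Each kernel_arg_map entry has 7 operands, decoded below as: argument
  // name, ordinal in the original kernel, remapped argument index, byte
  // offset (within the clustered POD buffer, when applicable), argument size
  // in bytes, argument kind string, and spec id (meaningful only for
  // pointer-to-local arguments).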
3007 const auto *arg_map = F.getMetadata("kernel_arg_map");
3008 if (arg_map) {
3009 for (const auto &arg : arg_map->operands()) {
3010 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003011 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003012 const auto name =
3013 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3014 const auto old_index =
3015 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3016 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003017 const size_t new_index = static_cast<size_t>(
3018 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003019 const auto offset =
3020 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003021 const auto arg_size =
3022 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003023 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003024 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003025 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003026 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003027
3028 uint32_t descriptor_set = 0;
3029 uint32_t binding = 0;
3030 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003031 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003032 static_cast<uint32_t>(spec_id),
3033 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003034 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003035 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003036 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3037 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3038 DL));
David Neto862b7d82018-06-14 18:48:37 -04003039 } else {
3040 auto *info = resource_var_at_index[new_index];
3041 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003042 descriptor_set = info->descriptor_set;
3043 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003044 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003045 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3046 binding);
David Neto862b7d82018-06-14 18:48:37 -04003047 }
3048 } else {
3049 // There is no argument map.
3050 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003051 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003052
3053 SmallVector<Argument *, 4> arguments;
3054 for (auto &arg : F.args()) {
3055 arguments.push_back(&arg);
3056 }
3057
3058 unsigned arg_index = 0;
3059 for (auto *info : resource_var_at_index) {
3060 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003061 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003062 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003063 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003064 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003065 }
3066
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003067        // The pointer-to-local fields (spec id, element size) are unused in this
3068        // case, and the offset is always zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003069 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3070 F.getName(), arg->getName(),
3071 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3072 0, 0,
3073 0, arg_size};
3074 descriptorMapEntries->emplace_back(std::move(kernel_data),
3075 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003076 }
3077 arg_index++;
3078 }
3079 // Generate mappings for pointer-to-local arguments.
3080 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3081 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003082 auto where = LocalArgSpecIds.find(arg);
3083 if (where != LocalArgSpecIds.end()) {
3084 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003085        // Pod argument members are unused in this case.
3086 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3087 F.getName(),
3088 arg->getName(),
3089 arg_index,
3090 ArgKind::Local,
3091 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003092 static_cast<uint32_t>(
3093 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003094 0,
3095 0};
3096 // Pointer-to-local arguments do not utilize descriptor set and binding.
3097 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003098 }
3099 }
3100 }
3101}
3102
David Neto22f144c2017-06-12 14:26:21 -04003103void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3104 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3105 ValueMapType &VMap = getValueMap();
3106 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003107 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3108 auto &GlobalConstArgSet = getGlobalConstArgSet();
3109
3110 FunctionType *FTy = F.getFunctionType();
3111
3112 //
David Neto22f144c2017-06-12 14:26:21 -04003113  // Generate OpFunction.
3114 //
3115
3116 // FOps[0] : Result Type ID
3117 // FOps[1] : Function Control
3118 // FOps[2] : Function Type ID
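  // i.e. (illustrative): %fn = OpFunction %return_type <function control> %fn_type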
3119 SPIRVOperandList FOps;
3120
3121 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003122 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003123
3124 // Check function attributes for SPIRV Function Control.
3125 uint32_t FuncControl = spv::FunctionControlMaskNone;
3126 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3127 FuncControl |= spv::FunctionControlInlineMask;
3128 }
3129 if (F.hasFnAttribute(Attribute::NoInline)) {
3130 FuncControl |= spv::FunctionControlDontInlineMask;
3131 }
3132 // TODO: Check llvm attribute for Function Control Pure.
3133 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3134 FuncControl |= spv::FunctionControlPureMask;
3135 }
3136 // TODO: Check llvm attribute for Function Control Const.
3137 if (F.hasFnAttribute(Attribute::ReadNone)) {
3138 FuncControl |= spv::FunctionControlConstMask;
3139 }
3140
David Neto257c3892018-04-11 13:19:45 -04003141 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003142
3143 uint32_t FTyID;
3144 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3145 SmallVector<Type *, 4> NewFuncParamTys;
3146 FunctionType *NewFTy =
3147 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3148 FTyID = lookupType(NewFTy);
3149 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003150 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003151 if (GlobalConstFuncTyMap.count(FTy)) {
3152 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3153 } else {
3154 FTyID = lookupType(FTy);
3155 }
3156 }
3157
David Neto257c3892018-04-11 13:19:45 -04003158 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003159
3160 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3161 EntryPoints.push_back(std::make_pair(&F, nextID));
3162 }
3163
3164 VMap[&F] = nextID;
3165
David Neto482550a2018-03-24 05:21:07 -07003166 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003167 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3168 }
David Neto22f144c2017-06-12 14:26:21 -04003169 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003170 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003171 SPIRVInstList.push_back(FuncInst);
3172
3173 //
3174 // Generate OpFunctionParameter for Normal function.
3175 //
3176
3177 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003178
3179 // Find Insert Point for OpDecorate.
3180 auto DecoInsertPoint =
3181 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3182 [](SPIRVInstruction *Inst) -> bool {
3183 return Inst->getOpcode() != spv::OpDecorate &&
3184 Inst->getOpcode() != spv::OpMemberDecorate &&
3185 Inst->getOpcode() != spv::OpExtInstImport;
3186 });
3187
David Neto22f144c2017-06-12 14:26:21 -04003188    // Iterate over the arguments (not the function type's parameter types) so we keep argument names.
3189 unsigned ArgIdx = 0;
3190 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003191 uint32_t param_id = nextID++;
3192 VMap[&Arg] = param_id;
3193
3194 if (CalledWithCoherentResource(Arg)) {
3195      // If the arg is ever passed a coherent resource, then decorate this
3196 // parameter with Coherent too.
3197 SPIRVOperandList decoration_ops;
3198 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003199 SPIRVInstList.insert(
3200 DecoInsertPoint,
3201 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003202 }
David Neto22f144c2017-06-12 14:26:21 -04003203
3204 // ParamOps[0] : Result Type ID
3205 SPIRVOperandList ParamOps;
3206
3207 // Find SPIRV instruction for parameter type.
3208 uint32_t ParamTyID = lookupType(Arg.getType());
3209 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3210 if (GlobalConstFuncTyMap.count(FTy)) {
3211 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3212 Type *EleTy = PTy->getPointerElementType();
3213 Type *ArgTy =
3214 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3215 ParamTyID = lookupType(ArgTy);
3216 GlobalConstArgSet.insert(&Arg);
3217 }
3218 }
3219 }
David Neto257c3892018-04-11 13:19:45 -04003220 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003221
3222 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003223 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003224 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003225 SPIRVInstList.push_back(ParamInst);
3226
3227 ArgIdx++;
3228 }
3229 }
3230}
3231
alan-bakerb6b09dc2018-11-08 16:59:28 -05003232void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003233 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3234 EntryPointVecType &EntryPoints = getEntryPointVec();
3235 ValueMapType &VMap = getValueMap();
3236 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3237 uint32_t &ExtInstImportID = getOpExtInstImportID();
3238 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3239
3240 // Set up insert point.
3241 auto InsertPoint = SPIRVInstList.begin();
3242
3243 //
3244 // Generate OpCapability
3245 //
3246  // TODO: Which llvm information is mapped to SPIRV Capability?
3247
3248 // Ops[0] = Capability
3249 SPIRVOperandList Ops;
3250
David Neto87846742018-04-11 17:36:22 -04003251 auto *CapInst =
3252 new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
David Neto22f144c2017-06-12 14:26:21 -04003253 SPIRVInstList.insert(InsertPoint, CapInst);
3254
3255 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003256 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3257 // Generate OpCapability for i8 type.
3258 SPIRVInstList.insert(InsertPoint,
3259 new SPIRVInstruction(spv::OpCapability,
3260 {MkNum(spv::CapabilityInt8)}));
3261 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003262 // Generate OpCapability for i16 type.
David Neto87846742018-04-11 17:36:22 -04003263 SPIRVInstList.insert(InsertPoint,
3264 new SPIRVInstruction(spv::OpCapability,
3265 {MkNum(spv::CapabilityInt16)}));
David Neto22f144c2017-06-12 14:26:21 -04003266 } else if (Ty->isIntegerTy(64)) {
3267 // Generate OpCapability for i64 type.
David Neto87846742018-04-11 17:36:22 -04003268 SPIRVInstList.insert(InsertPoint,
3269 new SPIRVInstruction(spv::OpCapability,
3270 {MkNum(spv::CapabilityInt64)}));
David Neto22f144c2017-06-12 14:26:21 -04003271 } else if (Ty->isHalfTy()) {
3272 // Generate OpCapability for half type.
3273 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003274 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3275 {MkNum(spv::CapabilityFloat16)}));
David Neto22f144c2017-06-12 14:26:21 -04003276 } else if (Ty->isDoubleTy()) {
3277 // Generate OpCapability for double type.
3278 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003279 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3280 {MkNum(spv::CapabilityFloat64)}));
David Neto22f144c2017-06-12 14:26:21 -04003281 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3282 if (STy->isOpaque()) {
alan-bakerf67468c2019-11-25 15:51:49 -05003283 if (STy->getName().startswith("opencl.image2d_wo_t") ||
3284 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04003285 // Generate OpCapability for write only image type.
3286 SPIRVInstList.insert(
3287 InsertPoint,
3288 new SPIRVInstruction(
David Neto87846742018-04-11 17:36:22 -04003289 spv::OpCapability,
3290 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
David Neto22f144c2017-06-12 14:26:21 -04003291 }
3292 }
3293 }
3294 }
3295
David Neto5c22a252018-03-15 16:07:41 -04003296 { // OpCapability ImageQuery
3297 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003298 for (const auto &SymVal : module.getValueSymbolTable()) {
3299 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003300 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003301 hasImageQuery = true;
3302 break;
3303 }
David Neto5c22a252018-03-15 16:07:41 -04003304 }
3305 }
alan-bakerf67468c2019-11-25 15:51:49 -05003306
David Neto5c22a252018-03-15 16:07:41 -04003307 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003308 auto *ImageQueryCapInst = new SPIRVInstruction(
3309 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003310 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3311 }
3312 }
3313
David Neto22f144c2017-06-12 14:26:21 -04003314 if (hasVariablePointers()) {
3315 //
David Neto22f144c2017-06-12 14:26:21 -04003316 // Generate OpCapability.
3317 //
3318 // Ops[0] = Capability
3319 //
3320 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003321 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003322
David Neto87846742018-04-11 17:36:22 -04003323 SPIRVInstList.insert(InsertPoint,
3324 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003325 } else if (hasVariablePointersStorageBuffer()) {
3326 //
3327 // Generate OpCapability.
3328 //
3329 // Ops[0] = Capability
3330 //
3331 Ops.clear();
3332 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003333
alan-baker5b86ed72019-02-15 08:26:50 -05003334 SPIRVInstList.insert(InsertPoint,
3335 new SPIRVInstruction(spv::OpCapability, Ops));
3336 }
3337
3338 // Always add the storage buffer extension
3339 {
David Neto22f144c2017-06-12 14:26:21 -04003340 //
3341 // Generate OpExtension.
3342 //
3343 // Ops[0] = Name (Literal String)
3344 //
alan-baker5b86ed72019-02-15 08:26:50 -05003345 auto *ExtensionInst = new SPIRVInstruction(
3346 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3347 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3348 }
David Neto22f144c2017-06-12 14:26:21 -04003349
alan-baker5b86ed72019-02-15 08:26:50 -05003350 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3351 //
3352 // Generate OpExtension.
3353 //
3354 // Ops[0] = Name (Literal String)
3355 //
3356 auto *ExtensionInst = new SPIRVInstruction(
3357 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3358 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003359 }
3360
3361 if (ExtInstImportID) {
3362 ++InsertPoint;
3363 }
3364
3365 //
3366 // Generate OpMemoryModel
3367 //
3368 // Memory model for Vulkan will always be GLSL450.
3369
3370 // Ops[0] = Addressing Model
3371 // Ops[1] = Memory Model
3372 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003373 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003374
David Neto87846742018-04-11 17:36:22 -04003375 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003376 SPIRVInstList.insert(InsertPoint, MemModelInst);
3377
3378 //
3379 // Generate OpEntryPoint
3380 //
3381 for (auto EntryPoint : EntryPoints) {
3382 // Ops[0] = Execution Model
3383 // Ops[1] = EntryPoint ID
3384 // Ops[2] = Name (Literal String)
3385 // ...
3386 //
3387 // TODO: Do we need to consider Interface ID for forward references???
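    // e.g. (illustrative):
    //   OpEntryPoint GLCompute %kernel "kernel_name" %interface_0 %interface_1 ...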
3388 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003389 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003390 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3391 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003392
David Neto22f144c2017-06-12 14:26:21 -04003393 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003394 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003395 }
3396
David Neto87846742018-04-11 17:36:22 -04003397 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003398 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3399 }
3400
3401 for (auto EntryPoint : EntryPoints) {
3402 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3403 ->getMetadata("reqd_work_group_size")) {
3404
3405 if (!BuiltinDimVec.empty()) {
3406 llvm_unreachable(
3407 "Kernels should have consistent work group size definition");
3408 }
3409
3410 //
3411 // Generate OpExecutionMode
3412 //
3413
3414 // Ops[0] = Entry Point ID
3415 // Ops[1] = Execution Mode
3416 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3417 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003418 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003419
3420 uint32_t XDim = static_cast<uint32_t>(
3421 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3422 uint32_t YDim = static_cast<uint32_t>(
3423 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3424 uint32_t ZDim = static_cast<uint32_t>(
3425 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3426
David Neto257c3892018-04-11 13:19:45 -04003427 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003428
David Neto87846742018-04-11 17:36:22 -04003429 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003430 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3431 }
3432 }
3433
3434 //
3435 // Generate OpSource.
3436 //
3437 // Ops[0] = SourceLanguage ID
3438 // Ops[1] = Version (LiteralNum)
3439 //
3440 Ops.clear();
Kévin Petit0fc88042019-04-09 23:25:02 +01003441 if (clspv::Option::CPlusPlus()) {
3442 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3443 } else {
3444 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
3445 }
David Neto22f144c2017-06-12 14:26:21 -04003446
David Neto87846742018-04-11 17:36:22 -04003447 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003448 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3449
3450 if (!BuiltinDimVec.empty()) {
3451 //
3452 // Generate OpDecorates for x/y/z dimension.
3453 //
3454 // Ops[0] = Target ID
3455 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003456 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003457
3458 // X Dimension
3459 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003460 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003461 SPIRVInstList.insert(InsertPoint,
3462 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003463
3464 // Y Dimension
3465 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003466 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003467 SPIRVInstList.insert(InsertPoint,
3468 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003469
3470 // Z Dimension
3471 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003472 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003473 SPIRVInstList.insert(InsertPoint,
3474 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003475 }
3476}
3477
David Netob6e2e062018-04-25 10:32:06 -04003478void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3479 // Work around a driver bug. Initializers on Private variables might not
3480 // work. So the start of the kernel should store the initializer value to the
3481 // variables. Yes, *every* entry point pays this cost if *any* entry point
3482 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3483 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003484 // TODO(dneto): Remove this at some point once fixed drivers are widely
3485 // available.
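  // Concretely, this emits "OpStore %workgroup_size_var %workgroup_size_value"
  // at the start of each kernel body, and only when the hack-initializers
  // option is enabled by the caller.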
David Netob6e2e062018-04-25 10:32:06 -04003486 if (WorkgroupSizeVarID) {
3487 assert(WorkgroupSizeValueID);
3488
3489 SPIRVOperandList Ops;
3490 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3491
3492 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3493 getSPIRVInstList().push_back(Inst);
3494 }
3495}
3496
David Neto22f144c2017-06-12 14:26:21 -04003497void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3498 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3499 ValueMapType &VMap = getValueMap();
3500
David Netob6e2e062018-04-25 10:32:06 -04003501 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003502
3503 for (BasicBlock &BB : F) {
3504 // Register BasicBlock to ValueMap.
3505 VMap[&BB] = nextID;
3506
3507 //
3508 // Generate OpLabel for Basic Block.
3509 //
3510 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003511 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003512 SPIRVInstList.push_back(Inst);
3513
David Neto6dcd4712017-06-23 11:06:47 -04003514 // OpVariable instructions must come first.
3515 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003516 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3517 // Allocating a pointer requires variable pointers.
3518 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003519 setVariablePointersCapabilities(
3520 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003521 }
David Neto6dcd4712017-06-23 11:06:47 -04003522 GenerateInstruction(I);
3523 }
3524 }
3525
David Neto22f144c2017-06-12 14:26:21 -04003526 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003527 if (clspv::Option::HackInitializers()) {
3528 GenerateEntryPointInitialStores();
3529 }
David Neto22f144c2017-06-12 14:26:21 -04003530 }
3531
3532 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003533 if (!isa<AllocaInst>(I)) {
3534 GenerateInstruction(I);
3535 }
David Neto22f144c2017-06-12 14:26:21 -04003536 }
3537 }
3538}
3539
3540spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3541 const std::map<CmpInst::Predicate, spv::Op> Map = {
3542 {CmpInst::ICMP_EQ, spv::OpIEqual},
3543 {CmpInst::ICMP_NE, spv::OpINotEqual},
3544 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3545 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3546 {CmpInst::ICMP_ULT, spv::OpULessThan},
3547 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3548 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3549 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3550 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3551 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3552 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3553 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3554 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3555 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3556 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3557 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3558 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3559 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3560 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3561 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3562 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3563 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3564
3565 assert(0 != Map.count(I->getPredicate()));
3566
3567 return Map.at(I->getPredicate());
3568}
3569
3570spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3571 const std::map<unsigned, spv::Op> Map{
3572 {Instruction::Trunc, spv::OpUConvert},
3573 {Instruction::ZExt, spv::OpUConvert},
3574 {Instruction::SExt, spv::OpSConvert},
3575 {Instruction::FPToUI, spv::OpConvertFToU},
3576 {Instruction::FPToSI, spv::OpConvertFToS},
3577 {Instruction::UIToFP, spv::OpConvertUToF},
3578 {Instruction::SIToFP, spv::OpConvertSToF},
3579 {Instruction::FPTrunc, spv::OpFConvert},
3580 {Instruction::FPExt, spv::OpFConvert},
3581 {Instruction::BitCast, spv::OpBitcast}};
3582
3583 assert(0 != Map.count(I.getOpcode()));
3584
3585 return Map.at(I.getOpcode());
3586}
3587
3588spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
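  // Boolean (i1 or i1 vector) operations use the logical opcodes below, since
  // SPIR-V's bitwise instructions are only defined for integer types, not
  // OpTypeBool.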
Kévin Petit24272b62018-10-18 19:16:12 +00003589 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003590 switch (I.getOpcode()) {
3591 default:
3592 break;
3593 case Instruction::Or:
3594 return spv::OpLogicalOr;
3595 case Instruction::And:
3596 return spv::OpLogicalAnd;
3597 case Instruction::Xor:
3598 return spv::OpLogicalNotEqual;
3599 }
3600 }
3601
alan-bakerb6b09dc2018-11-08 16:59:28 -05003602 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003603 {Instruction::Add, spv::OpIAdd},
3604 {Instruction::FAdd, spv::OpFAdd},
3605 {Instruction::Sub, spv::OpISub},
3606 {Instruction::FSub, spv::OpFSub},
3607 {Instruction::Mul, spv::OpIMul},
3608 {Instruction::FMul, spv::OpFMul},
3609 {Instruction::UDiv, spv::OpUDiv},
3610 {Instruction::SDiv, spv::OpSDiv},
3611 {Instruction::FDiv, spv::OpFDiv},
3612 {Instruction::URem, spv::OpUMod},
3613 {Instruction::SRem, spv::OpSRem},
3614 {Instruction::FRem, spv::OpFRem},
3615 {Instruction::Or, spv::OpBitwiseOr},
3616 {Instruction::Xor, spv::OpBitwiseXor},
3617 {Instruction::And, spv::OpBitwiseAnd},
3618 {Instruction::Shl, spv::OpShiftLeftLogical},
3619 {Instruction::LShr, spv::OpShiftRightLogical},
3620 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3621
3622 assert(0 != Map.count(I.getOpcode()));
3623
3624 return Map.at(I.getOpcode());
3625}
3626
3627void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3628 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3629 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003630 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3631 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3632
3633 // Register Instruction to ValueMap.
3634 if (0 == VMap[&I]) {
3635 VMap[&I] = nextID;
3636 }
3637
3638 switch (I.getOpcode()) {
3639 default: {
3640 if (Instruction::isCast(I.getOpcode())) {
3641 //
3642 // Generate SPIRV instructions for cast operators.
3643 //
3644
David Netod2de94a2017-08-28 17:27:47 -04003645 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003646 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003647 auto toI8 = Ty == Type::getInt8Ty(Context);
3648 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003649 // Handle zext, sext and uitofp with i1 type specially.
3650 if ((I.getOpcode() == Instruction::ZExt ||
3651 I.getOpcode() == Instruction::SExt ||
3652 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003653 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003654 //
3655 // Generate OpSelect.
3656 //
3657
3658 // Ops[0] = Result Type ID
3659 // Ops[1] = Condition ID
3660 // Ops[2] = True Constant ID
3661 // Ops[3] = False Constant ID
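        // e.g. (illustrative): "%r = zext i1 %c to i32" becomes
        //   %r = OpSelect %uint %c %uint_1 %uint_0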
3662 SPIRVOperandList Ops;
3663
David Neto257c3892018-04-11 13:19:45 -04003664 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003665
David Neto22f144c2017-06-12 14:26:21 -04003666 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003667 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003668
3669 uint32_t TrueID = 0;
3670 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003671 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003672 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003673 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003674 } else {
3675 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3676 }
David Neto257c3892018-04-11 13:19:45 -04003677 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003678
3679 uint32_t FalseID = 0;
3680 if (I.getOpcode() == Instruction::ZExt) {
3681 FalseID = VMap[Constant::getNullValue(I.getType())];
3682 } else if (I.getOpcode() == Instruction::SExt) {
3683 FalseID = VMap[Constant::getNullValue(I.getType())];
3684 } else {
3685 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3686 }
David Neto257c3892018-04-11 13:19:45 -04003687 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003688
David Neto87846742018-04-11 17:36:22 -04003689 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003690 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003691 } else if (!clspv::Option::Int8Support() &&
3692 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003693 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3694 // 8 bits.
3695 // Before:
3696 // %result = trunc i32 %a to i8
3697 // After
3698 // %result = OpBitwiseAnd %uint %a %uint_255
3699
3700 SPIRVOperandList Ops;
3701
David Neto257c3892018-04-11 13:19:45 -04003702 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003703
3704 Type *UintTy = Type::getInt32Ty(Context);
3705 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003706 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003707
David Neto87846742018-04-11 17:36:22 -04003708 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003709 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003710 } else {
3711 // Ops[0] = Result Type ID
3712 // Ops[1] = Source Value ID
3713 SPIRVOperandList Ops;
3714
David Neto257c3892018-04-11 13:19:45 -04003715 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003716
David Neto87846742018-04-11 17:36:22 -04003717 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003718 SPIRVInstList.push_back(Inst);
3719 }
3720 } else if (isa<BinaryOperator>(I)) {
3721 //
3722 // Generate SPIRV instructions for binary operators.
3723 //
3724
3725 // Handle xor with i1 type specially.
3726 if (I.getOpcode() == Instruction::Xor &&
3727 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003728 ((isa<ConstantInt>(I.getOperand(0)) &&
3729 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3730 (isa<ConstantInt>(I.getOperand(1)) &&
3731 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003732 //
3733 // Generate OpLogicalNot.
3734 //
3735 // Ops[0] = Result Type ID
3736 // Ops[1] = Operand
3737 SPIRVOperandList Ops;
3738
David Neto257c3892018-04-11 13:19:45 -04003739 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003740
3741 Value *CondV = I.getOperand(0);
3742 if (isa<Constant>(I.getOperand(0))) {
3743 CondV = I.getOperand(1);
3744 }
David Neto257c3892018-04-11 13:19:45 -04003745 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003746
David Neto87846742018-04-11 17:36:22 -04003747 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003748 SPIRVInstList.push_back(Inst);
3749 } else {
3750 // Ops[0] = Result Type ID
3751 // Ops[1] = Operand 0
3752 // Ops[2] = Operand 1
3753 SPIRVOperandList Ops;
3754
David Neto257c3892018-04-11 13:19:45 -04003755 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3756 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003757
David Neto87846742018-04-11 17:36:22 -04003758 auto *Inst =
3759 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003760 SPIRVInstList.push_back(Inst);
3761 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003762 } else if (I.getOpcode() == Instruction::FNeg) {
    // FNeg is the only unary operator in LLVM IR.
3764 //
3765 // Ops[0] = Result Type ID
3766 // Ops[1] = Operand 0
3767 SPIRVOperandList ops;
3768
3769 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3770 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3771 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003772 } else {
3773 I.print(errs());
3774 llvm_unreachable("Unsupported instruction???");
3775 }
3776 break;
3777 }
3778 case Instruction::GetElementPtr: {
3779 auto &GlobalConstArgSet = getGlobalConstArgSet();
3780
3781 //
3782 // Generate OpAccessChain.
3783 //
3784 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3785
3790 // Ops[0] = Result Type ID
3791 // Ops[1] = Base ID
3792 // Ops[2] ... Ops[n] = Indexes ID
3793 SPIRVOperandList Ops;
3794
alan-bakerb6b09dc2018-11-08 16:59:28 -05003795 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003796 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3797 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3798 // Use pointer type with private address space for global constant.
3799 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003800 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003801 }
David Neto257c3892018-04-11 13:19:45 -04003802
3803 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003804
David Neto862b7d82018-06-14 18:48:37 -04003805 // Generate the base pointer.
3806 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003807
David Neto862b7d82018-06-14 18:48:37 -04003808 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003809
3810 //
    // The GEP is translated according to the following rules:
    //
    // 1. If the GEP's first index is the constant 0, generate OpAccessChain
    //    and drop that first index.
    // 2. If the GEP's first index is a non-zero constant, generate
    //    OpPtrAccessChain and keep the first index.
    // 3. If the GEP's first index is not a constant, generate OpPtrAccessChain
    //    and keep the first index.
    // 4. Otherwise, generate OpAccessChain and keep the first index.
3821 //
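    // For example (a rough sketch of the intended mapping; names are
    // illustrative, not verbatim output):
    //   getelementptr %S, %S* %base, i32 0, i32 2
    //     ==> OpAccessChain %ptr_field %base %uint_2
    //   getelementptr i32, i32* %base, i32 %n
    //     ==> OpPtrAccessChain %ptr_int %base %n
    //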
3822 spv::Op Opcode = spv::OpAccessChain;
3823 unsigned offset = 0;
3824 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003825 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003826 offset = 1;
      } else {
        // A non-zero constant first index requires OpPtrAccessChain.
        Opcode = spv::OpPtrAccessChain;
      }
David Neto862b7d82018-06-14 18:48:37 -04003830 } else {
David Neto22f144c2017-06-12 14:26:21 -04003831 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003832 }
3833
3834 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003835 // Do we need to generate ArrayStride? Check against the GEP result type
3836 // rather than the pointer type of the base because when indexing into
3837 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3838 // for something else in the SPIR-V.
3839 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003840 auto address_space = ResultType->getAddressSpace();
3841 setVariablePointersCapabilities(address_space);
3842 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003843 case spv::StorageClassStorageBuffer:
3844 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003845 // Save the need to generate an ArrayStride decoration. But defer
3846 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003847 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003848 break;
3849 default:
3850 break;
David Neto1a1a0582017-07-07 12:01:44 -04003851 }
David Neto22f144c2017-06-12 14:26:21 -04003852 }
3853
3854 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003855 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003856 }
3857
David Neto87846742018-04-11 17:36:22 -04003858 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003859 SPIRVInstList.push_back(Inst);
3860 break;
3861 }
3862 case Instruction::ExtractValue: {
3863 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3864 // Ops[0] = Result Type ID
3865 // Ops[1] = Composite ID
3866 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3867 SPIRVOperandList Ops;
3868
David Neto257c3892018-04-11 13:19:45 -04003869 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003870
3871 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003872 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003873
3874 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003875 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003876 }
3877
David Neto87846742018-04-11 17:36:22 -04003878 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003879 SPIRVInstList.push_back(Inst);
3880 break;
3881 }
3882 case Instruction::InsertValue: {
3883 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3884 // Ops[0] = Result Type ID
3885 // Ops[1] = Object ID
3886 // Ops[2] = Composite ID
3887 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3888 SPIRVOperandList Ops;
3889
3890 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003891 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003892
3893 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003894 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003895
3896 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003897 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003898
3899 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003900 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003901 }
3902
David Neto87846742018-04-11 17:36:22 -04003903 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003904 SPIRVInstList.push_back(Inst);
3905 break;
3906 }
3907 case Instruction::Select: {
3908 //
3909 // Generate OpSelect.
3910 //
3911
3912 // Ops[0] = Result Type ID
3913 // Ops[1] = Condition ID
3914 // Ops[2] = True Constant ID
3915 // Ops[3] = False Constant ID
3916 SPIRVOperandList Ops;
3917
3918 // Find SPIRV instruction for parameter type.
3919 auto Ty = I.getType();
3920 if (Ty->isPointerTy()) {
3921 auto PointeeTy = Ty->getPointerElementType();
3922 if (PointeeTy->isStructTy() &&
3923 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3924 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003925 } else {
3926 // Selecting between pointers requires variable pointers.
3927 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3928 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3929 setVariablePointers(true);
3930 }
David Neto22f144c2017-06-12 14:26:21 -04003931 }
3932 }
3933
David Neto257c3892018-04-11 13:19:45 -04003934 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3935 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003936
David Neto87846742018-04-11 17:36:22 -04003937 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003938 SPIRVInstList.push_back(Inst);
3939 break;
3940 }
3941 case Instruction::ExtractElement: {
3942 // Handle <4 x i8> type manually.
3943 Type *CompositeTy = I.getOperand(0)->getType();
3944 if (is4xi8vec(CompositeTy)) {
3945 //
3946 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3947 // <4 x i8>.
3948 //
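      // In effect (a sketch, assuming the <4 x i8> value is packed into a
      // 32-bit word): %elt = (%word >> (idx * 8)) & 0xFF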
3949
3950 //
3951 // Generate OpShiftRightLogical
3952 //
3953 // Ops[0] = Result Type ID
3954 // Ops[1] = Operand 0
3955 // Ops[2] = Operand 1
3956 //
3957 SPIRVOperandList Ops;
3958
David Neto257c3892018-04-11 13:19:45 -04003959 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04003960
3961 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003962 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04003963
3964 uint32_t Op1ID = 0;
3965 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3966 // Handle constant index.
3967 uint64_t Idx = CI->getZExtValue();
3968 Value *ShiftAmount =
3969 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3970 Op1ID = VMap[ShiftAmount];
3971 } else {
3972 // Handle variable index.
3973 SPIRVOperandList TmpOps;
3974
David Neto257c3892018-04-11 13:19:45 -04003975 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3976 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003977
3978 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003979 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003980
3981 Op1ID = nextID;
3982
David Neto87846742018-04-11 17:36:22 -04003983 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003984 SPIRVInstList.push_back(TmpInst);
3985 }
David Neto257c3892018-04-11 13:19:45 -04003986 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04003987
3988 uint32_t ShiftID = nextID;
3989
David Neto87846742018-04-11 17:36:22 -04003990 auto *Inst =
3991 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003992 SPIRVInstList.push_back(Inst);
3993
3994 //
3995 // Generate OpBitwiseAnd
3996 //
3997 // Ops[0] = Result Type ID
3998 // Ops[1] = Operand 0
3999 // Ops[2] = Operand 1
4000 //
4001 Ops.clear();
4002
David Neto257c3892018-04-11 13:19:45 -04004003 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004004
4005 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004006 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004007
David Neto9b2d6252017-09-06 15:47:37 -04004008 // Reset mapping for this value to the result of the bitwise and.
4009 VMap[&I] = nextID;
4010
David Neto87846742018-04-11 17:36:22 -04004011 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004012 SPIRVInstList.push_back(Inst);
4013 break;
4014 }
4015
4016 // Ops[0] = Result Type ID
4017 // Ops[1] = Composite ID
4018 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4019 SPIRVOperandList Ops;
4020
David Neto257c3892018-04-11 13:19:45 -04004021 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004022
4023 spv::Op Opcode = spv::OpCompositeExtract;
4024 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004025 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004026 } else {
David Neto257c3892018-04-11 13:19:45 -04004027 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004028 Opcode = spv::OpVectorExtractDynamic;
4029 }
4030
David Neto87846742018-04-11 17:36:22 -04004031 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004032 SPIRVInstList.push_back(Inst);
4033 break;
4034 }
4035 case Instruction::InsertElement: {
4036 // Handle <4 x i8> type manually.
4037 Type *CompositeTy = I.getOperand(0)->getType();
4038 if (is4xi8vec(CompositeTy)) {
4039 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4040 uint32_t CstFFID = VMap[CstFF];
4041
4042 uint32_t ShiftAmountID = 0;
4043 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4044 // Handle constant index.
4045 uint64_t Idx = CI->getZExtValue();
4046 Value *ShiftAmount =
4047 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4048 ShiftAmountID = VMap[ShiftAmount];
4049 } else {
4050 // Handle variable index.
4051 SPIRVOperandList TmpOps;
4052
David Neto257c3892018-04-11 13:19:45 -04004053 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4054 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004055
4056 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004057 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004058
4059 ShiftAmountID = nextID;
4060
David Neto87846742018-04-11 17:36:22 -04004061 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004062 SPIRVInstList.push_back(TmpInst);
4063 }
4064
4065 //
4066 // Generate mask operations.
4067 //
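      // Roughly (a sketch of the packed-byte update that follows):
      //   %mask   = 0xFF << (idx * 8)
      //   %result = (%word & ~%mask) | (%val << (idx * 8))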
4068
4069 // ShiftLeft mask according to index of insertelement.
4070 SPIRVOperandList Ops;
4071
David Neto257c3892018-04-11 13:19:45 -04004072 const uint32_t ResTyID = lookupType(CompositeTy);
4073 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004074
4075 uint32_t MaskID = nextID;
4076
David Neto87846742018-04-11 17:36:22 -04004077 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004078 SPIRVInstList.push_back(Inst);
4079
4080 // Inverse mask.
4081 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004082 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004083
4084 uint32_t InvMaskID = nextID;
4085
David Neto87846742018-04-11 17:36:22 -04004086 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004087 SPIRVInstList.push_back(Inst);
4088
4089 // Apply mask.
4090 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004091 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004092
4093 uint32_t OrgValID = nextID;
4094
David Neto87846742018-04-11 17:36:22 -04004095 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004096 SPIRVInstList.push_back(Inst);
4097
4098 // Create correct value according to index of insertelement.
4099 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004100 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4101 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004102
4103 uint32_t InsertValID = nextID;
4104
David Neto87846742018-04-11 17:36:22 -04004105 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004106 SPIRVInstList.push_back(Inst);
4107
4108 // Insert value to original value.
4109 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004110 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004111
David Netoa394f392017-08-26 20:45:29 -04004112 VMap[&I] = nextID;
4113
David Neto87846742018-04-11 17:36:22 -04004114 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004115 SPIRVInstList.push_back(Inst);
4116
4117 break;
4118 }
4119
David Neto22f144c2017-06-12 14:26:21 -04004120 SPIRVOperandList Ops;
4121
James Priced26efea2018-06-09 23:28:32 +01004122 // Ops[0] = Result Type ID
4123 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004124
4125 spv::Op Opcode = spv::OpCompositeInsert;
4126 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004127 const auto value = CI->getZExtValue();
4128 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004129 // Ops[1] = Object ID
4130 // Ops[2] = Composite ID
4131 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004132 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004133 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004134 } else {
James Priced26efea2018-06-09 23:28:32 +01004135 // Ops[1] = Composite ID
4136 // Ops[2] = Object ID
4137 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004138 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004139 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004140 Opcode = spv::OpVectorInsertDynamic;
4141 }
4142
David Neto87846742018-04-11 17:36:22 -04004143 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004144 SPIRVInstList.push_back(Inst);
4145 break;
4146 }
4147 case Instruction::ShuffleVector: {
4148 // Ops[0] = Result Type ID
4149 // Ops[1] = Vector 1 ID
4150 // Ops[2] = Vector 2 ID
4151 // Ops[3] ... Ops[n] = Components (Literal Number)
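    // For example (a sketch; note below that an undef lane becomes the
    // 0xFFFFFFFF literal):
    //   shufflevector <4 x float> %a, %b, <i32 0, i32 5, i32 undef, i32 2>
    //     ==> OpVectorShuffle %v4float %a %b 0 5 0xFFFFFFFF 2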
4152 SPIRVOperandList Ops;
4153
David Neto257c3892018-04-11 13:19:45 -04004154 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4155 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004156
4157 uint64_t NumElements = 0;
4158 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4159 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4160
4161 if (Cst->isNullValue()) {
4162 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004163 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004164 }
4165 } else if (const ConstantDataSequential *CDS =
4166 dyn_cast<ConstantDataSequential>(Cst)) {
4167 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4168 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004169 const auto value = CDS->getElementAsInteger(i);
4170 assert(value <= UINT32_MAX);
4171 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004172 }
4173 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4174 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4175 auto Op = CV->getOperand(i);
4176
4177 uint32_t literal = 0;
4178
4179 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4180 literal = static_cast<uint32_t>(CI->getZExtValue());
4181 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4182 literal = 0xFFFFFFFFu;
4183 } else {
4184 Op->print(errs());
4185 llvm_unreachable("Unsupported element in ConstantVector!");
4186 }
4187
David Neto257c3892018-04-11 13:19:45 -04004188 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004189 }
4190 } else {
4191 Cst->print(errs());
4192 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4193 }
4194 }
4195
David Neto87846742018-04-11 17:36:22 -04004196 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004197 SPIRVInstList.push_back(Inst);
4198 break;
4199 }
4200 case Instruction::ICmp:
4201 case Instruction::FCmp: {
4202 CmpInst *CmpI = cast<CmpInst>(&I);
4203
David Netod4ca2e62017-07-06 18:47:35 -04004204 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004205 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004206 if (isa<PointerType>(ArgTy)) {
4207 CmpI->print(errs());
4208 std::string name = I.getParent()->getParent()->getName();
4209 errs()
4210 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4211 << "in function " << name << "\n";
4212 llvm_unreachable("Pointer equality check is invalid");
4213 break;
4214 }
4215
David Neto257c3892018-04-11 13:19:45 -04004216 // Ops[0] = Result Type ID
4217 // Ops[1] = Operand 1 ID
4218 // Ops[2] = Operand 2 ID
4219 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004220
David Neto257c3892018-04-11 13:19:45 -04004221 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4222 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004223
4224 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004225 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004226 SPIRVInstList.push_back(Inst);
4227 break;
4228 }
4229 case Instruction::Br: {
    // The branch instruction is deferred because it needs the labels' IDs.
    // Record the slot's location on SPIRVInstructionList.
4232 DeferredInsts.push_back(
4233 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4234 break;
4235 }
4236 case Instruction::Switch: {
4237 I.print(errs());
4238 llvm_unreachable("Unsupported instruction???");
4239 break;
4240 }
4241 case Instruction::IndirectBr: {
4242 I.print(errs());
4243 llvm_unreachable("Unsupported instruction???");
4244 break;
4245 }
4246 case Instruction::PHI: {
    // The PHI instruction is deferred because it needs the IDs of its incoming
    // values and blocks. Record the slot's location on SPIRVInstructionList.
4249 DeferredInsts.push_back(
4250 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4251 break;
4252 }
4253 case Instruction::Alloca: {
4254 //
4255 // Generate OpVariable.
4256 //
4257 // Ops[0] : Result Type ID
4258 // Ops[1] : Storage Class
4259 SPIRVOperandList Ops;
4260
David Neto257c3892018-04-11 13:19:45 -04004261 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004262
David Neto87846742018-04-11 17:36:22 -04004263 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004264 SPIRVInstList.push_back(Inst);
4265 break;
4266 }
4267 case Instruction::Load: {
4268 LoadInst *LD = cast<LoadInst>(&I);
4269 //
4270 // Generate OpLoad.
4271 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004272
alan-baker5b86ed72019-02-15 08:26:50 -05004273 if (LD->getType()->isPointerTy()) {
4274 // Loading a pointer requires variable pointers.
4275 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4276 }
David Neto22f144c2017-06-12 14:26:21 -04004277
David Neto0a2f98d2017-09-15 19:38:40 -04004278 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004279 uint32_t PointerID = VMap[LD->getPointerOperand()];
4280
4281 // This is a hack to work around what looks like a driver bug.
4282 // When we're loading from the special variable holding the WorkgroupSize
    // builtin value, use an OpBitwiseAnd of the value's ID rather than
4284 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004285 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004286 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004287 // Generate a bitwise-and of the original value with itself.
4288 // We should have been able to get away with just an OpCopyObject,
4289 // but we need something more complex to get past certain driver bugs.
4290 // This is ridiculous, but necessary.
4291 // TODO(dneto): Revisit this once drivers fix their bugs.
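      // In effect (a sketch): %wgsize = OpBitwiseAnd %ty %wgsize_value %wgsize_value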
4292
4293 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004294 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4295 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004296
David Neto87846742018-04-11 17:36:22 -04004297 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004298 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004299 break;
4300 }
4301
4302 // This is the normal path. Generate a load.
4303
David Neto22f144c2017-06-12 14:26:21 -04004304 // Ops[0] = Result Type ID
4305 // Ops[1] = Pointer ID
4306 // Ops[2] ... Ops[n] = Optional Memory Access
4307 //
4308 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004309
David Neto22f144c2017-06-12 14:26:21 -04004310 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004311 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004312
David Neto87846742018-04-11 17:36:22 -04004313 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004314 SPIRVInstList.push_back(Inst);
4315 break;
4316 }
4317 case Instruction::Store: {
4318 StoreInst *ST = cast<StoreInst>(&I);
4319 //
4320 // Generate OpStore.
4321 //
4322
alan-baker5b86ed72019-02-15 08:26:50 -05004323 if (ST->getValueOperand()->getType()->isPointerTy()) {
4324 // Storing a pointer requires variable pointers.
4325 setVariablePointersCapabilities(
4326 ST->getValueOperand()->getType()->getPointerAddressSpace());
4327 }
4328
David Neto22f144c2017-06-12 14:26:21 -04004329 // Ops[0] = Pointer ID
4330 // Ops[1] = Object ID
4331 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4332 //
4333 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004334 SPIRVOperandList Ops;
4335 Ops << MkId(VMap[ST->getPointerOperand()])
4336 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004337
David Neto87846742018-04-11 17:36:22 -04004338 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004339 SPIRVInstList.push_back(Inst);
4340 break;
4341 }
4342 case Instruction::AtomicCmpXchg: {
4343 I.print(errs());
4344 llvm_unreachable("Unsupported instruction???");
4345 break;
4346 }
4347 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004348 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4349
4350 spv::Op opcode;
4351
4352 switch (AtomicRMW->getOperation()) {
4353 default:
4354 I.print(errs());
4355 llvm_unreachable("Unsupported instruction???");
4356 case llvm::AtomicRMWInst::Add:
4357 opcode = spv::OpAtomicIAdd;
4358 break;
4359 case llvm::AtomicRMWInst::Sub:
4360 opcode = spv::OpAtomicISub;
4361 break;
4362 case llvm::AtomicRMWInst::Xchg:
4363 opcode = spv::OpAtomicExchange;
4364 break;
4365 case llvm::AtomicRMWInst::Min:
4366 opcode = spv::OpAtomicSMin;
4367 break;
4368 case llvm::AtomicRMWInst::Max:
4369 opcode = spv::OpAtomicSMax;
4370 break;
4371 case llvm::AtomicRMWInst::UMin:
4372 opcode = spv::OpAtomicUMin;
4373 break;
4374 case llvm::AtomicRMWInst::UMax:
4375 opcode = spv::OpAtomicUMax;
4376 break;
4377 case llvm::AtomicRMWInst::And:
4378 opcode = spv::OpAtomicAnd;
4379 break;
4380 case llvm::AtomicRMWInst::Or:
4381 opcode = spv::OpAtomicOr;
4382 break;
4383 case llvm::AtomicRMWInst::Xor:
4384 opcode = spv::OpAtomicXor;
4385 break;
4386 }
4387
4388 //
4389 // Generate OpAtomic*.
4390 //
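    // For example, an atomicrmw add becomes roughly (a sketch):
    //   %old = OpAtomicIAdd %ty %ptr %scope_device %semantics %value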
4391 SPIRVOperandList Ops;
4392
David Neto257c3892018-04-11 13:19:45 -04004393 Ops << MkId(lookupType(I.getType()))
4394 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004395
4396 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004397 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004398 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004399
4400 const auto ConstantMemorySemantics = ConstantInt::get(
4401 IntTy, spv::MemorySemanticsUniformMemoryMask |
4402 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004403 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004404
David Neto257c3892018-04-11 13:19:45 -04004405 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004406
4407 VMap[&I] = nextID;
4408
David Neto87846742018-04-11 17:36:22 -04004409 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004410 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004411 break;
4412 }
4413 case Instruction::Fence: {
4414 I.print(errs());
4415 llvm_unreachable("Unsupported instruction???");
4416 break;
4417 }
4418 case Instruction::Call: {
4419 CallInst *Call = dyn_cast<CallInst>(&I);
4420 Function *Callee = Call->getCalledFunction();
4421
Alan Baker202c8c72018-08-13 13:47:44 -04004422 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004423 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4424 // Generate an OpLoad
4425 SPIRVOperandList Ops;
4426 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004427
David Neto862b7d82018-06-14 18:48:37 -04004428 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4429 << MkId(ResourceVarDeferredLoadCalls[Call]);
4430
4431 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4432 SPIRVInstList.push_back(Inst);
4433 VMap[Call] = load_id;
4434 break;
4435
4436 } else {
4437 // This maps to an OpVariable we've already generated.
4438 // No code is generated for the call.
4439 }
4440 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004441 } else if (Callee->getName().startswith(
4442 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004443 // Don't codegen an instruction here, but instead map this call directly
4444 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004445 int spec_id = static_cast<int>(
4446 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004447 const auto &info = LocalSpecIdInfoMap[spec_id];
4448 VMap[Call] = info.variable_id;
4449 break;
David Neto862b7d82018-06-14 18:48:37 -04004450 }
4451
4452 // Sampler initializers become a load of the corresponding sampler.
4453
Kévin Petitdf71de32019-04-09 14:09:50 +01004454 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004455 // Map this to a load from the variable.
4456 const auto index_into_sampler_map =
4457 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4458
4459 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004460 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004461 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004462
David Neto257c3892018-04-11 13:19:45 -04004463 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004464 << MkId(SamplerMapIndexToIDMap[static_cast<unsigned>(
4465 index_into_sampler_map)]);
David Neto22f144c2017-06-12 14:26:21 -04004466
David Neto862b7d82018-06-14 18:48:37 -04004467 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004468 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004469 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004470 break;
4471 }
4472
Kévin Petit349c9502019-03-28 17:24:14 +00004473 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004474 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4475 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4476 .Default(spv::OpNop);
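    // For example (a sketch): a call to the spirv.atomic_xor intrinsic lowers
    // directly to OpAtomicXor with the call's operands.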
David Neto22f144c2017-06-12 14:26:21 -04004477
Kévin Petit617a76d2019-04-04 13:54:16 +01004478 // If the switch above didn't have an entry maybe the intrinsic
4479 // is using the name mangling logic.
4480 bool usesMangler = false;
4481 if (opcode == spv::OpNop) {
4482 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4483 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4484 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4485 usesMangler = true;
4486 }
4487 }
4488
Kévin Petit349c9502019-03-28 17:24:14 +00004489 if (opcode != spv::OpNop) {
4490
David Neto22f144c2017-06-12 14:26:21 -04004491 SPIRVOperandList Ops;
4492
Kévin Petit349c9502019-03-28 17:24:14 +00004493 if (!I.getType()->isVoidTy()) {
4494 Ops << MkId(lookupType(I.getType()));
4495 }
David Neto22f144c2017-06-12 14:26:21 -04004496
Kévin Petit617a76d2019-04-04 13:54:16 +01004497 unsigned firstOperand = usesMangler ? 1 : 0;
4498 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004499 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004500 }
4501
Kévin Petit349c9502019-03-28 17:24:14 +00004502 if (!I.getType()->isVoidTy()) {
4503 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004504 }
4505
Kévin Petit349c9502019-03-28 17:24:14 +00004506 SPIRVInstruction *Inst;
4507 if (!I.getType()->isVoidTy()) {
4508 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4509 } else {
4510 Inst = new SPIRVInstruction(opcode, Ops);
4511 }
Kévin Petit8a560882019-03-21 15:24:34 +00004512 SPIRVInstList.push_back(Inst);
4513 break;
4514 }
4515
    // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4517 if (Callee->getName().startswith("spirv.copy_memory")) {
4518 //
4519 // Generate OpCopyMemory.
4520 //
4521
4522 // Ops[0] = Dst ID
4523 // Ops[1] = Src ID
4524 // Ops[2] = Memory Access
4525 // Ops[3] = Alignment
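      // e.g. (a sketch): OpCopyMemory %dst %src Volatile|Aligned 4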
4526
4527 auto IsVolatile =
4528 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4529
4530 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4531 : spv::MemoryAccessMaskNone;
4532
4533 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4534
4535 auto Alignment =
4536 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4537
David Neto257c3892018-04-11 13:19:45 -04004538 SPIRVOperandList Ops;
4539 Ops << MkId(VMap[Call->getArgOperand(0)])
4540 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4541 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004542
David Neto87846742018-04-11 17:36:22 -04004543 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004544
4545 SPIRVInstList.push_back(Inst);
4546
4547 break;
4548 }
4549
David Neto22f144c2017-06-12 14:26:21 -04004550 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4551 // Additionally, OpTypeSampledImage is generated.
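    // For example (a sketch of the expansion; names are illustrative):
    //   %si  = OpSampledImage %sampled_image_ty %image %sampler
    //   %val = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0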
alan-bakerf67468c2019-11-25 15:51:49 -05004552 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004553 //
4554 // Generate OpSampledImage.
4555 //
4556 // Ops[0] = Result Type ID
4557 // Ops[1] = Image ID
4558 // Ops[2] = Sampler ID
4559 //
4560 SPIRVOperandList Ops;
4561
4562 Value *Image = Call->getArgOperand(0);
4563 Value *Sampler = Call->getArgOperand(1);
4564 Value *Coordinate = Call->getArgOperand(2);
4565
4566 TypeMapType &OpImageTypeMap = getImageTypeMap();
4567 Type *ImageTy = Image->getType()->getPointerElementType();
4568 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004569 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004570 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004571
4572 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004573
4574 uint32_t SampledImageID = nextID;
4575
David Neto87846742018-04-11 17:36:22 -04004576 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004577 SPIRVInstList.push_back(Inst);
4578
4579 //
4580 // Generate OpImageSampleExplicitLod.
4581 //
4582 // Ops[0] = Result Type ID
4583 // Ops[1] = Sampled Image ID
4584 // Ops[2] = Coordinate ID
4585 // Ops[3] = Image Operands Type ID
4586 // Ops[4] ... Ops[n] = Operands ID
4587 //
4588 Ops.clear();
4589
alan-bakerf67468c2019-11-25 15:51:49 -05004590 const bool is_int_image = IsIntImageType(Image->getType());
4591 uint32_t result_type = 0;
4592 if (is_int_image) {
4593 result_type = v4int32ID;
4594 } else {
4595 result_type = lookupType(Call->getType());
4596 }
4597
4598 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4599 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004600
4601 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004602 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004603
alan-bakerf67468c2019-11-25 15:51:49 -05004604 uint32_t final_id = nextID++;
4605 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004606
alan-bakerf67468c2019-11-25 15:51:49 -05004607 uint32_t image_id = final_id;
4608 if (is_int_image) {
4609 // Int image requires a bitcast from v4int to v4uint.
4610 image_id = nextID++;
4611 }
4612
4613 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004614 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004615
4616 if (is_int_image) {
4617 // Generate the bitcast.
4618 Ops.clear();
4619 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4620 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4621 SPIRVInstList.push_back(Inst);
4622 }
David Neto22f144c2017-06-12 14:26:21 -04004623 break;
4624 }
4625
alan-bakerf67468c2019-11-25 15:51:49 -05004626 // write_image is mapped to OpImageWrite.
4627 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004628 //
4629 // Generate OpImageWrite.
4630 //
4631 // Ops[0] = Image ID
4632 // Ops[1] = Coordinate ID
4633 // Ops[2] = Texel ID
4634 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4635 // Ops[4] ... Ops[n] = (Optional) Operands ID
4636 //
4637 SPIRVOperandList Ops;
4638
4639 Value *Image = Call->getArgOperand(0);
4640 Value *Coordinate = Call->getArgOperand(1);
4641 Value *Texel = Call->getArgOperand(2);
4642
4643 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004644 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004645 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004646
4647 const bool is_int_image = IsIntImageType(Image->getType());
4648 if (is_int_image) {
4649 // Generate a bitcast to v4int and use it as the texel value.
4650 uint32_t castID = nextID++;
4651 Ops << MkId(v4int32ID) << MkId(TexelID);
4652 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4653 SPIRVInstList.push_back(cast);
4654 Ops.clear();
4655 TexelID = castID;
4656 }
David Neto257c3892018-04-11 13:19:45 -04004657 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004658
David Neto87846742018-04-11 17:36:22 -04004659 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004660 SPIRVInstList.push_back(Inst);
4661 break;
4662 }
4663
alan-bakerce179f12019-12-06 19:02:22 -05004664 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4665 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004666 //
alan-bakerce179f12019-12-06 19:02:22 -05004667 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004668 //
4669 // Ops[0] = Image ID
4670 //
alan-bakerce179f12019-12-06 19:02:22 -05004671 // Result type has components equal to the dimensionality of the image,
4672 // plus 1 if the image is arrayed.
4673 //
4674 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%int_0]
David Neto5c22a252018-03-15 16:07:41 -04004675 SPIRVOperandList Ops;
4676
4677 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004678 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4679 uint32_t SizesTypeID = 0;
4680
David Neto5c22a252018-03-15 16:07:41 -04004681 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004682 const uint32_t dim = ImageDimensionality(Image->getType());
4683 const uint32_t components = dim;
4684 if (components == 1) {
4685 // 1D images aren't currently supported.
4686 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4687 } else {
4688 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4689 }
David Neto5c22a252018-03-15 16:07:41 -04004690 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004691 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004692 spv::Op query_opcode = spv::OpImageQuerySize;
4693 if (clspv::IsSampledImageType(Image->getType())) {
4694 query_opcode = spv::OpImageQuerySizeLod;
4695 // Need explicit 0 for Lod operand.
4696 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4697 Ops << MkId(VMap[CstInt0]);
4698 }
David Neto5c22a252018-03-15 16:07:41 -04004699
4700 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004701 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004702 SPIRVInstList.push_back(QueryInst);
4703
alan-bakerce179f12019-12-06 19:02:22 -05004704 // May require an extra instruction to create the appropriate result of
4705 // the builtin function.
4706 if (clspv::IsGetImageDim(Callee)) {
4707 if (dim == 3) {
4708 // get_image_dim returns an int4 for 3D images.
4709 //
4710 // Reset value map entry since we generated an intermediate
4711 // instruction.
4712 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004713
alan-bakerce179f12019-12-06 19:02:22 -05004714 // Implement:
4715 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4716 Ops.clear();
4717 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4718 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004719
alan-bakerce179f12019-12-06 19:02:22 -05004720 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4721 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004722
alan-bakerce179f12019-12-06 19:02:22 -05004723 auto *Inst =
4724 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4725 SPIRVInstList.push_back(Inst);
4726 } else if (dim != components) {
        // get_image_dim returns an int2 regardless of the arrayedness of the
4728 // image. If the image is arrayed an element must be dropped from the
4729 // query result.
4730 //
4731 // Reset value map entry since we generated an intermediate
4732 // instruction.
4733 VMap[&I] = nextID;
4734
4735 // Implement:
4736 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4737 Ops.clear();
4738 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4739 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4740
4741 auto *Inst =
4742 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4743 SPIRVInstList.push_back(Inst);
4744 }
4745 } else if (components > 1) {
4746 // Reset value map entry since we generated an intermediate instruction.
4747 VMap[&I] = nextID;
4748
4749 // Implement:
4750 // %result = OpCompositeExtract %uint %sizes <component number>
4751 Ops.clear();
4752 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4753
4754 uint32_t component = 0;
4755 if (IsGetImageHeight(Callee))
4756 component = 1;
4757 else if (IsGetImageDepth(Callee))
4758 component = 2;
4759 Ops << MkNum(component);
4760
4761 auto *Inst =
4762 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4763 SPIRVInstList.push_back(Inst);
4764 }
David Neto5c22a252018-03-15 16:07:41 -04004765 break;
4766 }
4767
    // The call instruction is deferred because it needs the callee's ID.
    // Record the slot's location on SPIRVInstructionList.
4770 DeferredInsts.push_back(
4771 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4772
David Neto3fbb4072017-10-16 11:28:14 -04004773 // Check whether the implementation of this call uses an extended
4774 // instruction plus one more value-producing instruction. If so, then
4775 // reserve the id for the extra value-producing slot.
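    // For example, clz maps to the FindUMsb extended instruction followed by
    // an OpISub from 31 (see HandleDeferredInstruction), so it consumes two
    // result ids.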
4776 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4777 if (EInst != kGlslExtInstBad) {
4778 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004779 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004780 VMap[&I] = nextID;
4781 nextID++;
4782 }
4783 break;
4784 }
4785 case Instruction::Ret: {
4786 unsigned NumOps = I.getNumOperands();
4787 if (NumOps == 0) {
4788 //
4789 // Generate OpReturn.
4790 //
David Neto87846742018-04-11 17:36:22 -04004791 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004792 } else {
4793 //
4794 // Generate OpReturnValue.
4795 //
4796
4797 // Ops[0] = Return Value ID
4798 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004799
4800 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004801
David Neto87846742018-04-11 17:36:22 -04004802 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004803 SPIRVInstList.push_back(Inst);
4804 break;
4805 }
4806 break;
4807 }
4808 }
4809}
4810
4811void SPIRVProducerPass::GenerateFuncEpilogue() {
4812 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4813
4814 //
4815 // Generate OpFunctionEnd
4816 //
4817
David Neto87846742018-04-11 17:36:22 -04004818 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004819 SPIRVInstList.push_back(Inst);
4820}
4821
4822bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004823 // Don't specialize <4 x i8> if i8 is generally supported.
4824 if (clspv::Option::Int8Support())
4825 return false;
4826
David Neto22f144c2017-06-12 14:26:21 -04004827 LLVMContext &Context = Ty->getContext();
4828 if (Ty->isVectorTy()) {
4829 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4830 Ty->getVectorNumElements() == 4) {
4831 return true;
4832 }
4833 }
4834
4835 return false;
4836}
4837
4838void SPIRVProducerPass::HandleDeferredInstruction() {
4839 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4840 ValueMapType &VMap = getValueMap();
4841 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4842
4843 for (auto DeferredInst = DeferredInsts.rbegin();
4844 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4845 Value *Inst = std::get<0>(*DeferredInst);
4846 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4847 if (InsertPoint != SPIRVInstList.end()) {
4848 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4849 ++InsertPoint;
4850 }
4851 }
4852
4853 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
      // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004855 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004856 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004857 //
4858 // Generate OpLoopMerge.
4859 //
4860 // Ops[0] = Merge Block ID
4861 // Ops[1] = Continue Target ID
        // Ops[2] = Loop Control
4863 SPIRVOperandList Ops;
4864
alan-baker06cad652019-12-03 17:56:47 -05004865 auto MergeBB = MergeBlocks[BrBB];
4866 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04004867 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004868 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004869 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004870 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004871
David Neto87846742018-04-11 17:36:22 -04004872 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004873 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05004874 } else if (MergeBlocks.count(BrBB)) {
4875 //
4876 // Generate OpSelectionMerge.
4877 //
4878 // Ops[0] = Merge Block ID
4879 // Ops[1] = Selection Control
4880 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004881
alan-baker06cad652019-12-03 17:56:47 -05004882 auto MergeBB = MergeBlocks[BrBB];
4883 uint32_t MergeBBID = VMap[MergeBB];
4884 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004885
alan-baker06cad652019-12-03 17:56:47 -05004886 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
4887 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04004888 }
4889
4890 if (Br->isConditional()) {
4891 //
4892 // Generate OpBranchConditional.
4893 //
4894 // Ops[0] = Condition ID
4895 // Ops[1] = True Label ID
4896 // Ops[2] = False Label ID
4897 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4898 SPIRVOperandList Ops;
4899
4900 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004901 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004902 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004903
4904 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004905
David Neto87846742018-04-11 17:36:22 -04004906 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004907 SPIRVInstList.insert(InsertPoint, BrInst);
4908 } else {
4909 //
4910 // Generate OpBranch.
4911 //
4912 // Ops[0] = Target Label ID
4913 SPIRVOperandList Ops;
4914
4915 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004916 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004917
David Neto87846742018-04-11 17:36:22 -04004918 SPIRVInstList.insert(InsertPoint,
4919 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004920 }
4921 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004922 if (PHI->getType()->isPointerTy()) {
4923 // OpPhi on pointers requires variable pointers.
4924 setVariablePointersCapabilities(
4925 PHI->getType()->getPointerAddressSpace());
4926 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4927 setVariablePointers(true);
4928 }
4929 }
4930
David Neto22f144c2017-06-12 14:26:21 -04004931 //
4932 // Generate OpPhi.
4933 //
4934 // Ops[0] = Result Type ID
4935 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
4936 SPIRVOperandList Ops;
4937
David Neto257c3892018-04-11 13:19:45 -04004938 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004939
David Neto22f144c2017-06-12 14:26:21 -04004940 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4941 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04004942 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04004943 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004944 }
4945
4946 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004947 InsertPoint,
4948 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004949 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4950 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004951 auto callee_name = Callee->getName();
4952 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004953
4954 if (EInst) {
4955 uint32_t &ExtInstImportID = getOpExtInstImportID();
4956
4957 //
4958 // Generate OpExtInst.
4959 //
4960
4961 // Ops[0] = Result Type ID
4962 // Ops[1] = Set ID (OpExtInstImport ID)
4963 // Ops[2] = Instruction Number (Literal Number)
4964 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
4965 SPIRVOperandList Ops;
4966
David Neto862b7d82018-06-14 18:48:37 -04004967 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
4968 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004969
David Neto22f144c2017-06-12 14:26:21 -04004970 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4971 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004972 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004973 }
4974
David Neto87846742018-04-11 17:36:22 -04004975 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
4976 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004977 SPIRVInstList.insert(InsertPoint, ExtInst);
4978
David Neto3fbb4072017-10-16 11:28:14 -04004979 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
4980 if (IndirectExtInst != kGlslExtInstBad) {
4981 // Generate one more instruction that uses the result of the extended
4982 // instruction. Its result id is one more than the id of the
4983 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04004984 LLVMContext &Context =
4985 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04004986
David Neto3fbb4072017-10-16 11:28:14 -04004987 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
4988 &VMap, &SPIRVInstList, &InsertPoint](
4989 spv::Op opcode, Constant *constant) {
4990 //
4991 // Generate instruction like:
4992 // result = opcode constant <extinst-result>
4993 //
4994 // Ops[0] = Result Type ID
4995 // Ops[1] = Operand 0 ;; the constant, suitably splatted
4996 // Ops[2] = Operand 1 ;; the result of the extended instruction
4997 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004998
David Neto3fbb4072017-10-16 11:28:14 -04004999 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005000 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005001
5002 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5003 constant = ConstantVector::getSplat(
5004 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5005 }
David Neto257c3892018-04-11 13:19:45 -04005006 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005007
5008 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005009 InsertPoint, new SPIRVInstruction(
5010 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005011 };
5012
5013 switch (IndirectExtInst) {
5014 case glsl::ExtInstFindUMsb: // Implementing clz
5015 generate_extra_inst(
5016 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5017 break;
5018 case glsl::ExtInstAcos: // Implementing acospi
5019 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005020 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005021 case glsl::ExtInstAtan2: // Implementing atan2pi
5022 generate_extra_inst(
5023 spv::OpFMul,
5024 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5025 break;
5026
5027 default:
5028 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005029 }
David Neto22f144c2017-06-12 14:26:21 -04005030 }
David Neto3fbb4072017-10-16 11:28:14 -04005031
alan-bakerb39c8262019-03-08 14:03:37 -05005032 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005033 //
5034 // Generate OpBitCount
5035 //
5036 // Ops[0] = Result Type ID
5037 // Ops[1] = Base ID
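        // e.g. (a sketch): %n = OpBitCount %uint %x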
David Neto257c3892018-04-11 13:19:45 -04005038 SPIRVOperandList Ops;
5039 Ops << MkId(lookupType(Call->getType()))
5040 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005041
5042 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005043 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005044 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005045
David Neto862b7d82018-06-14 18:48:37 -04005046 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005047
5048 // Generate an OpCompositeConstruct
5049 SPIRVOperandList Ops;
5050
5051 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005052 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005053
5054 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005055 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005056 }
5057
5058 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005059 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5060 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005061
Alan Baker202c8c72018-08-13 13:47:44 -04005062 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5063
5064 // We have already mapped the call's result value to an ID.
5065 // Don't generate any code now.
5066
5067 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005068
5069 // We have already mapped the call's result value to an ID.
5070 // Don't generate any code now.
5071
David Neto22f144c2017-06-12 14:26:21 -04005072 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005073 if (Call->getType()->isPointerTy()) {
5074 // Functions returning pointers require variable pointers.
5075 setVariablePointersCapabilities(
5076 Call->getType()->getPointerAddressSpace());
5077 }
5078
David Neto22f144c2017-06-12 14:26:21 -04005079 //
5080 // Generate OpFunctionCall.
5081 //
5082
5083 // Ops[0] = Result Type ID
5084 // Ops[1] = Callee Function ID
5085 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5086 SPIRVOperandList Ops;
5087
David Neto862b7d82018-06-14 18:48:37 -04005088 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005089
5090 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005091 if (CalleeID == 0) {
5092 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005093 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005094 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5095 // causes an infinite loop. Instead, go ahead and generate
5096 // the bad function call. A validator will catch the 0-Id.
5097 // llvm_unreachable("Can't translate function call");
5098 }
David Neto22f144c2017-06-12 14:26:21 -04005099
David Neto257c3892018-04-11 13:19:45 -04005100 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005101
David Neto22f144c2017-06-12 14:26:21 -04005102 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5103 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005104 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005105 auto *operand_type = operand->getType();
5106 // Images and samplers can be passed as function parameters without
5107 // variable pointers.
5108 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5109 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005110 auto sc =
5111 GetStorageClass(operand->getType()->getPointerAddressSpace());
5112 if (sc == spv::StorageClassStorageBuffer) {
5113 // Passing SSBO by reference requires variable pointers storage
5114 // buffer.
5115 setVariablePointersStorageBuffer(true);
5116 } else if (sc == spv::StorageClassWorkgroup) {
5117 // Workgroup references require variable pointers if they are not
5118 // memory object declarations.
5119 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5120 // Workgroup accessor represents a variable reference.
5121 if (!operand_call->getCalledFunction()->getName().startswith(
5122 clspv::WorkgroupAccessorFunction()))
5123 setVariablePointers(true);
5124 } else {
5125 // Arguments are function parameters.
5126 if (!isa<Argument>(operand))
5127 setVariablePointers(true);
5128 }
5129 }
5130 }
5131 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005132 }
5133
David Neto87846742018-04-11 17:36:22 -04005134 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5135 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005136 SPIRVInstList.insert(InsertPoint, CallInst);
5137 }
5138 }
5139 }
5140}
5141
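// Note on the decorations deferred to this point: an ArrayStride decoration
// records the byte distance between consecutive elements behind a pointer.
// As an illustrative example (values depend on the data layout), a pointer
// used to stride over float4 data would typically get:
//   OpDecorate %ptr_type ArrayStride 16
// since a four-element float vector occupies 16 bytes.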
void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
  if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
    return;
  }

  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();

  // Find an iterator pointing just past the last decoration.
  bool seen_decorations = false;
  auto DecoInsertPoint =
      std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                   [&seen_decorations](SPIRVInstruction *Inst) -> bool {
                     const bool is_decoration =
                         Inst->getOpcode() == spv::OpDecorate ||
                         Inst->getOpcode() == spv::OpMemberDecorate;
                     if (is_decoration) {
                       seen_decorations = true;
                       return false;
                     } else {
                       return seen_decorations;
                     }
                   });

  // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
  // instructions we generated earlier.
  for (auto *type : getTypesNeedingArrayStride()) {
    Type *elemTy = nullptr;
    if (auto *ptrTy = dyn_cast<PointerType>(type)) {
      elemTy = ptrTy->getElementType();
    } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
      elemTy = arrayTy->getArrayElementType();
    } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
      elemTy = seqTy->getSequentialElementType();
    } else {
      errs() << "Unhandled strided type " << *type << "\n";
      llvm_unreachable("Unhandled strided type");
    }

    // Ops[0] = Target ID
    // Ops[1] = Decoration (ArrayStride)
    // Ops[2] = Stride number (Literal Number)
    SPIRVOperandList Ops;

    // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
    const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));

    Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
        << MkNum(stride);

    auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
    SPIRVInstList.insert(DecoInsertPoint, DecoInst);
  }

  // Emit SpecId decorations targeting the array size value.
  for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
       ++spec_id) {
    LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
    SPIRVOperandList Ops;
    Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
        << MkNum(arg_info.spec_id);
    SPIRVInstList.insert(DecoInsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));
  }
}

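// A quick reminder of the Itanium mangling used in the table below (the
// names come from the OpenCL builtin declarations): "_Z3maxff" is
// max(float, float), "Dv2_f" denotes a 2-element float vector, and suffixes
// such as c/s/i/l versus h/t/j/m distinguish signed from unsigned integer
// operands, which is what selects the S* versus U* GLSL instructions.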
glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
  return StringSwitch<glsl::ExtInst>(Name)
      .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
      .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
      .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
      .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
      .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
      .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
      .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
      .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
      .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
      .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
      .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
      .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
      .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
      .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
      .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
      .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
      .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
      .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
      .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
      .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
      .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
      .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
      .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
      .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
      .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
      .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
      .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
      .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
      .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
      .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
      .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
      .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
      .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
      .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
      .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
      .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
      .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
      .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
      .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
      .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
      .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
      .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
      .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
      .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
      .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
      .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
      .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
      .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
      .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
      .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
      .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
      .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
      .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
      .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
      .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
      .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
      .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
      .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
      .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
      .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
      .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
      .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
      .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
      .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
      .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
      .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
      .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
      .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
      .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
      .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
      .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
      .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
      .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
      .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
      .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
      .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
      .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
      .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
      .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
      .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
      .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
      .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
      .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
      .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
      .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
      .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
      .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
      .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
      .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
      .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
      .Default(kGlslExtInstBad);
}

glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
  // Check indirect cases.
  return StringSwitch<glsl::ExtInst>(Name)
      .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
      // Use exact match on float arg because these need a multiply
      // of a constant of the right floating point type.
      .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
      .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
      .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
      .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
      .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
      .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
      .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
      .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
      .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
      .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
      .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
      .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
      .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
      .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
      .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
      .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
      .Default(kGlslExtInstBad);
}

glsl::ExtInst
SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
  auto direct = getExtInstEnum(Name);
  if (direct != kGlslExtInstBad)
    return direct;
  return getIndirectExtInstEnum(Name);
}

void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
  binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
}

void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
  WriteOneWord(Inst->getResultID());
}

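// Worked example: OpTypeInt has opcode 21 (0x15), and a 4-word OpTypeInt
// instruction therefore starts with the word 0x00040015 (word count in the
// high 16 bits, opcode in the low 16 bits).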
void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
  // High 16 bit : Word Count
  // Low 16 bit : Opcode
  uint32_t Word = Inst->getOpcode();
  const uint32_t count = Inst->getWordCount();
  if (count > 65535) {
    errs() << "Word count limit of 65535 exceeded: " << count << "\n";
    llvm_unreachable("Word count too high");
  }
  Word |= Inst->getWordCount() << 16;
  WriteOneWord(Word);
}

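// Literal string operands are written as bytes packed little-endian into
// 32-bit words, nul-terminated and zero-padded to a word boundary.  For
// example, the 5-byte string "clspv" occupies two words:
//   word 0: 'c' | 'l' << 8 | 's' << 16 | 'p' << 24
//   word 1: 'v' (the remaining three bytes are the terminator and padding)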
void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
  SPIRVOperandType OpTy = Op->getType();
  switch (OpTy) {
  default: {
    llvm_unreachable("Unsupported SPIRV Operand Type???");
    break;
  }
  case SPIRVOperandType::NUMBERID: {
    WriteOneWord(Op->getNumID());
    break;
  }
  case SPIRVOperandType::LITERAL_STRING: {
    std::string Str = Op->getLiteralStr();
    const char *Data = Str.c_str();
    size_t WordSize = Str.size() / 4;
    for (unsigned Idx = 0; Idx < WordSize; Idx++) {
      WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
    }

    uint32_t Remainder = Str.size() % 4;
    uint32_t LastWord = 0;
    if (Remainder) {
      for (unsigned Idx = 0; Idx < Remainder; Idx++) {
        LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
      }
    }

    WriteOneWord(LastWord);
    break;
  }
  case SPIRVOperandType::LITERAL_INTEGER:
  case SPIRVOperandType::LITERAL_FLOAT: {
    auto LiteralNum = Op->getLiteralNum();
    // TODO: Handle LiteralNum carefully.
    for (auto Word : LiteralNum) {
      WriteOneWord(Word);
    }
    break;
  }
  }
}

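// The switch below groups instructions by their binary layout:
//   - instructions with no result id (e.g. OpStore, OpDecorate) write the
//     opcode word followed by every operand;
//   - type-declaring and similar instructions (e.g. OpTypeInt, OpLabel)
//     write the result id immediately after the opcode word;
//   - value-producing instructions (e.g. OpIAdd, OpLoad) write the result
//     type operand first, then the result id, then the remaining operands.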
void SPIRVProducerPass::WriteSPIRVBinary() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();

  for (auto Inst : SPIRVInstList) {
    SPIRVOperandList Ops{Inst->getOperands()};
    spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());

    switch (Opcode) {
    default: {
      errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
      llvm_unreachable("Unsupported SPIRV instruction");
      break;
    }
    case spv::OpCapability:
    case spv::OpExtension:
    case spv::OpMemoryModel:
    case spv::OpEntryPoint:
    case spv::OpExecutionMode:
    case spv::OpSource:
    case spv::OpDecorate:
    case spv::OpMemberDecorate:
    case spv::OpBranch:
    case spv::OpBranchConditional:
    case spv::OpSelectionMerge:
    case spv::OpLoopMerge:
    case spv::OpStore:
    case spv::OpImageWrite:
    case spv::OpReturnValue:
    case spv::OpControlBarrier:
    case spv::OpMemoryBarrier:
    case spv::OpReturn:
    case spv::OpFunctionEnd:
    case spv::OpCopyMemory: {
      WriteWordCountAndOpcode(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    case spv::OpTypeBool:
    case spv::OpTypeVoid:
    case spv::OpTypeSampler:
    case spv::OpLabel:
    case spv::OpExtInstImport:
    case spv::OpTypePointer:
    case spv::OpTypeRuntimeArray:
    case spv::OpTypeStruct:
    case spv::OpTypeImage:
    case spv::OpTypeSampledImage:
    case spv::OpTypeInt:
    case spv::OpTypeFloat:
    case spv::OpTypeArray:
    case spv::OpTypeVector:
    case spv::OpTypeFunction: {
      WriteWordCountAndOpcode(Inst);
      WriteResultID(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    case spv::OpFunction:
    case spv::OpFunctionParameter:
    case spv::OpAccessChain:
    case spv::OpPtrAccessChain:
    case spv::OpInBoundsAccessChain:
    case spv::OpUConvert:
    case spv::OpSConvert:
    case spv::OpConvertFToU:
    case spv::OpConvertFToS:
    case spv::OpConvertUToF:
    case spv::OpConvertSToF:
    case spv::OpFConvert:
    case spv::OpConvertPtrToU:
    case spv::OpConvertUToPtr:
    case spv::OpBitcast:
    case spv::OpFNegate:
    case spv::OpIAdd:
    case spv::OpFAdd:
    case spv::OpISub:
    case spv::OpFSub:
    case spv::OpIMul:
    case spv::OpFMul:
    case spv::OpUDiv:
    case spv::OpSDiv:
    case spv::OpFDiv:
    case spv::OpUMod:
    case spv::OpSRem:
    case spv::OpFRem:
    case spv::OpUMulExtended:
    case spv::OpSMulExtended:
    case spv::OpBitwiseOr:
    case spv::OpBitwiseXor:
    case spv::OpBitwiseAnd:
    case spv::OpNot:
    case spv::OpShiftLeftLogical:
    case spv::OpShiftRightLogical:
    case spv::OpShiftRightArithmetic:
    case spv::OpBitCount:
    case spv::OpCompositeConstruct:
    case spv::OpCompositeExtract:
    case spv::OpVectorExtractDynamic:
    case spv::OpCompositeInsert:
    case spv::OpCopyObject:
    case spv::OpVectorInsertDynamic:
    case spv::OpVectorShuffle:
    case spv::OpIEqual:
    case spv::OpINotEqual:
    case spv::OpUGreaterThan:
    case spv::OpUGreaterThanEqual:
    case spv::OpULessThan:
    case spv::OpULessThanEqual:
    case spv::OpSGreaterThan:
    case spv::OpSGreaterThanEqual:
    case spv::OpSLessThan:
    case spv::OpSLessThanEqual:
    case spv::OpFOrdEqual:
    case spv::OpFOrdGreaterThan:
    case spv::OpFOrdGreaterThanEqual:
    case spv::OpFOrdLessThan:
    case spv::OpFOrdLessThanEqual:
    case spv::OpFOrdNotEqual:
    case spv::OpFUnordEqual:
    case spv::OpFUnordGreaterThan:
    case spv::OpFUnordGreaterThanEqual:
    case spv::OpFUnordLessThan:
    case spv::OpFUnordLessThanEqual:
    case spv::OpFUnordNotEqual:
    case spv::OpExtInst:
    case spv::OpIsInf:
    case spv::OpIsNan:
    case spv::OpAny:
    case spv::OpAll:
    case spv::OpUndef:
    case spv::OpConstantNull:
    case spv::OpLogicalOr:
    case spv::OpLogicalAnd:
    case spv::OpLogicalNot:
    case spv::OpLogicalNotEqual:
    case spv::OpConstantComposite:
    case spv::OpSpecConstantComposite:
    case spv::OpConstantTrue:
    case spv::OpConstantFalse:
    case spv::OpConstant:
    case spv::OpSpecConstant:
    case spv::OpVariable:
    case spv::OpFunctionCall:
    case spv::OpSampledImage:
    case spv::OpImageSampleExplicitLod:
    case spv::OpImageQuerySize:
    case spv::OpImageQuerySizeLod:
    case spv::OpSelect:
    case spv::OpPhi:
    case spv::OpLoad:
    case spv::OpAtomicIAdd:
    case spv::OpAtomicISub:
    case spv::OpAtomicExchange:
    case spv::OpAtomicIIncrement:
    case spv::OpAtomicIDecrement:
    case spv::OpAtomicCompareExchange:
    case spv::OpAtomicUMin:
    case spv::OpAtomicSMin:
    case spv::OpAtomicUMax:
    case spv::OpAtomicSMax:
    case spv::OpAtomicAnd:
    case spv::OpAtomicOr:
    case spv::OpAtomicXor:
    case spv::OpDot: {
      WriteWordCountAndOpcode(Inst);
      WriteOperand(Ops[0]);
      WriteResultID(Inst);
      for (uint32_t i = 1; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    }
  }
}

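// For instance, a struct made only of floats and ints can be filled with
// zeros (it is "nullable"), while an opaque image or sampler type, or any
// aggregate containing one, is not.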
bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
  switch (type->getTypeID()) {
  case Type::HalfTyID:
  case Type::FloatTyID:
  case Type::DoubleTyID:
  case Type::IntegerTyID:
  case Type::VectorTyID:
    return true;
  case Type::PointerTyID: {
    const PointerType *pointer_type = cast<PointerType>(type);
    if (pointer_type->getPointerAddressSpace() !=
        AddressSpace::UniformConstant) {
      auto pointee_type = pointer_type->getPointerElementType();
      if (pointee_type->isStructTy() &&
          cast<StructType>(pointee_type)->isOpaque()) {
        // Images and samplers are not nullable.
        return false;
      }
    }
    return true;
  }
  case Type::ArrayTyID:
    return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
  case Type::StructTyID: {
    const StructType *struct_type = cast<StructType>(type);
    // Images and samplers are not nullable.
    if (struct_type->isOpaque())
      return false;
    for (const auto element : struct_type->elements()) {
      if (!IsTypeNullable(element))
        return false;
    }
    return true;
  }
  default:
    return false;
  }
}

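// Rough sketch of the metadata shape consumed below (the actual named
// metadata strings come from clspv::RemappedTypeOffsetMetadataName() and
// clspv::RemappedTypeSizesMetadataName()).  Each operand pairs a type
// exemplar with its data, approximately:
//   !{ <type exemplar>, !{ offset0, offset1, ... } }
//   !{ <type exemplar>, !{ size-in-bits, store-size, alloc-size } }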
void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
  if (auto *offsets_md =
          module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a vector of offsets.
    for (const auto *operand : offsets_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
      std::vector<uint32_t> offsets;
      for (const Metadata *offset_md : offset_vector->operands()) {
        const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
        offsets.push_back(static_cast<uint32_t>(
            cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
      }
      RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
    }
  }

  if (auto *sizes_md =
          module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a triple of sizes: type size in
    // bits, store size and alloc size.
    for (const auto *operand : sizes_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
      uint64_t type_size_in_bits =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
              ->getZExtValue();
      uint64_t type_store_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
              ->getZExtValue();
      uint64_t type_alloc_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
              ->getZExtValue();
      RemappedUBOTypeSizes.insert(std::make_pair(
          type, std::make_tuple(type_size_in_bits, type_store_size,
                                type_alloc_size)));
    }
  }
}

uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
                                              const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<0>(iter->second);
  }

  return DL.getTypeSizeInBits(type);
}

uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<1>(iter->second);
  }

  return DL.getTypeStoreSize(type);
}

uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<2>(iter->second);
  }

  return DL.getTypeAllocSize(type);
}

void SPIRVProducerPass::setVariablePointersCapabilities(
    unsigned address_space) {
  if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
    setVariablePointersStorageBuffer(true);
  } else {
    setVariablePointers(true);
  }
}

Value *SPIRVProducerPass::GetBasePointer(Value *v) {
  if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
    return GetBasePointer(gep->getPointerOperand());
  }

  // Conservatively return |v|.
  return v;
}

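// For example, two calls to the clspv resource accessor with the same
// descriptor set and binding operands name the same underlying storage
// buffer even though they are distinct call instructions; sameResource
// treats them as one object.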
bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
  if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
    if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
      if (lhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction()) &&
          rhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction())) {
        // For resource accessors, match descriptor set and binding.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
            lhs_call->getOperand(1) == rhs_call->getOperand(1))
          return true;
      } else if (lhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction()) &&
                 rhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction())) {
        // For workgroup resources, match spec id.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
          return true;
      }
    }
  }

  return false;
}

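// Roughly: a pointer-producing select or phi satisfies the "same object"
// requirement when every non-null (and, with the HackUndef option, non-undef)
// incoming value traces back to the same base pointer or to the same
// resource as determined by sameResource above.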
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is a null, then we satisfy the same
    // object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}

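// Illustrative case: if a kernel binds a coherent storage buffer to a global
// pointer argument and then forwards that pointer to a helper function, the
// helper's corresponding Argument is considered to be called with a coherent
// resource by the traversal below.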
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        resource_call->getCalledFunction()->getName().startswith(
            clspv::ResourceAccessorFunction())) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables.
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}

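// Rough picture of what gets recorded for a simple loop once StructurizeCFG
// has run (block names are illustrative only):
//   MergeBlocks[header]    = exit   ; the loop's single exit block
//   ContinueBlocks[header] = latch  ; the block with the back-edge
// Other conditional branches get MergeBlocks[bb] = false successor, unless a
// successor is already recorded as a loop merge or continue block.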
void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // The StructurizeCFG pass converts the CFG into a triangle shape
          // with single-entry, single-exit regions.  As a result, a loop
          // should not have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, the Continue Target must dominate the
          // back-edge block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether this block dominates the block with the
            // back-edge.  The loop latch is the single block with a
            // back-edge.  If it was possible, StructurizeCFG made the loop
            // conform to this requirement, otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // The StructurizeCFG pass already manipulated the CFG.  Just use
            // the false block of the branch instruction as the merge block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}