// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() { return Type; };
  uint32_t getNumID() { return LiteralNum[0]; };
  std::string getLiteralStr() { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};
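
// Editorial note (not in the original source): GetNumWords() above counts
// 32-bit SPIR-V words. For example, a LITERAL_STRING of 8 characters occupies
// 9 bytes once the terminating null is included, which rounds up to 3 words;
// (8 + 4) / 4 == 3 matches that ceil((length + 1) / 4) requirement for a
// SPIR-V literal string operand.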

class SPIRVOperandList {
public:
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
      : contents_(init.begin(), init.end()) {}
  operator ArrayRef<SPIRVOperand *>() { return contents_; }
  void push_back(SPIRVOperand *op) { contents_.push_back(op); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  SPIRVOperand *&operator[](size_t i) { return contents_[i]; }

  const SmallVector<SPIRVOperand *, 8> &getOperands() const {
    return contents_;
  }

private:
  SmallVector<SPIRVOperand *, 8> contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
  list.push_back(elem);
  return list;
}

SPIRVOperand *MkNum(uint32_t num) {
  return new SPIRVOperand(LITERAL_INTEGER, num);
}
SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_INTEGER, num_vec);
}
SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_FLOAT, num_vec);
}
SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
SPIRVOperand *MkString(StringRef str) {
  return new SPIRVOperand(LITERAL_STRING, str);
}

struct SPIRVInstruction {
  // Create an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
        Operands(Ops.begin(), Ops.end()) {
    for (auto *operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
  }
  // Create an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                            ArrayRef<SPIRVOperand *> Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
        Operands(Ops.begin(), Ops.end()) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    for (auto *operand : Ops) {
      WordCount += operand->GetNumWords();
    }
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<SPIRVOperand *, 4> Operands;
};
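
// Editorial sketch (not from the original source; ResultTypeID is a
// placeholder for a previously allocated id): the helpers above are typically
// combined like this when emitting an instruction:
//   SPIRVOperandList Ops;
//   Ops << MkId(ResultTypeID) << MkNum(42);
//   auto *Inst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
//   getSPIRVInstList().push_back(Inst);
// The instruction computes its own word count: one word for the combined
// word-count/opcode, one for the result id, plus the words of each operand.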

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, uint32_t> SamplerMapIndexToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type but
  // with the pointer-to-constant parameter replaced by a
  // pointer-to-ModuleScopePrivate parameter.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv
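
// Editorial sketch (hypothetical driver code, not part of this file): the
// factory above is meant to be scheduled from a legacy pass manager, e.g.
//   llvm::legacy::PassManager pm;
//   pm.add(clspv::createSPIRVProducerPass(out, &descriptor_map_entries,
//                                         sampler_map,
//                                         /*outputCInitList=*/false));
//   pm.run(module);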

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // If we have a sampler map, we might have literal samplers to generate.
  if (0 < getSamplerMap().size()) {
    GenerateSamplers(module);
  }

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }
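
  // Editorial note (not in the original source): the loop above reassembles
  // each 32-bit word from four bytes in little-endian order, so on a
  // little-endian host the first emitted word is the SPIR-V magic number
  // 0x07230203 (119734787 decimal) written by outputHeader().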

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
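
// Editorial note (not in the original source): the five words emitted above
// follow the SPIR-V module header layout: magic number, version, generator
// word (Google's vendor ID 21 in the high 16 bits), the id bound (patched
// later by patchHeader()), and the reserved schema word, which must be zero.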

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR artifacts such as global variables for
  // arguments, constants, and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. This
  // function is executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  // These function calls need a <2 x i32> as an intermediate result but not
  // the final result.
  std::unordered_set<std::string> NeedsIVec2{
      "_Z15get_image_width14ocl_image2d_ro",
      "_Z15get_image_width14ocl_image2d_wo",
      "_Z16get_image_height14ocl_image2d_ro",
      "_Z16get_image_height14ocl_image2d_wo",
  };

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (callee_name.equals(
                  "_Z11read_imagef14ocl_image2d_ro11ocl_samplerDv2_f") ||
              callee_name.equals(
                  "_Z11read_imagef14ocl_image3d_ro11ocl_samplerDv4_f")) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;

            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (NeedsIVec2.find(callee_name) != NeedsIVec2.end()) {
            FindType(VectorType::get(Type::getInt32Ty(Context), 2));
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    if (M.getTypeByName("opencl.image2d_ro_t") ||
        M.getTypeByName("opencl.image2d_wo_t") ||
        M.getTypeByName("opencl.image3d_ro_t") ||
        M.getTypeByName("opencl.image3d_wo_t")) {
      // Assume Image type's sampled type is float type.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
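
// Editorial sketch (hypothetical IR, not from this file): when module-scope
// constants are not clustered into a storage buffer, a __constant global such
// as
//   @lut = addrspace(C) constant [4 x i32] [i32 1, i32 2, i32 3, i32 4]
// (where addrspace(C) is clspv's __constant address space) is recreated in
// the ModuleScopePrivate address space, its non-constant users are redirected
// to the new variable, and the original is erased, as implemented above.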

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}
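
// Editorial note (inferred from the operand accesses above, not stated
// elsewhere in this section): each @clspv.resource.var.* call is read here as
// carrying constant integer operands where operand 0 is the descriptor set,
// 1 is the binding, 2 is the clspv::ArgKind, 3 is the kernel argument index,
// and 5 is the coherent flag.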
1004
David Neto22f144c2017-06-12 14:26:21 -04001005bool SPIRVProducerPass::FindExtInst(Module &M) {
1006 LLVMContext &Context = M.getContext();
1007 bool HasExtInst = false;
1008
1009 for (Function &F : M) {
1010 for (BasicBlock &BB : F) {
1011 for (Instruction &I : BB) {
1012 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1013 Function *Callee = Call->getCalledFunction();
1014 // Check whether this call is for extend instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001015 auto callee_name = Callee->getName();
1016 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1017 const glsl::ExtInst IndirectEInst =
1018 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001019
David Neto3fbb4072017-10-16 11:28:14 -04001020 HasExtInst |=
1021 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1022
1023 if (IndirectEInst) {
1024 // Register extra constants if needed.
1025
1026 // Registers a type and constant for computing the result of the
1027 // given instruction. If the result of the instruction is a vector,
1028 // then make a splat vector constant with the same number of
1029 // elements.
1030 auto register_constant = [this, &I](Constant *constant) {
1031 FindType(constant->getType());
1032 FindConstant(constant);
1033 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1034 // Register the splat vector of the value with the same
1035 // width as the result of the instruction.
1036 auto *vec_constant = ConstantVector::getSplat(
1037 static_cast<unsigned>(vectorTy->getNumElements()),
1038 constant);
1039 FindConstant(vec_constant);
1040 FindType(vec_constant->getType());
1041 }
1042 };
1043 switch (IndirectEInst) {
1044 case glsl::ExtInstFindUMsb:
1045 // clz needs OpExtInst and OpISub with constant 31, or splat
1046 // vector of 31. Add it to the constant list here.
1047 register_constant(
1048 ConstantInt::get(Type::getInt32Ty(Context), 31));
1049 break;
1050 case glsl::ExtInstAcos:
1051 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001052 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001053 case glsl::ExtInstAtan2:
1054 // We need 1/pi for acospi, asinpi, atan2pi.
1055 register_constant(
1056 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1057 break;
1058 default:
1059 assert(false && "internally inconsistent");
1060 }
David Neto22f144c2017-06-12 14:26:21 -04001061 }
1062 }
1063 }
1064 }
1065 }
1066
1067 return HasExtInst;
1068}
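
// Editorial note (not in the original source): the "indirect" cases above are
// builtins lowered as one extended instruction plus one extra instruction,
// e.g. clz(x) is computed as 31 - FindUMsb(x) for 32-bit x, and acospi(x) as
// acos(x) * (1/pi), which is why the constant 31 and kOneOverPi are
// registered here.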

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

void SPIRVProducerPass::FindTypePerFunc(Function &F) {
  // Investigate function's type.
  FunctionType *FTy = F.getFunctionType();

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    // Handle a regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
      SmallVector<Type *, 4> NewFuncParamTys;
      for (unsigned i = 0; i < FTy->getNumParams(); i++) {
        Type *ParamTy = FTy->getParamType(i);
        if (i == GVCstArgIdx) {
          Type *EleTy = ParamTy->getPointerElementType();
          ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
        }

        NewFuncParamTys.push_back(ParamTy);
      }

      FunctionType *NewFTy =
          FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
      GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
      FTy = NewFTy;
    }

    FindType(FTy);
  } else {
    // As kernel functions do not have parameters, create new function type and
    // add it to type map.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FindType(NewFTy);
  }

  // Investigate instructions' type in function body.
  for (BasicBlock &BB : F) {
    for (Instruction &I : BB) {
      if (isa<ShuffleVectorInst>(I)) {
        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore type for mask of shuffle vector instruction.
          if (i == 2) {
            continue;
          }

          Value *Op = I.getOperand(i);
          if (!isa<MetadataAsValue>(Op)) {
            FindType(Op->getType());
          }
        }

        FindType(I.getType());
        continue;
      }

      CallInst *Call = dyn_cast<CallInst>(&I);

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::ResourceAccessorFunction())) {
        // This is a fake call representing access to a resource variable.
        // We handle that elsewhere.
        continue;
      }

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::WorkgroupAccessorFunction())) {
        // This is a fake call representing access to a workgroup variable.
        // We handle that elsewhere.
        continue;
      }

      // Work through the operands of the instruction.
      for (unsigned i = 0; i < I.getNumOperands(); i++) {
        Value *const Op = I.getOperand(i);
        // If any of the operands is a constant, find the type!
        if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
          FindType(Op->getType());
        }
      }

      for (Use &Op : I.operands()) {
        if (isa<CallInst>(&I)) {
          // Avoid checking the call instruction's type.
          break;
        }
        if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
          if (OpCall && OpCall->getCalledFunction()->getName().startswith(
                            clspv::WorkgroupAccessorFunction())) {
            // This is a fake call representing access to a workgroup variable.
            // We handle that elsewhere.
            continue;
          }
        }
        if (!isa<MetadataAsValue>(&Op)) {
          FindType(Op->getType());
          continue;
        }
      }

      // We don't want to track the type of this call as we are going to
      // replace it.
      if (Call && (clspv::LiteralSamplerFunction() ==
                   Call->getCalledFunction()->getName())) {
        continue;
      }

      if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
        // If gep's base operand has ModuleScopePrivate address space, make gep
        // return ModuleScopePrivate address space.
        if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
          // Add pointer type with private address space for global constant to
          // type list.
          Type *EleTy = I.getType()->getPointerElementType();
          Type *NewPTy =
              PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);

          FindType(NewPTy);
          continue;
        }
      }

      FindType(I.getType());
    }
  }
}

void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
  // If we are using a sampler map, find the type of the sampler.
  if (M.getFunction(clspv::LiteralSamplerFunction()) ||
      0 < getSamplerMap().size()) {
    auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
    if (!SamplerStructTy) {
      SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
    }

    SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);

    FindType(SamplerTy);
  }
}
1216
1217void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1218 // Record types so they are generated.
1219 TypesNeedingLayout.reset();
1220 StructTypesNeedingBlock.reset();
1221
1222 // To match older clspv codegen, generate the float type first if required
1223 // for images.
1224 for (const auto *info : ModuleOrderedResourceVars) {
1225 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1226 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
1227 // We need "float" for the sampled component type.
1228 FindType(Type::getFloatTy(M.getContext()));
1229 // We only need to find it once.
1230 break;
1231 }
1232 }
1233
1234 for (const auto *info : ModuleOrderedResourceVars) {
1235 Type *type = info->var_fn->getReturnType();
1236
1237 switch (info->arg_kind) {
1238 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001239 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001240 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1241 StructTypesNeedingBlock.insert(sty);
1242 } else {
1243 errs() << *type << "\n";
1244 llvm_unreachable("Buffer arguments must map to structures!");
1245 }
1246 break;
1247 case clspv::ArgKind::Pod:
1248 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1249 StructTypesNeedingBlock.insert(sty);
1250 } else {
1251 errs() << *type << "\n";
1252 llvm_unreachable("POD arguments must map to structures!");
1253 }
1254 break;
1255 case clspv::ArgKind::ReadOnlyImage:
1256 case clspv::ArgKind::WriteOnlyImage:
1257 case clspv::ArgKind::Sampler:
1258 // Sampler and image types map to the pointee type but
1259 // in the uniform constant address space.
1260 type = PointerType::get(type->getPointerElementType(),
1261 clspv::AddressSpace::UniformConstant);
1262 break;
1263 default:
1264 break;
1265 }
1266
1267 // The converted type is the type of the OpVariable we will generate.
1268 // If the pointee type is an array of size zero, FindType will convert it
1269 // to a runtime array.
1270 FindType(type);
1271 }
1272
alan-bakerdcd97412019-09-16 15:32:30 -04001273 // If module constants are clustered in a storage buffer then that struct
1274 // needs layout decorations.
1275 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1276 for (GlobalVariable &GV : M.globals()) {
1277 PointerType *PTy = cast<PointerType>(GV.getType());
1278 const auto AS = PTy->getAddressSpace();
1279 const bool module_scope_constant_external_init =
1280 (AS == AddressSpace::Constant) && GV.hasInitializer();
1281 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1282 if (module_scope_constant_external_init &&
1283 spv::BuiltInMax == BuiltinType) {
1284 StructTypesNeedingBlock.insert(
1285 cast<StructType>(PTy->getPointerElementType()));
1286 }
1287 }
1288 }
1289
David Neto862b7d82018-06-14 18:48:37 -04001290 // Traverse the arrays and structures underneath each Block, and
1291 // mark them as needing layout.
1292 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1293 StructTypesNeedingBlock.end());
1294 while (!work_list.empty()) {
1295 Type *type = work_list.back();
1296 work_list.pop_back();
1297 TypesNeedingLayout.insert(type);
1298 switch (type->getTypeID()) {
1299 case Type::ArrayTyID:
1300 work_list.push_back(type->getArrayElementType());
1301 if (!Hack_generate_runtime_array_stride_early) {
1302 // Remember this array type for deferred decoration.
1303 TypesNeedingArrayStride.insert(type);
1304 }
1305 break;
1306 case Type::StructTyID:
1307 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1308 work_list.push_back(elem_ty);
1309 }
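      // Falls through to the default case, which does nothing further.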
1310 default:
1311 // This type and its contained types don't get layout.
1312 break;
1313 }
1314 }
1315}
1316
Alan Baker202c8c72018-08-13 13:47:44 -04001317void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1318 // The SpecId assignment for pointer-to-local arguments is recorded in
1319 // module-level metadata. Translate that information into local argument
1320 // information.
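  // Each operand of the named metadata is expected to be a tuple of the form
  //   { kernel function, i32 argument index, i32 spec id },
  // matching how it is unpacked below.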
1321 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001322 if (!nmd)
1323 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001324 for (auto operand : nmd->operands()) {
1325 MDTuple *tuple = cast<MDTuple>(operand);
1326 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1327 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001328 ConstantAsMetadata *arg_index_md =
1329 cast<ConstantAsMetadata>(tuple->getOperand(1));
1330 int arg_index = static_cast<int>(
1331 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1332 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001333
1334 ConstantAsMetadata *spec_id_md =
1335 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001336 int spec_id = static_cast<int>(
1337 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001338
1339 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1340 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001341 if (LocalSpecIdInfoMap.count(spec_id))
1342 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001343
1344 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1345 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1346 nextID + 1, nextID + 2,
1347 nextID + 3, spec_id};
1348 LocalSpecIdInfoMap[spec_id] = info;
1349 nextID += 4;
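    // nextID was advanced by 4: GenerateSPIRVTypes later consumes the
    // array_size_id, array_type_id, and ptr_array_type_id recorded in this
    // entry.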
1350
1351 // Ensure the types necessary for this argument get generated.
1352 Type *IdxTy = Type::getInt32Ty(M.getContext());
1353 FindConstant(ConstantInt::get(IdxTy, 0));
1354 FindType(IdxTy);
1355 FindType(arg->getType());
1356 }
1357}
1358
David Neto22f144c2017-06-12 14:26:21 -04001359void SPIRVProducerPass::FindType(Type *Ty) {
1360 TypeList &TyList = getTypeList();
1361
1362 if (0 != TyList.idFor(Ty)) {
1363 return;
1364 }
1365
1366 if (Ty->isPointerTy()) {
1367 auto AddrSpace = Ty->getPointerAddressSpace();
1368 if ((AddressSpace::Constant == AddrSpace) ||
1369 (AddressSpace::Global == AddrSpace)) {
1370 auto PointeeTy = Ty->getPointerElementType();
1371
1372 if (PointeeTy->isStructTy() &&
1373 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1374 FindType(PointeeTy);
1375 auto ActualPointerTy =
1376 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1377 FindType(ActualPointerTy);
1378 return;
1379 }
1380 }
1381 }
1382
David Neto862b7d82018-06-14 18:48:37 -04001383 // By convention, LLVM array type with 0 elements will map to
1384 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1385 // has a constant number of elements. We need the type used for that
1386 // length constant.
1387 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1388 if (arrayTy->getNumElements() > 0) {
1389 LLVMContext &Context = Ty->getContext();
1390 FindType(Type::getInt32Ty(Context));
1391 }
David Neto22f144c2017-06-12 14:26:21 -04001392 }
1393
1394 for (Type *SubTy : Ty->subtypes()) {
1395 FindType(SubTy);
1396 }
1397
1398 TyList.insert(Ty);
1399}
1400
1401void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1402 // If the global variable has a (non undef) initializer.
1403 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001404 // Generate the constant if it's not the initializer to a module scope
1405 // constant that we will expect in a storage buffer.
1406 const bool module_scope_constant_external_init =
1407 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1408 clspv::Option::ModuleConstantsInStorageBuffer();
1409 if (!module_scope_constant_external_init) {
1410 FindConstant(GV.getInitializer());
1411 }
David Neto22f144c2017-06-12 14:26:21 -04001412 }
1413}
1414
1415void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1416 // Investigate constants in function body.
1417 for (BasicBlock &BB : F) {
1418 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001419 if (auto *call = dyn_cast<CallInst>(&I)) {
1420 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001421 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001422 // We've handled these constants elsewhere, so skip it.
1423 continue;
1424 }
Alan Baker202c8c72018-08-13 13:47:44 -04001425 if (name.startswith(clspv::ResourceAccessorFunction())) {
1426 continue;
1427 }
1428 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001429 continue;
1430 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001431 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1432 // Skip the first operand that has the SPIR-V Opcode
1433 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1434 if (isa<Constant>(I.getOperand(i)) &&
1435 !isa<GlobalValue>(I.getOperand(i))) {
1436 FindConstant(I.getOperand(i));
1437 }
1438 }
1439 continue;
1440 }
David Neto22f144c2017-06-12 14:26:21 -04001441 }
1442
1443 if (isa<AllocaInst>(I)) {
1444 // Alloca instruction has a constant for the number of elements. Ignore it.
1445 continue;
1446 } else if (isa<ShuffleVectorInst>(I)) {
1447 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1448 // Ignore constant for mask of shuffle vector instruction.
1449 if (i == 2) {
1450 continue;
1451 }
1452
1453 if (isa<Constant>(I.getOperand(i)) &&
1454 !isa<GlobalValue>(I.getOperand(i))) {
1455 FindConstant(I.getOperand(i));
1456 }
1457 }
1458
1459 continue;
1460 } else if (isa<InsertElementInst>(I)) {
1461 // Handle InsertElement with <4 x i8> specially.
1462 Type *CompositeTy = I.getOperand(0)->getType();
1463 if (is4xi8vec(CompositeTy)) {
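        // <4 x i8> inserts are lowered using i32 shift-and-mask arithmetic,
        // so record the 0xFF mask and the byte shift amount that lowering
        // will need.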
1464 LLVMContext &Context = CompositeTy->getContext();
1465 if (isa<Constant>(I.getOperand(0))) {
1466 FindConstant(I.getOperand(0));
1467 }
1468
1469 if (isa<Constant>(I.getOperand(1))) {
1470 FindConstant(I.getOperand(1));
1471 }
1472
1473 // Add mask constant 0xFF.
1474 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1475 FindConstant(CstFF);
1476
1477 // Add shift amount constant.
1478 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1479 uint64_t Idx = CI->getZExtValue();
1480 Constant *CstShiftAmount =
1481 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1482 FindConstant(CstShiftAmount);
1483 }
1484
1485 continue;
1486 }
1487
1488 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1489 // Ignore constant for index of InsertElement instruction.
1490 if (i == 2) {
1491 continue;
1492 }
1493
1494 if (isa<Constant>(I.getOperand(i)) &&
1495 !isa<GlobalValue>(I.getOperand(i))) {
1496 FindConstant(I.getOperand(i));
1497 }
1498 }
1499
1500 continue;
1501 } else if (isa<ExtractElementInst>(I)) {
1502 // Handle ExtractElement with <4 x i8> specially.
1503 Type *CompositeTy = I.getOperand(0)->getType();
1504 if (is4xi8vec(CompositeTy)) {
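        // <4 x i8> extracts are likewise lowered with i32 shift-and-mask
        // arithmetic; record the mask and shift-amount constants it needs.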
1505 LLVMContext &Context = CompositeTy->getContext();
1506 if (isa<Constant>(I.getOperand(0))) {
1507 FindConstant(I.getOperand(0));
1508 }
1509
1510 // Add mask constant 0xFF.
1511 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1512 FindConstant(CstFF);
1513
1514 // Add shift amount constant.
1515 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1516 uint64_t Idx = CI->getZExtValue();
1517 Constant *CstShiftAmount =
1518 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1519 FindConstant(CstShiftAmount);
1520 } else {
1521 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1522 FindConstant(Cst8);
1523 }
1524
1525 continue;
1526 }
1527
1528 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1529 // Ignore constant for index of ExtractElement instruction.
1530 if (i == 1) {
1531 continue;
1532 }
1533
1534 if (isa<Constant>(I.getOperand(i)) &&
1535 !isa<GlobalValue>(I.getOperand(i))) {
1536 FindConstant(I.getOperand(i));
1537 }
1538 }
1539
1540 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001541 } else if ((Instruction::Xor == I.getOpcode()) &&
1542 I.getType()->isIntegerTy(1)) {
1543 // We special-case Xor where the type is i1 and one of the arguments is
1544 // a constant 1 (true): this becomes an OpLogicalNot in SPIR-V, so we
1545 // don't need the constant.
David Neto22f144c2017-06-12 14:26:21 -04001546 bool foundConstantTrue = false;
1547 for (Use &Op : I.operands()) {
1548 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1549 auto CI = cast<ConstantInt>(Op);
1550
1551 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001552 // If we already found the true constant, we might (probably only
1553 // on -O0) have an OpLogicalNot which is taking a constant
1554 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001555 FindConstant(Op);
1556 } else {
1557 foundConstantTrue = true;
1558 }
1559 }
1560 }
1561
1562 continue;
David Netod2de94a2017-08-28 17:27:47 -04001563 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001564 // Special case if i8 is not generally handled.
1565 if (!clspv::Option::Int8Support()) {
1566 // For truncation to i8 we mask against 255.
1567 Type *ToTy = I.getType();
1568 if (8u == ToTy->getPrimitiveSizeInBits()) {
1569 LLVMContext &Context = ToTy->getContext();
1570 Constant *Cst255 =
1571 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1572 FindConstant(Cst255);
1573 }
David Netod2de94a2017-08-28 17:27:47 -04001574 }
Neil Henning39672102017-09-29 14:33:13 +01001575 } else if (isa<AtomicRMWInst>(I)) {
1576 LLVMContext &Context = I.getContext();
1577
1578 FindConstant(
1579 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1580 FindConstant(ConstantInt::get(
1581 Type::getInt32Ty(Context),
1582 spv::MemorySemanticsUniformMemoryMask |
1583 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001584 }
1585
1586 for (Use &Op : I.operands()) {
1587 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1588 FindConstant(Op);
1589 }
1590 }
1591 }
1592 }
1593}
1594
1595void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001596 ValueList &CstList = getConstantList();
1597
David Netofb9a7972017-08-25 17:08:24 -04001598 // If V is already tracked, ignore it.
1599 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001600 return;
1601 }
1602
David Neto862b7d82018-06-14 18:48:37 -04001603 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1604 return;
1605 }
1606
David Neto22f144c2017-06-12 14:26:21 -04001607 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001608 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001609
1610 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001611 if (is4xi8vec(CstTy)) {
1612 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001613 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001614 }
1615 }
1616
1617 if (Cst->getNumOperands()) {
1618 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1619 ++I) {
1620 FindConstant(*I);
1621 }
1622
David Netofb9a7972017-08-25 17:08:24 -04001623 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001624 return;
1625 } else if (const ConstantDataSequential *CDS =
1626 dyn_cast<ConstantDataSequential>(Cst)) {
1627 // Add constants for each element to constant list.
1628 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1629 Constant *EleCst = CDS->getElementAsConstant(i);
1630 FindConstant(EleCst);
1631 }
1632 }
1633
1634 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001635 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001636 }
1637}
1638
1639spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1640 switch (AddrSpace) {
1641 default:
1642 llvm_unreachable("Unsupported OpenCL address space");
1643 case AddressSpace::Private:
1644 return spv::StorageClassFunction;
1645 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001646 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001647 case AddressSpace::Constant:
1648 return clspv::Option::ConstantArgsInUniformBuffer()
1649 ? spv::StorageClassUniform
1650 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001651 case AddressSpace::Input:
1652 return spv::StorageClassInput;
1653 case AddressSpace::Local:
1654 return spv::StorageClassWorkgroup;
1655 case AddressSpace::UniformConstant:
1656 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001657 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001658 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001659 case AddressSpace::ModuleScopePrivate:
1660 return spv::StorageClassPrivate;
1661 }
1662}
1663
David Neto862b7d82018-06-14 18:48:37 -04001664spv::StorageClass
1665SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1666 switch (arg_kind) {
1667 case clspv::ArgKind::Buffer:
1668 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001669 case clspv::ArgKind::BufferUBO:
1670 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001671 case clspv::ArgKind::Pod:
1672 return clspv::Option::PodArgsInUniformBuffer()
1673 ? spv::StorageClassUniform
1674 : spv::StorageClassStorageBuffer;
1675 case clspv::ArgKind::Local:
1676 return spv::StorageClassWorkgroup;
1677 case clspv::ArgKind::ReadOnlyImage:
1678 case clspv::ArgKind::WriteOnlyImage:
1679 case clspv::ArgKind::Sampler:
1680 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001681 default:
1682 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001683 }
1684}
1685
David Neto22f144c2017-06-12 14:26:21 -04001686spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1687 return StringSwitch<spv::BuiltIn>(Name)
1688 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1689 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1690 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1691 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1692 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1693 .Default(spv::BuiltInMax);
1694}
1695
1696void SPIRVProducerPass::GenerateExtInstImport() {
1697 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1698 uint32_t &ExtInstImportID = getOpExtInstImportID();
1699
1700 //
1701 // Generate OpExtInstImport.
1702 //
1703 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001704 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001705 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1706 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001707}
1708
alan-bakerb6b09dc2018-11-08 16:59:28 -05001709void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1710 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001711 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1712 ValueMapType &VMap = getValueMap();
1713 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001714 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001715
1716 // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
1717 // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
1718 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1719
1720 for (Type *Ty : getTypeList()) {
1721 // Update TypeMap with nextID for reference later.
1722 TypeMap[Ty] = nextID;
1723
1724 switch (Ty->getTypeID()) {
1725 default: {
1726 Ty->print(errs());
1727 llvm_unreachable("Unsupported type???");
1728 break;
1729 }
1730 case Type::MetadataTyID:
1731 case Type::LabelTyID: {
1732 // Ignore these types.
1733 break;
1734 }
1735 case Type::PointerTyID: {
1736 PointerType *PTy = cast<PointerType>(Ty);
1737 unsigned AddrSpace = PTy->getAddressSpace();
1738
1739 // For the purposes of our Vulkan SPIR-V type system, constant and global
1740 // are conflated.
1741 bool UseExistingOpTypePointer = false;
1742 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001743 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1744 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001745 // Check to see if we already created this type (for instance, if we
1746 // had a constant <type>* and a global <type>*, the type would be
1747 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001748 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1749 if (0 < TypeMap.count(GlobalTy)) {
1750 TypeMap[PTy] = TypeMap[GlobalTy];
1751 UseExistingOpTypePointer = true;
1752 break;
1753 }
David Neto22f144c2017-06-12 14:26:21 -04001754 }
1755 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001756 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1757 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001758
alan-bakerb6b09dc2018-11-08 16:59:28 -05001759 // Check to see if we already created this type (for instance, if we
1760 // had a constant <type>* and a global <type>*, the type would be
1761 // created by one of these types, and shared by both).
1762 auto ConstantTy =
1763 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001764 if (0 < TypeMap.count(ConstantTy)) {
1765 TypeMap[PTy] = TypeMap[ConstantTy];
1766 UseExistingOpTypePointer = true;
1767 }
David Neto22f144c2017-06-12 14:26:21 -04001768 }
1769 }
1770
David Neto862b7d82018-06-14 18:48:37 -04001771 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001772
David Neto862b7d82018-06-14 18:48:37 -04001773 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001774 //
1775 // Generate OpTypePointer.
1776 //
1777
1778 // OpTypePointer
1779 // Ops[0] = Storage Class
1780 // Ops[1] = Element Type ID
1781 SPIRVOperandList Ops;
1782
David Neto257c3892018-04-11 13:19:45 -04001783 Ops << MkNum(GetStorageClass(AddrSpace))
1784 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001785
David Neto87846742018-04-11 17:36:22 -04001786 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001787 SPIRVInstList.push_back(Inst);
1788 }
David Neto22f144c2017-06-12 14:26:21 -04001789 break;
1790 }
1791 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001792 StructType *STy = cast<StructType>(Ty);
1793
1794 // Handle sampler type.
1795 if (STy->isOpaque()) {
1796 if (STy->getName().equals("opencl.sampler_t")) {
1797 //
1798 // Generate OpTypeSampler
1799 //
1800 // Empty Ops.
1801 SPIRVOperandList Ops;
1802
David Neto87846742018-04-11 17:36:22 -04001803 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001804 SPIRVInstList.push_back(Inst);
1805 break;
1806 } else if (STy->getName().equals("opencl.image2d_ro_t") ||
1807 STy->getName().equals("opencl.image2d_wo_t") ||
1808 STy->getName().equals("opencl.image3d_ro_t") ||
1809 STy->getName().equals("opencl.image3d_wo_t")) {
1810 //
1811 // Generate OpTypeImage
1812 //
1813 // Ops[0] = Sampled Type ID
1814 // Ops[1] = Dim ID
1815 // Ops[2] = Depth (Literal Number)
1816 // Ops[3] = Arrayed (Literal Number)
1817 // Ops[4] = MS (Literal Number)
1818 // Ops[5] = Sampled (Literal Number)
1819 // Ops[6] = Image Format ID
1820 //
1821 SPIRVOperandList Ops;
1822
1823 // TODO: Change the Sampled Type according to the situation.
1824 uint32_t SampledTyID = lookupType(Type::getFloatTy(Context));
David Neto257c3892018-04-11 13:19:45 -04001825 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001826
1827 spv::Dim DimID = spv::Dim2D;
1828 if (STy->getName().equals("opencl.image3d_ro_t") ||
1829 STy->getName().equals("opencl.image3d_wo_t")) {
1830 DimID = spv::Dim3D;
1831 }
David Neto257c3892018-04-11 13:19:45 -04001832 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001833
1834 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001835 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001836
1837 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001838 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001839
1840 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001841 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001842
1843 // TODO: Set up Sampled.
1844 //
1845 // From Spec
1846 //
1847 // 0 indicates this is only known at run time, not at compile time
1848 // 1 indicates will be used with sampler
1849 // 2 indicates will be used without a sampler (a storage image)
1850 uint32_t Sampled = 1;
1851 if (STy->getName().equals("opencl.image2d_wo_t") ||
1852 STy->getName().equals("opencl.image3d_wo_t")) {
1853 Sampled = 2;
1854 }
David Neto257c3892018-04-11 13:19:45 -04001855 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04001856
1857 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04001858 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04001859
David Neto87846742018-04-11 17:36:22 -04001860 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001861 SPIRVInstList.push_back(Inst);
1862 break;
1863 }
1864 }
1865
1866 //
1867 // Generate OpTypeStruct
1868 //
1869 // Ops[0] ... Ops[n] = Member IDs
1870 SPIRVOperandList Ops;
1871
1872 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04001873 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04001874 }
1875
David Neto22f144c2017-06-12 14:26:21 -04001876 uint32_t STyID = nextID;
1877
alan-bakerb6b09dc2018-11-08 16:59:28 -05001878 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001879 SPIRVInstList.push_back(Inst);
1880
1881 // Generate OpMemberDecorate.
1882 auto DecoInsertPoint =
1883 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1884 [](SPIRVInstruction *Inst) -> bool {
1885 return Inst->getOpcode() != spv::OpDecorate &&
1886 Inst->getOpcode() != spv::OpMemberDecorate &&
1887 Inst->getOpcode() != spv::OpExtInstImport;
1888 });
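      // Decorations must be grouped near the top of the module, so the new
      // OpMemberDecorate instructions are inserted just before the first
      // instruction that is not a decoration or the extended-instruction
      // import.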
1889
David Netoc463b372017-08-10 15:32:21 -04001890 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04001891 // Search for the correct offsets if this type was remapped.
1892 std::vector<uint32_t> *offsets = nullptr;
1893 auto iter = RemappedUBOTypeOffsets.find(STy);
1894 if (iter != RemappedUBOTypeOffsets.end()) {
1895 offsets = &iter->second;
1896 }
David Netoc463b372017-08-10 15:32:21 -04001897
David Neto862b7d82018-06-14 18:48:37 -04001898 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04001899 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1900 MemberIdx++) {
1901 // Ops[0] = Structure Type ID
1902 // Ops[1] = Member Index(Literal Number)
1903 // Ops[2] = Decoration (Offset)
1904 // Ops[3] = Byte Offset (Literal Number)
1905 Ops.clear();
1906
David Neto257c3892018-04-11 13:19:45 -04001907 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04001908
alan-bakerb6b09dc2018-11-08 16:59:28 -05001909 auto ByteOffset =
1910 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04001911 if (offsets) {
1912 ByteOffset = (*offsets)[MemberIdx];
1913 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05001914 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04001915 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04001916 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04001917
David Neto87846742018-04-11 17:36:22 -04001918 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001919 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001920 }
1921
1922 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04001923 if (StructTypesNeedingBlock.idFor(STy)) {
1924 Ops.clear();
1925 // Use Block decorations with StorageBuffer storage class.
1926 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04001927
David Neto862b7d82018-06-14 18:48:37 -04001928 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
1929 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001930 }
1931 break;
1932 }
1933 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05001934 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04001935
1936 if (BitWidth == 1) {
David Neto87846742018-04-11 17:36:22 -04001937 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04001938 SPIRVInstList.push_back(Inst);
1939 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05001940 if (!clspv::Option::Int8Support()) {
1941 // i8 is added to TypeMap as i32.
1942 // No matter what LLVM type is requested first, always alias the
1943 // second one's SPIR-V type to be the same as the one we generated
1944 // first.
1945 unsigned aliasToWidth = 0;
1946 if (BitWidth == 8) {
1947 aliasToWidth = 32;
1948 BitWidth = 32;
1949 } else if (BitWidth == 32) {
1950 aliasToWidth = 8;
1951 }
1952 if (aliasToWidth) {
1953 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
1954 auto where = TypeMap.find(otherType);
1955 if (where == TypeMap.end()) {
1956 // Go ahead and make it, but also map the other type to it.
1957 TypeMap[otherType] = nextID;
1958 } else {
1959 // Alias this SPIR-V type to the existing type.
1960 TypeMap[Ty] = where->second;
1961 break;
1962 }
David Neto391aeb12017-08-26 15:51:58 -04001963 }
David Neto22f144c2017-06-12 14:26:21 -04001964 }
1965
David Neto257c3892018-04-11 13:19:45 -04001966 SPIRVOperandList Ops;
1967 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04001968
1969 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04001970 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04001971 }
1972 break;
1973 }
1974 case Type::HalfTyID:
1975 case Type::FloatTyID:
1976 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05001977 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
1978 SPIRVOperand *WidthOp =
1979 new SPIRVOperand(SPIRVOperandType::LITERAL_INTEGER, BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04001980
1981 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04001982 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04001983 break;
1984 }
1985 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001986 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04001987 const uint64_t Length = ArrTy->getArrayNumElements();
1988 if (Length == 0) {
1989 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04001990
David Neto862b7d82018-06-14 18:48:37 -04001991 // Only generate the type once.
1992 // TODO(dneto): Can it ever be generated more than once?
1993 // Doesn't LLVM type uniqueness guarantee we'll only see this
1994 // once?
1995 Type *EleTy = ArrTy->getArrayElementType();
1996 if (OpRuntimeTyMap.count(EleTy) == 0) {
1997 uint32_t OpTypeRuntimeArrayID = nextID;
1998 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04001999
David Neto862b7d82018-06-14 18:48:37 -04002000 //
2001 // Generate OpTypeRuntimeArray.
2002 //
David Neto22f144c2017-06-12 14:26:21 -04002003
David Neto862b7d82018-06-14 18:48:37 -04002004 // OpTypeRuntimeArray
2005 // Ops[0] = Element Type ID
2006 SPIRVOperandList Ops;
2007 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002008
David Neto862b7d82018-06-14 18:48:37 -04002009 SPIRVInstList.push_back(
2010 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002011
David Neto862b7d82018-06-14 18:48:37 -04002012 if (Hack_generate_runtime_array_stride_early) {
2013 // Generate OpDecorate.
2014 auto DecoInsertPoint = std::find_if(
2015 SPIRVInstList.begin(), SPIRVInstList.end(),
2016 [](SPIRVInstruction *Inst) -> bool {
2017 return Inst->getOpcode() != spv::OpDecorate &&
2018 Inst->getOpcode() != spv::OpMemberDecorate &&
2019 Inst->getOpcode() != spv::OpExtInstImport;
2020 });
David Neto22f144c2017-06-12 14:26:21 -04002021
David Neto862b7d82018-06-14 18:48:37 -04002022 // Ops[0] = Target ID
2023 // Ops[1] = Decoration (ArrayStride)
2024 // Ops[2] = Stride Number(Literal Number)
2025 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002026
David Neto862b7d82018-06-14 18:48:37 -04002027 Ops << MkId(OpTypeRuntimeArrayID)
2028 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002029 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002030
David Neto862b7d82018-06-14 18:48:37 -04002031 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2032 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2033 }
2034 }
David Neto22f144c2017-06-12 14:26:21 -04002035
David Neto862b7d82018-06-14 18:48:37 -04002036 } else {
David Neto22f144c2017-06-12 14:26:21 -04002037
David Neto862b7d82018-06-14 18:48:37 -04002038 //
2039 // Generate OpConstant and OpTypeArray.
2040 //
2041
2042 //
2043 // Generate OpConstant for array length.
2044 //
2045 // Ops[0] = Result Type ID
2046 // Ops[1] .. Ops[n] = Values LiteralNumber
2047 SPIRVOperandList Ops;
2048
2049 Type *LengthTy = Type::getInt32Ty(Context);
2050 uint32_t ResTyID = lookupType(LengthTy);
2051 Ops << MkId(ResTyID);
2052
2053 assert(Length < UINT32_MAX);
2054 Ops << MkNum(static_cast<uint32_t>(Length));
2055
2056 // Add constant for length to constant list.
2057 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2058 AllocatedVMap[CstLength] = nextID;
2059 VMap[CstLength] = nextID;
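      // Also record the length constant in AllocatedVMap so that
      // GenerateSPIRVConstants skips it later; its OpConstant is emitted
      // immediately below.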
2060 uint32_t LengthID = nextID;
2061
2062 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2063 SPIRVInstList.push_back(CstInst);
2064
2065 // Remember to generate ArrayStride later
2066 getTypesNeedingArrayStride().insert(Ty);
2067
2068 //
2069 // Generate OpTypeArray.
2070 //
2071 // Ops[0] = Element Type ID
2072 // Ops[1] = Array Length Constant ID
2073 Ops.clear();
2074
2075 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2076 Ops << MkId(EleTyID) << MkId(LengthID);
2077
2078 // Update TypeMap with nextID.
2079 TypeMap[Ty] = nextID;
2080
2081 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2082 SPIRVInstList.push_back(ArrayInst);
2083 }
David Neto22f144c2017-06-12 14:26:21 -04002084 break;
2085 }
2086 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002087 // <4 x i8> is changed to i32 if i8 is not generally supported.
2088 if (!clspv::Option::Int8Support() &&
2089 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002090 if (Ty->getVectorNumElements() == 4) {
2091 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2092 break;
2093 } else {
2094 Ty->print(errs());
2095 llvm_unreachable("Support above i8 vector type");
2096 }
2097 }
2098
2099 // Ops[0] = Component Type ID
2100 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002101 SPIRVOperandList Ops;
2102 Ops << MkId(lookupType(Ty->getVectorElementType()))
2103 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002104
alan-bakerb6b09dc2018-11-08 16:59:28 -05002105 SPIRVInstruction *inst =
2106 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002107 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002108 break;
2109 }
2110 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002111 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002112 SPIRVInstList.push_back(Inst);
2113 break;
2114 }
2115 case Type::FunctionTyID: {
2116 // Generate SPIRV instruction for function type.
2117 FunctionType *FTy = cast<FunctionType>(Ty);
2118
2119 // Ops[0] = Return Type ID
2120 // Ops[1] ... Ops[n] = Parameter Type IDs
2121 SPIRVOperandList Ops;
2122
2123 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002124 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002125
2126 // Find SPIRV instructions for parameter types
2127 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2128 // Find SPIRV instruction for parameter type.
2129 auto ParamTy = FTy->getParamType(k);
2130 if (ParamTy->isPointerTy()) {
2131 auto PointeeTy = ParamTy->getPointerElementType();
2132 if (PointeeTy->isStructTy() &&
2133 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2134 ParamTy = PointeeTy;
2135 }
2136 }
2137
David Netoc6f3ab22018-04-06 18:02:31 -04002138 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002139 }
2140
David Neto87846742018-04-11 17:36:22 -04002141 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002142 SPIRVInstList.push_back(Inst);
2143 break;
2144 }
2145 }
2146 }
2147
2148 // Generate OpTypeSampledImage.
2149 TypeMapType &OpImageTypeMap = getImageTypeMap();
2150 for (auto &ImageType : OpImageTypeMap) {
2151 //
2152 // Generate OpTypeSampledImage.
2153 //
2154 // Ops[0] = Image Type ID
2155 //
2156 SPIRVOperandList Ops;
2157
2158 Type *ImgTy = ImageType.first;
David Netoc6f3ab22018-04-06 18:02:31 -04002159 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002160
2161 // Update OpImageTypeMap.
2162 ImageType.second = nextID;
2163
David Neto87846742018-04-11 17:36:22 -04002164 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002165 SPIRVInstList.push_back(Inst);
2166 }
David Netoc6f3ab22018-04-06 18:02:31 -04002167
2168 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002169 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2170 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002171 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002172
2173 // Generate the spec constant.
2174 SPIRVOperandList Ops;
2175 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002176 SPIRVInstList.push_back(
2177 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
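    // The array length is emitted as an OpSpecConstant with a default value
    // of 1, so the actual workgroup array size can be overridden through
    // specialization.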
David Netoc6f3ab22018-04-06 18:02:31 -04002178
2179 // Generate the array type.
2180 Ops.clear();
2181 // The element type must have been created.
2182 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2183 assert(elem_ty_id);
2184 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2185
2186 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002187 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002188
2189 Ops.clear();
2190 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002191 SPIRVInstList.push_back(new SPIRVInstruction(
2192 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002193 }
David Neto22f144c2017-06-12 14:26:21 -04002194}
2195
2196void SPIRVProducerPass::GenerateSPIRVConstants() {
2197 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2198 ValueMapType &VMap = getValueMap();
2199 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2200 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002201 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002202
2203 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002204 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002205 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002206
2207 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002208 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002209 continue;
2210 }
2211
David Netofb9a7972017-08-25 17:08:24 -04002212 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002213 VMap[Cst] = nextID;
2214
2215 //
2216 // Generate OpConstant.
2217 //
2218
2219 // Ops[0] = Result Type ID
2220 // Ops[1] .. Ops[n] = Values LiteralNumber
2221 SPIRVOperandList Ops;
2222
David Neto257c3892018-04-11 13:19:45 -04002223 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002224
2225 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002226 spv::Op Opcode = spv::OpNop;
2227
2228 if (isa<UndefValue>(Cst)) {
2229 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002230 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002231 if (hack_undef && IsTypeNullable(Cst->getType())) {
2232 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002233 }
David Neto22f144c2017-06-12 14:26:21 -04002234 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2235 unsigned BitWidth = CI->getBitWidth();
2236 if (BitWidth == 1) {
2237 // If the bitwidth of constant is 1, generate OpConstantTrue or
2238 // OpConstantFalse.
2239 if (CI->getZExtValue()) {
2240 // Ops[0] = Result Type ID
2241 Opcode = spv::OpConstantTrue;
2242 } else {
2243 // Ops[0] = Result Type ID
2244 Opcode = spv::OpConstantFalse;
2245 }
David Neto22f144c2017-06-12 14:26:21 -04002246 } else {
2247 auto V = CI->getZExtValue();
2248 LiteralNum.push_back(V & 0xFFFFFFFF);
2249
2250 if (BitWidth > 32) {
2251 LiteralNum.push_back(V >> 32);
2252 }
2253
2254 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002255
David Neto257c3892018-04-11 13:19:45 -04002256 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002257 }
2258 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2259 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2260 Type *CFPTy = CFP->getType();
2261 if (CFPTy->isFloatTy()) {
2262 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002263 } else if (CFPTy->isDoubleTy()) {
2264 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2265 LiteralNum.push_back(FPVal >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002266 } else {
2267 CFPTy->print(errs());
2268 llvm_unreachable("Implement this ConstantFP Type");
2269 }
2270
2271 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002272
David Neto257c3892018-04-11 13:19:45 -04002273 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002274 } else if (isa<ConstantDataSequential>(Cst) &&
2275 cast<ConstantDataSequential>(Cst)->isString()) {
2276 Cst->print(errs());
2277 llvm_unreachable("Implement this Constant");
2278
2279 } else if (const ConstantDataSequential *CDS =
2280 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002281 // Let's convert <4 x i8> constant to int constant specially.
2282 // This case occurs when all the values are specified as constant
2283 // ints.
2284 Type *CstTy = Cst->getType();
2285 if (is4xi8vec(CstTy)) {
2286 LLVMContext &Context = CstTy->getContext();
2287
2288 //
2289 // Generate OpConstant with OpTypeInt 32 0.
2290 //
Neil Henning39672102017-09-29 14:33:13 +01002291 uint32_t IntValue = 0;
2292 for (unsigned k = 0; k < 4; k++) {
2293 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002294 IntValue = (IntValue << 8) | (Val & 0xffu);
2295 }
2296
2297 Type *i32 = Type::getInt32Ty(Context);
2298 Constant *CstInt = ConstantInt::get(i32, IntValue);
2299 // If this constant is already registered on VMap, use it.
2300 if (VMap.count(CstInt)) {
2301 uint32_t CstID = VMap[CstInt];
2302 VMap[Cst] = CstID;
2303 continue;
2304 }
2305
David Neto257c3892018-04-11 13:19:45 -04002306 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002307
David Neto87846742018-04-11 17:36:22 -04002308 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002309 SPIRVInstList.push_back(CstInst);
2310
2311 continue;
2312 }
2313
2314 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002315 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2316 Constant *EleCst = CDS->getElementAsConstant(k);
2317 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002318 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002319 }
2320
2321 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002322 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2323 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002324 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002325 Type *CstTy = Cst->getType();
2326 if (is4xi8vec(CstTy)) {
2327 LLVMContext &Context = CstTy->getContext();
2328
2329 //
2330 // Generate OpConstant with OpTypeInt 32 0.
2331 //
Neil Henning39672102017-09-29 14:33:13 +01002332 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002333 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2334 I != E; ++I) {
2335 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002336 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002337 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2338 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002339 }
David Neto49351ac2017-08-26 17:32:20 -04002340 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002341 }
2342
David Neto49351ac2017-08-26 17:32:20 -04002343 Type *i32 = Type::getInt32Ty(Context);
2344 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002345 // If this constant is already registered on VMap, use it.
2346 if (VMap.count(CstInt)) {
2347 uint32_t CstID = VMap[CstInt];
2348 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002349 continue;
David Neto22f144c2017-06-12 14:26:21 -04002350 }
2351
David Neto257c3892018-04-11 13:19:45 -04002352 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002353
David Neto87846742018-04-11 17:36:22 -04002354 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002355 SPIRVInstList.push_back(CstInst);
2356
David Neto19a1bad2017-08-25 15:01:41 -04002357 continue;
David Neto22f144c2017-06-12 14:26:21 -04002358 }
2359
2360 // We use a constant composite in SPIR-V for our constant aggregate in
2361 // LLVM.
2362 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002363
2364 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2365 // Look up the ID of the element of this aggregate (for which we will
2366 // already have created a constant).
2367 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2368
2369 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002370 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002371 }
2372 } else if (Cst->isNullValue()) {
2373 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002374 } else {
2375 Cst->print(errs());
2376 llvm_unreachable("Unsupported Constant???");
2377 }
2378
alan-baker5b86ed72019-02-15 08:26:50 -05002379 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2380 // Null pointer requires variable pointers.
2381 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2382 }
2383
David Neto87846742018-04-11 17:36:22 -04002384 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002385 SPIRVInstList.push_back(CstInst);
2386 }
2387}
2388
2389void SPIRVProducerPass::GenerateSamplers(Module &M) {
2390 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002391
alan-bakerb6b09dc2018-11-08 16:59:28 -05002392 auto &sampler_map = getSamplerMap();
David Neto862b7d82018-06-14 18:48:37 -04002393 SamplerMapIndexToIDMap.clear();
David Neto22f144c2017-06-12 14:26:21 -04002394 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Neto862b7d82018-06-14 18:48:37 -04002395 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2396 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002397
David Neto862b7d82018-06-14 18:48:37 -04002398 // We might have samplers in the sampler map that are not used
2399 // in the translation unit. We need to allocate variables
2400 // for them and bindings too.
2401 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002402
Kévin Petitdf71de32019-04-09 14:09:50 +01002403 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-bakerb6b09dc2018-11-08 16:59:28 -05002404 if (!var_fn)
2405 return;
David Neto862b7d82018-06-14 18:48:37 -04002406 for (auto user : var_fn->users()) {
2407 // Populate SamplerLiteralToDescriptorSetMap and
2408 // SamplerLiteralToBindingMap.
2409 //
2410 // Look for calls like
2411 // call %opencl.sampler_t addrspace(2)*
2412 // @clspv.sampler.var.literal(
2413 // i32 descriptor,
2414 // i32 binding,
2415 // i32 index-into-sampler-map)
alan-bakerb6b09dc2018-11-08 16:59:28 -05002416 if (auto *call = dyn_cast<CallInst>(user)) {
2417 const size_t index_into_sampler_map = static_cast<size_t>(
2418 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002419 if (index_into_sampler_map >= sampler_map.size()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002420 errs() << "Out of bounds index to sampler map: "
2421 << index_into_sampler_map;
David Neto862b7d82018-06-14 18:48:37 -04002422 llvm_unreachable("bad sampler init: out of bounds");
2423 }
2424
2425 auto sampler_value = sampler_map[index_into_sampler_map].first;
2426 const auto descriptor_set = static_cast<unsigned>(
2427 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2428 const auto binding = static_cast<unsigned>(
2429 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2430
2431 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2432 SamplerLiteralToBindingMap[sampler_value] = binding;
2433 used_bindings.insert(binding);
2434 }
2435 }
2436
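  // Emit one sampler variable per sampler-map entry, including entries that
  // were never referenced; unreferenced entries receive the next unused
  // binding in descriptor set 0 below.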
2437 unsigned index = 0;
2438 for (auto SamplerLiteral : sampler_map) {
David Neto22f144c2017-06-12 14:26:21 -04002439 // Generate OpVariable.
2440 //
2441 // GIDOps[0] : Result Type ID
2442 // GIDOps[1] : Storage Class
2443 SPIRVOperandList Ops;
2444
David Neto257c3892018-04-11 13:19:45 -04002445 Ops << MkId(lookupType(SamplerTy))
2446 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002447
David Neto862b7d82018-06-14 18:48:37 -04002448 auto sampler_var_id = nextID++;
2449 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002450 SPIRVInstList.push_back(Inst);
2451
David Neto862b7d82018-06-14 18:48:37 -04002452 SamplerMapIndexToIDMap[index] = sampler_var_id;
2453 SamplerLiteralToIDMap[SamplerLiteral.first] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002454
2455 // Find Insert Point for OpDecorate.
2456 auto DecoInsertPoint =
2457 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2458 [](SPIRVInstruction *Inst) -> bool {
2459 return Inst->getOpcode() != spv::OpDecorate &&
2460 Inst->getOpcode() != spv::OpMemberDecorate &&
2461 Inst->getOpcode() != spv::OpExtInstImport;
2462 });
2463
2464 // Ops[0] = Target ID
2465 // Ops[1] = Decoration (DescriptorSet)
2466 // Ops[2] = LiteralNumber according to Decoration
2467 Ops.clear();
2468
David Neto862b7d82018-06-14 18:48:37 -04002469 unsigned descriptor_set;
2470 unsigned binding;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002471 if (SamplerLiteralToBindingMap.find(SamplerLiteral.first) ==
2472 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002473 // This sampler is not actually used. Find the next one.
2474 for (binding = 0; used_bindings.count(binding); binding++)
2475 ;
2476 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2477 used_bindings.insert(binding);
2478 } else {
2479 descriptor_set = SamplerLiteralToDescriptorSetMap[SamplerLiteral.first];
2480 binding = SamplerLiteralToBindingMap[SamplerLiteral.first];
alan-bakercff80152019-06-15 00:38:00 -04002481
2482 version0::DescriptorMapEntry::SamplerData sampler_data = {
2483 SamplerLiteral.first};
2484 descriptorMapEntries->emplace_back(std::move(sampler_data),
2485 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002486 }
2487
2488 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2489 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002490
David Neto87846742018-04-11 17:36:22 -04002491 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002492 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2493
2494 // Ops[0] = Target ID
2495 // Ops[1] = Decoration (Binding)
2496 // Ops[2] = LiteralNumber according to Decoration
2497 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002498 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2499 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002500
David Neto87846742018-04-11 17:36:22 -04002501 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002502 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002503
2504 index++;
David Neto22f144c2017-06-12 14:26:21 -04002505 }
David Neto862b7d82018-06-14 18:48:37 -04002506}
David Neto22f144c2017-06-12 14:26:21 -04002507
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002508void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002509 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2510 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002511
David Neto862b7d82018-06-14 18:48:37 -04002512 // Generate variables. Make one for each resource var info object.
2513 for (auto *info : ModuleOrderedResourceVars) {
2514 Type *type = info->var_fn->getReturnType();
2515 // Remap the address space for opaque types.
2516 switch (info->arg_kind) {
2517 case clspv::ArgKind::Sampler:
2518 case clspv::ArgKind::ReadOnlyImage:
2519 case clspv::ArgKind::WriteOnlyImage:
2520 type = PointerType::get(type->getPointerElementType(),
2521 clspv::AddressSpace::UniformConstant);
2522 break;
2523 default:
2524 break;
2525 }
David Neto22f144c2017-06-12 14:26:21 -04002526
David Neto862b7d82018-06-14 18:48:37 -04002527 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002528
David Neto862b7d82018-06-14 18:48:37 -04002529 const auto type_id = lookupType(type);
2530 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2531 SPIRVOperandList Ops;
2532 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002533
David Neto862b7d82018-06-14 18:48:37 -04002534 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2535 SPIRVInstList.push_back(Inst);
2536
2537 // Map calls to the variable-builtin-function.
2538 for (auto &U : info->var_fn->uses()) {
2539 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2540 const auto set = unsigned(
2541 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2542 const auto binding = unsigned(
2543 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2544 if (set == info->descriptor_set && binding == info->binding) {
2545 switch (info->arg_kind) {
2546 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002547 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002548 case clspv::ArgKind::Pod:
2549 // The call maps to the variable directly.
2550 VMap[call] = info->var_id;
2551 break;
2552 case clspv::ArgKind::Sampler:
2553 case clspv::ArgKind::ReadOnlyImage:
2554 case clspv::ArgKind::WriteOnlyImage:
2555 // The call maps to a load we generate later.
2556 ResourceVarDeferredLoadCalls[call] = info->var_id;
2557 break;
2558 default:
2559 llvm_unreachable("Unhandled arg kind");
2560 }
2561 }
David Neto22f144c2017-06-12 14:26:21 -04002562 }
David Neto862b7d82018-06-14 18:48:37 -04002563 }
2564 }
David Neto22f144c2017-06-12 14:26:21 -04002565
David Neto862b7d82018-06-14 18:48:37 -04002566 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002567
David Neto862b7d82018-06-14 18:48:37 -04002568 // Find Insert Point for OpDecorate.
2569 auto DecoInsertPoint =
2570 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2571 [](SPIRVInstruction *Inst) -> bool {
2572 return Inst->getOpcode() != spv::OpDecorate &&
2573 Inst->getOpcode() != spv::OpMemberDecorate &&
2574 Inst->getOpcode() != spv::OpExtInstImport;
2575 });
2576
2577 SPIRVOperandList Ops;
2578 for (auto *info : ModuleOrderedResourceVars) {
2579 // Decorate with DescriptorSet and Binding.
2580 Ops.clear();
2581 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2582 << MkNum(info->descriptor_set);
2583 SPIRVInstList.insert(DecoInsertPoint,
2584 new SPIRVInstruction(spv::OpDecorate, Ops));
2585
2586 Ops.clear();
2587 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2588 << MkNum(info->binding);
2589 SPIRVInstList.insert(DecoInsertPoint,
2590 new SPIRVInstruction(spv::OpDecorate, Ops));
2591
alan-bakere9308012019-03-15 10:25:13 -04002592 if (info->coherent) {
2593 // Decorate with Coherent if required for the variable.
2594 Ops.clear();
2595 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2596 SPIRVInstList.insert(DecoInsertPoint,
2597 new SPIRVInstruction(spv::OpDecorate, Ops));
2598 }
2599
David Neto862b7d82018-06-14 18:48:37 -04002600 // Generate NonWritable and NonReadable
2601 switch (info->arg_kind) {
2602 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002603 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002604 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2605 clspv::AddressSpace::Constant) {
2606 Ops.clear();
2607 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2608 SPIRVInstList.insert(DecoInsertPoint,
2609 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002610 }
David Neto862b7d82018-06-14 18:48:37 -04002611 break;
David Neto862b7d82018-06-14 18:48:37 -04002612 case clspv::ArgKind::WriteOnlyImage:
2613 Ops.clear();
2614 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2615 SPIRVInstList.insert(DecoInsertPoint,
2616 new SPIRVInstruction(spv::OpDecorate, Ops));
2617 break;
2618 default:
2619 break;
David Neto22f144c2017-06-12 14:26:21 -04002620 }
2621 }
2622}
2623
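// GenerateGlobalVar handles two special cases in addition to ordinary
// globals: the WorkgroupSize builtin, whose value is emitted as an
// OpConstantComposite or OpSpecConstantComposite used as the variable's
// initializer, and module-scope constants whose data is supplied by the host
// through a storage buffer when clspv::Option::ModuleConstantsInStorageBuffer()
// is enabled.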
void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
  Module &M = *GV.getParent();
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
  const DataLayout &DL = GV.getParent()->getDataLayout();

  const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
  Type *Ty = GV.getType();
  PointerType *PTy = cast<PointerType>(Ty);

  uint32_t InitializerID = 0;

  // Workgroup size is handled differently (it goes into a constant).
  if (spv::BuiltInWorkgroupSize == BuiltinType) {
    std::vector<bool> HasMDVec;
    uint32_t PrevXDimCst = 0xFFFFFFFF;
    uint32_t PrevYDimCst = 0xFFFFFFFF;
    uint32_t PrevZDimCst = 0xFFFFFFFF;
    for (Function &Func : *GV.getParent()) {
      if (Func.isDeclaration()) {
        continue;
      }

      // We only need to check kernels.
      if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
        continue;
      }

      if (const MDNode *MD =
              dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
        uint32_t CurXDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
        uint32_t CurYDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
        uint32_t CurZDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

        if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
            PrevZDimCst == 0xFFFFFFFF) {
          PrevXDimCst = CurXDimCst;
          PrevYDimCst = CurYDimCst;
          PrevZDimCst = CurZDimCst;
        } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
                   CurZDimCst != PrevZDimCst) {
          llvm_unreachable(
              "reqd_work_group_size must be the same across all kernels");
        } else {
          continue;
        }

        //
        // Generate OpConstantComposite.
        //
        // Ops[0] : Result Type ID
        // Ops[1] : Constant size for x dimension.
        // Ops[2] : Constant size for y dimension.
        // Ops[3] : Constant size for z dimension.
        SPIRVOperandList Ops;

        uint32_t XDimCstID =
            VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
        uint32_t YDimCstID =
            VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
        uint32_t ZDimCstID =
            VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];

        InitializerID = nextID;

        Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
            << MkId(YDimCstID) << MkId(ZDimCstID);

        auto *Inst =
            new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
        SPIRVInstList.push_back(Inst);

        HasMDVec.push_back(true);
      } else {
        HasMDVec.push_back(false);
      }
    }

    // Check that all kernels have the same definition of the work group size.
    bool HasMD = false;
    if (!HasMDVec.empty()) {
      HasMD = HasMDVec[0];
      for (uint32_t i = 1; i < HasMDVec.size(); i++) {
        if (HasMD != HasMDVec[i]) {
          llvm_unreachable(
              "Kernels should have consistent work group size definition");
        }
      }
    }

    // If the kernels do not have reqd_work_group_size metadata, generate
    // OpSpecConstants for the x/y/z dimensions.
    if (!HasMD) {
      //
      // Generate OpSpecConstants for x/y/z dimension.
      //
      // Ops[0] : Result Type ID
      // Ops[1] : Constant size for x/y/z dimension (Literal Number).
      uint32_t XDimCstID = 0;
      uint32_t YDimCstID = 0;
      uint32_t ZDimCstID = 0;

      SPIRVOperandList Ops;
      uint32_t result_type_id =
          lookupType(Ty->getPointerElementType()->getSequentialElementType());

      // X Dimension
      Ops << MkId(result_type_id) << MkNum(1);
      XDimCstID = nextID++;
      SPIRVInstList.push_back(
          new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));

      // Y Dimension
      Ops.clear();
      Ops << MkId(result_type_id) << MkNum(1);
      YDimCstID = nextID++;
      SPIRVInstList.push_back(
          new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));

      // Z Dimension
      Ops.clear();
      Ops << MkId(result_type_id) << MkNum(1);
      ZDimCstID = nextID++;
      SPIRVInstList.push_back(
          new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));

      BuiltinDimVec.push_back(XDimCstID);
      BuiltinDimVec.push_back(YDimCstID);
      BuiltinDimVec.push_back(ZDimCstID);

      //
      // Generate OpSpecConstantComposite.
      //
      // Ops[0] : Result Type ID
      // Ops[1] : Constant size for x dimension.
      // Ops[2] : Constant size for y dimension.
      // Ops[3] : Constant size for z dimension.
      InitializerID = nextID;

      Ops.clear();
      Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
          << MkId(YDimCstID) << MkId(ZDimCstID);

      auto *Inst =
          new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
      SPIRVInstList.push_back(Inst);
    }
  }

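  // Note: when the spec-constant path above is taken, the IDs pushed into
  // BuiltinDimVec receive their SpecId 0/1/2 decorations later, in
  // GenerateModuleInfo.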
  VMap[&GV] = nextID;

  //
  // Generate OpVariable.
  //
  // GIDOps[0] : Result Type ID
  // GIDOps[1] : Storage Class
  SPIRVOperandList Ops;

  const auto AS = PTy->getAddressSpace();
  Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));

  const bool module_scope_constant_external_init =
      (AS == AddressSpace::Constant) && GV.hasInitializer() &&
      clspv::Option::ModuleConstantsInStorageBuffer();

  if (GV.hasInitializer()) {
    auto GVInit = GV.getInitializer();
    if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
      assert(VMap.count(GVInit) == 1);
      InitializerID = VMap[GVInit];
    }
  }

  if (0 != InitializerID) {
    // Emit the ID of the initializer as part of the variable definition.
    Ops << MkId(InitializerID);
  }
  const uint32_t var_id = nextID++;

  auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
  SPIRVInstList.push_back(Inst);

  // If we have a builtin.
  if (spv::BuiltInMax != BuiltinType) {
    // Find Insert Point for OpDecorate.
    auto DecoInsertPoint =
        std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                     [](SPIRVInstruction *Inst) -> bool {
                       return Inst->getOpcode() != spv::OpDecorate &&
                              Inst->getOpcode() != spv::OpMemberDecorate &&
                              Inst->getOpcode() != spv::OpExtInstImport;
                     });
    //
    // Generate OpDecorate.
    //
    // DOps[0] = Target ID
    // DOps[1] = Decoration (Builtin)
    // DOps[2] = BuiltIn ID
    uint32_t ResultID;

    // WorkgroupSize is different: we decorate the constant composite that has
    // its value, rather than the variable we use to access the value.
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      ResultID = InitializerID;
      // Save both the value and variable IDs for later.
      WorkgroupSizeValueID = InitializerID;
      WorkgroupSizeVarID = VMap[&GV];
    } else {
      ResultID = VMap[&GV];
    }

    SPIRVOperandList DOps;
    DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
         << MkNum(BuiltinType);

    auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
    SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
  } else if (module_scope_constant_external_init) {
    // This module scope constant is initialized from a storage buffer with
    // data provided by the host at binding 0 of the next descriptor set.
    const uint32_t descriptor_set = TakeDescriptorIndex(&M);

    // Emit the initializer to the descriptor map file.
    // Use "kind,buffer" to indicate storage buffer. We might want to expand
    // that later to other types, like uniform buffer.
    std::string hexbytes;
    llvm::raw_string_ostream str(hexbytes);
    clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
    version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
                                                                str.str()};
    descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
                                       0);

    // Find Insert Point for OpDecorate.
    auto DecoInsertPoint =
        std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                     [](SPIRVInstruction *Inst) -> bool {
                       return Inst->getOpcode() != spv::OpDecorate &&
                              Inst->getOpcode() != spv::OpMemberDecorate &&
                              Inst->getOpcode() != spv::OpExtInstImport;
                     });

    // OpDecorate %var Binding <binding>
    SPIRVOperandList DOps;
    DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
    DecoInsertPoint = SPIRVInstList.insert(
        DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));

    // OpDecorate %var DescriptorSet <descriptor_set>
    DOps.clear();
    DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
         << MkNum(descriptor_set);
    SPIRVInstList.insert(DecoInsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, DOps));
  }
}

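// GenerateWorkgroupVars emits one Workgroup-storage-class OpVariable per
// pointer-to-local kernel argument spec id recorded in LocalSpecIdInfoMap.
// The pointee behind info.ptr_array_type_id is an array whose length comes
// from a specialization constant, so the host can pick the local memory size
// at pipeline creation time (a summary inferred from the surrounding code).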
void SPIRVProducerPass::GenerateWorkgroupVars() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
       ++spec_id) {
    LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];

    // Generate OpVariable.
    //
    // GIDOps[0] : Result Type ID
    // GIDOps[1] : Storage Class
    SPIRVOperandList Ops;
    Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);

    SPIRVInstList.push_back(
        new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
  }
}

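// GenerateDescriptorMapInfo records, for each kernel argument, the data the
// runtime needs to bind it: kernel name, argument name and ordinal, argument
// kind, descriptor set and binding (or spec id and element size for
// pointer-to-local arguments), plus offset and size for clustered POD
// arguments. Entries are appended to descriptorMapEntries for emission
// elsewhere.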
void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
                                                  Function &F) {
  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    return;
  }
  // Gather the list of resources that are used by this function's arguments.
  auto &resource_var_at_index = FunctionToResourceVarsMap[&F];

  // TODO(alan-baker): This should become unnecessary by fixing the rest of the
  // flow to generate pod_ubo arguments earlier.
  auto remap_arg_kind = [](StringRef argKind) {
    std::string kind =
        clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
            ? "pod_ubo"
            : argKind;
    return GetArgKindFromName(kind);
  };

  auto *fty = F.getType()->getPointerElementType();
  auto *func_ty = dyn_cast<FunctionType>(fty);

  // If we've clustered POD arguments, then argument details are in metadata.
  // If an argument maps to a resource variable, then get descriptor set and
  // binding from the resource variable. Other info comes from the metadata.
  const auto *arg_map = F.getMetadata("kernel_arg_map");
  if (arg_map) {
    for (const auto &arg : arg_map->operands()) {
      const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
      assert(arg_node->getNumOperands() == 7);
      const auto name =
          dyn_cast<MDString>(arg_node->getOperand(0))->getString();
      const auto old_index =
          dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
      // Remapped argument index
      const size_t new_index = static_cast<size_t>(
          dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
      const auto offset =
          dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
      const auto arg_size =
          dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
      const auto argKind = remap_arg_kind(
          dyn_cast<MDString>(arg_node->getOperand(5))->getString());
      const auto spec_id =
          dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();

      uint32_t descriptor_set = 0;
      uint32_t binding = 0;
      version0::DescriptorMapEntry::KernelArgData kernel_data = {
          F.getName(), name, static_cast<uint32_t>(old_index), argKind,
          static_cast<uint32_t>(spec_id),
          // This will be set below for pointer-to-local args.
          0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
      if (spec_id > 0) {
        kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
            func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
            DL));
      } else {
        auto *info = resource_var_at_index[new_index];
        assert(info);
        descriptor_set = info->descriptor_set;
        binding = info->binding;
      }
      descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
                                         binding);
    }
  } else {
    // There is no argument map.
    // Take descriptor info from the resource variable calls.
    // Take argument name and size from the arguments list.

    SmallVector<Argument *, 4> arguments;
    for (auto &arg : F.args()) {
      arguments.push_back(&arg);
    }

    unsigned arg_index = 0;
    for (auto *info : resource_var_at_index) {
      if (info) {
        auto arg = arguments[arg_index];
        unsigned arg_size = 0;
        if (info->arg_kind == clspv::ArgKind::Pod) {
          arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
        }

        // Local pointer arguments are unused in this case. Offset is always
        // zero.
        version0::DescriptorMapEntry::KernelArgData kernel_data = {
            F.getName(), arg->getName(),
            arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
            0, 0,
            0, arg_size};
        descriptorMapEntries->emplace_back(std::move(kernel_data),
                                           info->descriptor_set, info->binding);
      }
      arg_index++;
    }
    // Generate mappings for pointer-to-local arguments.
    for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
      Argument *arg = arguments[arg_index];
      auto where = LocalArgSpecIds.find(arg);
      if (where != LocalArgSpecIds.end()) {
        auto &local_arg_info = LocalSpecIdInfoMap[where->second];
        // POD argument members are unused in this case.
        version0::DescriptorMapEntry::KernelArgData kernel_data = {
            F.getName(),
            arg->getName(),
            arg_index,
            ArgKind::Local,
            static_cast<uint32_t>(local_arg_info.spec_id),
            static_cast<uint32_t>(
                GetTypeAllocSize(local_arg_info.elem_type, DL)),
            0,
            0};
        // Pointer-to-local arguments do not use a descriptor set or binding.
        descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
      }
    }
  }
}

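// GenerateFuncPrologue emits the OpFunction header and, for non-kernel
// functions, one OpFunctionParameter per argument. A minimal sketch of the
// output for a two-argument helper function (illustrative IDs and function
// control only):
//
//   %fn   = OpFunction %ret_type None %fn_type
//   %arg0 = OpFunctionParameter %ptr_type
//   %arg1 = OpFunctionParameter %uint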
void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  EntryPointVecType &EntryPoints = getEntryPointVec();
  auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
  auto &GlobalConstArgSet = getGlobalConstArgSet();

  FunctionType *FTy = F.getFunctionType();

  //
  // Generate OpFunction.
  //

  // FOps[0] : Result Type ID
  // FOps[1] : Function Control
  // FOps[2] : Function Type ID
  SPIRVOperandList FOps;

  // Find SPIRV instruction for return type.
  FOps << MkId(lookupType(FTy->getReturnType()));

  // Check function attributes for SPIRV Function Control.
  uint32_t FuncControl = spv::FunctionControlMaskNone;
  if (F.hasFnAttribute(Attribute::AlwaysInline)) {
    FuncControl |= spv::FunctionControlInlineMask;
  }
  if (F.hasFnAttribute(Attribute::NoInline)) {
    FuncControl |= spv::FunctionControlDontInlineMask;
  }
  // TODO: Check llvm attribute for Function Control Pure.
  if (F.hasFnAttribute(Attribute::ReadOnly)) {
    FuncControl |= spv::FunctionControlPureMask;
  }
  // TODO: Check llvm attribute for Function Control Const.
  if (F.hasFnAttribute(Attribute::ReadNone)) {
    FuncControl |= spv::FunctionControlConstMask;
  }

  FOps << MkNum(FuncControl);

  uint32_t FTyID;
  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FTyID = lookupType(NewFTy);
  } else {
    // Handle regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
    } else {
      FTyID = lookupType(FTy);
    }
  }

  FOps << MkId(FTyID);

  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    EntryPoints.push_back(std::make_pair(&F, nextID));
  }

  VMap[&F] = nextID;

  if (clspv::Option::ShowIDs()) {
    errs() << "Function " << F.getName() << " is " << nextID << "\n";
  }
  // Generate SPIRV instruction for function.
  auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
  SPIRVInstList.push_back(FuncInst);

  //
  // Generate OpFunctionParameter for normal functions.
  //

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {

    // Find Insert Point for OpDecorate.
    auto DecoInsertPoint =
        std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                     [](SPIRVInstruction *Inst) -> bool {
                       return Inst->getOpcode() != spv::OpDecorate &&
                              Inst->getOpcode() != spv::OpMemberDecorate &&
                              Inst->getOpcode() != spv::OpExtInstImport;
                     });

    // Iterate over the arguments for names, rather than the param types from
    // the function type.
    unsigned ArgIdx = 0;
    for (Argument &Arg : F.args()) {
      uint32_t param_id = nextID++;
      VMap[&Arg] = param_id;

      if (CalledWithCoherentResource(Arg)) {
        // If the arg is ever passed a coherent resource, then decorate this
        // parameter with Coherent too.
        SPIRVOperandList decoration_ops;
        decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
        SPIRVInstList.insert(
            DecoInsertPoint,
            new SPIRVInstruction(spv::OpDecorate, decoration_ops));
      }

      // ParamOps[0] : Result Type ID
      SPIRVOperandList ParamOps;

      // Find SPIRV instruction for parameter type.
      uint32_t ParamTyID = lookupType(Arg.getType());
      if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
        if (GlobalConstFuncTyMap.count(FTy)) {
          if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
            Type *EleTy = PTy->getPointerElementType();
            Type *ArgTy =
                PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
            ParamTyID = lookupType(ArgTy);
            GlobalConstArgSet.insert(&Arg);
          }
        }
      }
      ParamOps << MkId(ParamTyID);

      // Generate SPIRV instruction for parameter.
      auto *ParamInst =
          new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
      SPIRVInstList.push_back(ParamInst);

      ArgIdx++;
    }
  }
}

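// GenerateModuleInfo prepends the module-level preamble: OpCapability and
// OpExtension instructions derived from the types and features actually used,
// the OpMemoryModel (Logical / GLSL450), one OpEntryPoint per kernel, an
// OpExecutionMode LocalSize where reqd_work_group_size metadata exists,
// OpSource, and SpecId decorations for the workgroup-size spec constants.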
void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  EntryPointVecType &EntryPoints = getEntryPointVec();
  ValueMapType &VMap = getValueMap();
  ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
  uint32_t &ExtInstImportID = getOpExtInstImportID();
  std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();

  // Set up insert point.
  auto InsertPoint = SPIRVInstList.begin();

  //
  // Generate OpCapability
  //
  // TODO: Which llvm information is mapped to SPIRV Capability?

  // Ops[0] = Capability
  SPIRVOperandList Ops;

  auto *CapInst =
      new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
  SPIRVInstList.insert(InsertPoint, CapInst);

  for (Type *Ty : getTypeList()) {
    if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
      // Generate OpCapability for i8 type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                {MkNum(spv::CapabilityInt8)}));
    } else if (Ty->isIntegerTy(16)) {
      // Generate OpCapability for i16 type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                {MkNum(spv::CapabilityInt16)}));
    } else if (Ty->isIntegerTy(64)) {
      // Generate OpCapability for i64 type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                {MkNum(spv::CapabilityInt64)}));
    } else if (Ty->isHalfTy()) {
      // Generate OpCapability for half type.
      SPIRVInstList.insert(
          InsertPoint, new SPIRVInstruction(spv::OpCapability,
                                            {MkNum(spv::CapabilityFloat16)}));
    } else if (Ty->isDoubleTy()) {
      // Generate OpCapability for double type.
      SPIRVInstList.insert(
          InsertPoint, new SPIRVInstruction(spv::OpCapability,
                                            {MkNum(spv::CapabilityFloat64)}));
    } else if (auto *STy = dyn_cast<StructType>(Ty)) {
      if (STy->isOpaque()) {
        if (STy->getName().equals("opencl.image2d_wo_t") ||
            STy->getName().equals("opencl.image3d_wo_t")) {
          // Generate OpCapability for write only image type.
          SPIRVInstList.insert(
              InsertPoint,
              new SPIRVInstruction(
                  spv::OpCapability,
                  {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
        }
      }
    }
  }

  { // OpCapability ImageQuery
    bool hasImageQuery = false;
    for (const char *imageQuery : {
             "_Z15get_image_width14ocl_image2d_ro",
             "_Z15get_image_width14ocl_image2d_wo",
             "_Z16get_image_height14ocl_image2d_ro",
             "_Z16get_image_height14ocl_image2d_wo",
         }) {
      if (module.getFunction(imageQuery)) {
        hasImageQuery = true;
        break;
      }
    }
    if (hasImageQuery) {
      auto *ImageQueryCapInst = new SPIRVInstruction(
          spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
      SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
    }
  }

  if (hasVariablePointers()) {
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointers);

    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpCapability, Ops));
  } else if (hasVariablePointersStorageBuffer()) {
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);

    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpCapability, Ops));
  }

  // Always add the storage buffer extension.
  {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
    SPIRVInstList.insert(InsertPoint, ExtensionInst);
  }

  if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
    SPIRVInstList.insert(InsertPoint, ExtensionInst);
  }

  if (ExtInstImportID) {
    ++InsertPoint;
  }

  //
  // Generate OpMemoryModel
  //
  // Memory model for Vulkan will always be GLSL450.

  // Ops[0] = Addressing Model
  // Ops[1] = Memory Model
  Ops.clear();
  Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);

  auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
  SPIRVInstList.insert(InsertPoint, MemModelInst);

  //
  // Generate OpEntryPoint
  //
  for (auto EntryPoint : EntryPoints) {
    // Ops[0] = Execution Model
    // Ops[1] = EntryPoint ID
    // Ops[2] = Name (Literal String)
    // ...
    //
    // TODO: Do we need to consider Interface ID for forward references?
    Ops.clear();
    const StringRef &name = EntryPoint.first->getName();
    Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
        << MkString(name);

    for (Value *Interface : EntryPointInterfaces) {
      Ops << MkId(VMap[Interface]);
    }

    auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
    SPIRVInstList.insert(InsertPoint, EntryPointInst);
  }

  for (auto EntryPoint : EntryPoints) {
    if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
                               ->getMetadata("reqd_work_group_size")) {

      if (!BuiltinDimVec.empty()) {
        llvm_unreachable(
            "Kernels should have consistent work group size definition");
      }

      //
      // Generate OpExecutionMode
      //

      // Ops[0] = Entry Point ID
      // Ops[1] = Execution Mode
      // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
      Ops.clear();
      Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);

      uint32_t XDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
      uint32_t YDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
      uint32_t ZDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

      Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);

      auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
      SPIRVInstList.insert(InsertPoint, ExecModeInst);
    }
  }

  //
  // Generate OpSource.
  //
  // Ops[0] = SourceLanguage ID
  // Ops[1] = Version (LiteralNum)
  //
  Ops.clear();
  if (clspv::Option::CPlusPlus()) {
    Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
  } else {
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
  }

  auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
  SPIRVInstList.insert(InsertPoint, OpenSourceInst);

  if (!BuiltinDimVec.empty()) {
    //
    // Generate OpDecorates for x/y/z dimension.
    //
    // Ops[0] = Target ID
    // Ops[1] = Decoration (SpecId)
    // Ops[2] = Specialization Constant ID (Literal Number)

    // X Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));

    // Y Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));

    // Z Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));
  }
}

void SPIRVProducerPass::GenerateEntryPointInitialStores() {
  // Work around a driver bug. Initializers on Private variables might not
  // work. So the start of the kernel should store the initializer value to the
  // variables. Yes, *every* entry point pays this cost if *any* entry point
  // uses this builtin. At this point I judge this to be an acceptable tradeoff
  // of complexity vs. runtime, for a broken driver.
  // TODO(dneto): Remove this at some point once fixed drivers are widely
  // available.
  if (WorkgroupSizeVarID) {
    assert(WorkgroupSizeValueID);

    SPIRVOperandList Ops;
    Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);

    auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
    getSPIRVInstList().push_back(Inst);
  }
}

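// GenerateFuncBody emits each basic block as an OpLabel followed by its
// instructions. Allocas are emitted first within a block, then (for a kernel
// entry block, when the hack-initializers option is on) the workaround stores
// from GenerateEntryPointInitialStores, then the remaining instructions.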
void SPIRVProducerPass::GenerateFuncBody(Function &F) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();

  const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;

  for (BasicBlock &BB : F) {
    // Register BasicBlock to ValueMap.
    VMap[&BB] = nextID;

    //
    // Generate OpLabel for Basic Block.
    //
    SPIRVOperandList Ops;
    auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
    SPIRVInstList.push_back(Inst);

    // OpVariable instructions must come first.
    for (Instruction &I : BB) {
      if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
        // Allocating a pointer requires variable pointers.
        if (alloca->getAllocatedType()->isPointerTy()) {
          setVariablePointersCapabilities(
              alloca->getAllocatedType()->getPointerAddressSpace());
        }
        GenerateInstruction(I);
      }
    }

    if (&BB == &F.getEntryBlock() && IsKernel) {
      if (clspv::Option::HackInitializers()) {
        GenerateEntryPointInitialStores();
      }
    }

    for (Instruction &I : BB) {
      if (!isa<AllocaInst>(I)) {
        GenerateInstruction(I);
      }
    }
  }
}

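// The next three helpers are direct lookup tables from LLVM predicates and
// opcodes to SPIR-V opcodes; they assert if asked about an unhandled input.
// For example, CmpInst::ICMP_SLT maps to spv::OpSLessThan and
// Instruction::AShr maps to spv::OpShiftRightArithmetic.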
spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
  const std::map<CmpInst::Predicate, spv::Op> Map = {
      {CmpInst::ICMP_EQ, spv::OpIEqual},
      {CmpInst::ICMP_NE, spv::OpINotEqual},
      {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
      {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
      {CmpInst::ICMP_ULT, spv::OpULessThan},
      {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
      {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
      {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
      {CmpInst::ICMP_SLT, spv::OpSLessThan},
      {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
      {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
      {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
      {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
      {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
      {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
      {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
      {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
      {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
      {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
      {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
      {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
      {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};

  assert(0 != Map.count(I->getPredicate()));

  return Map.at(I->getPredicate());
}

spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
  const std::map<unsigned, spv::Op> Map{
      {Instruction::Trunc, spv::OpUConvert},
      {Instruction::ZExt, spv::OpUConvert},
      {Instruction::SExt, spv::OpSConvert},
      {Instruction::FPToUI, spv::OpConvertFToU},
      {Instruction::FPToSI, spv::OpConvertFToS},
      {Instruction::UIToFP, spv::OpConvertUToF},
      {Instruction::SIToFP, spv::OpConvertSToF},
      {Instruction::FPTrunc, spv::OpFConvert},
      {Instruction::FPExt, spv::OpFConvert},
      {Instruction::BitCast, spv::OpBitcast}};

  assert(0 != Map.count(I.getOpcode()));

  return Map.at(I.getOpcode());
}

spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
  if (I.getType()->isIntOrIntVectorTy(1)) {
    switch (I.getOpcode()) {
    default:
      break;
    case Instruction::Or:
      return spv::OpLogicalOr;
    case Instruction::And:
      return spv::OpLogicalAnd;
    case Instruction::Xor:
      return spv::OpLogicalNotEqual;
    }
  }

  const std::map<unsigned, spv::Op> Map{
      {Instruction::Add, spv::OpIAdd},
      {Instruction::FAdd, spv::OpFAdd},
      {Instruction::Sub, spv::OpISub},
      {Instruction::FSub, spv::OpFSub},
      {Instruction::Mul, spv::OpIMul},
      {Instruction::FMul, spv::OpFMul},
      {Instruction::UDiv, spv::OpUDiv},
      {Instruction::SDiv, spv::OpSDiv},
      {Instruction::FDiv, spv::OpFDiv},
      {Instruction::URem, spv::OpUMod},
      {Instruction::SRem, spv::OpSRem},
      {Instruction::FRem, spv::OpFRem},
      {Instruction::Or, spv::OpBitwiseOr},
      {Instruction::Xor, spv::OpBitwiseXor},
      {Instruction::And, spv::OpBitwiseAnd},
      {Instruction::Shl, spv::OpShiftLeftLogical},
      {Instruction::LShr, spv::OpShiftRightLogical},
      {Instruction::AShr, spv::OpShiftRightArithmetic}};

  assert(0 != Map.count(I.getOpcode()));

  return Map.at(I.getOpcode());
}

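// GenerateInstruction translates one LLVM instruction into SPIR-V, keyed on
// the opcode. A few cases need extra care: casts from i1 become OpSelect
// between constant 0/1 (or 0.0/1.0), trunc-to-i8 without Int8 support becomes
// a mask with 255, i1 logic ops use the OpLogical* forms, and GEPs may become
// OpPtrAccessChain and record the need for an ArrayStride decoration.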
3549void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3550 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3551 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003552 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3553 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3554
3555 // Register Instruction to ValueMap.
3556 if (0 == VMap[&I]) {
3557 VMap[&I] = nextID;
3558 }
3559
3560 switch (I.getOpcode()) {
3561 default: {
3562 if (Instruction::isCast(I.getOpcode())) {
3563 //
3564 // Generate SPIRV instructions for cast operators.
3565 //
3566
David Netod2de94a2017-08-28 17:27:47 -04003567 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003568 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003569 auto toI8 = Ty == Type::getInt8Ty(Context);
3570 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003571 // Handle zext, sext and uitofp with i1 type specially.
3572 if ((I.getOpcode() == Instruction::ZExt ||
3573 I.getOpcode() == Instruction::SExt ||
3574 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003575 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003576 //
3577 // Generate OpSelect.
3578 //
3579
3580 // Ops[0] = Result Type ID
3581 // Ops[1] = Condition ID
3582 // Ops[2] = True Constant ID
3583 // Ops[3] = False Constant ID
3584 SPIRVOperandList Ops;
3585
David Neto257c3892018-04-11 13:19:45 -04003586 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003587
David Neto22f144c2017-06-12 14:26:21 -04003588 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003589 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003590
3591 uint32_t TrueID = 0;
3592 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003593 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003594 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003595 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003596 } else {
3597 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3598 }
David Neto257c3892018-04-11 13:19:45 -04003599 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003600
3601 uint32_t FalseID = 0;
3602 if (I.getOpcode() == Instruction::ZExt) {
3603 FalseID = VMap[Constant::getNullValue(I.getType())];
3604 } else if (I.getOpcode() == Instruction::SExt) {
3605 FalseID = VMap[Constant::getNullValue(I.getType())];
3606 } else {
3607 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3608 }
David Neto257c3892018-04-11 13:19:45 -04003609 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003610
David Neto87846742018-04-11 17:36:22 -04003611 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003612 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003613 } else if (!clspv::Option::Int8Support() &&
3614 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003615 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3616 // 8 bits.
3617 // Before:
3618 // %result = trunc i32 %a to i8
3619 // After
3620 // %result = OpBitwiseAnd %uint %a %uint_255
3621
3622 SPIRVOperandList Ops;
3623
David Neto257c3892018-04-11 13:19:45 -04003624 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003625
3626 Type *UintTy = Type::getInt32Ty(Context);
3627 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003628 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003629
David Neto87846742018-04-11 17:36:22 -04003630 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003631 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003632 } else {
3633 // Ops[0] = Result Type ID
3634 // Ops[1] = Source Value ID
3635 SPIRVOperandList Ops;
3636
David Neto257c3892018-04-11 13:19:45 -04003637 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003638
David Neto87846742018-04-11 17:36:22 -04003639 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003640 SPIRVInstList.push_back(Inst);
3641 }
3642 } else if (isa<BinaryOperator>(I)) {
3643 //
3644 // Generate SPIRV instructions for binary operators.
3645 //
3646
3647 // Handle xor with i1 type specially.
3648 if (I.getOpcode() == Instruction::Xor &&
3649 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003650 ((isa<ConstantInt>(I.getOperand(0)) &&
3651 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3652 (isa<ConstantInt>(I.getOperand(1)) &&
3653 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003654 //
3655 // Generate OpLogicalNot.
3656 //
3657 // Ops[0] = Result Type ID
3658 // Ops[1] = Operand
3659 SPIRVOperandList Ops;
3660
David Neto257c3892018-04-11 13:19:45 -04003661 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003662
3663 Value *CondV = I.getOperand(0);
3664 if (isa<Constant>(I.getOperand(0))) {
3665 CondV = I.getOperand(1);
3666 }
David Neto257c3892018-04-11 13:19:45 -04003667 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003668
David Neto87846742018-04-11 17:36:22 -04003669 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003670 SPIRVInstList.push_back(Inst);
3671 } else {
3672 // Ops[0] = Result Type ID
3673 // Ops[1] = Operand 0
3674 // Ops[2] = Operand 1
3675 SPIRVOperandList Ops;
3676
David Neto257c3892018-04-11 13:19:45 -04003677 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3678 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003679
David Neto87846742018-04-11 17:36:22 -04003680 auto *Inst =
3681 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003682 SPIRVInstList.push_back(Inst);
3683 }
3684 } else {
3685 I.print(errs());
3686 llvm_unreachable("Unsupported instruction???");
3687 }
3688 break;
3689 }
3690 case Instruction::GetElementPtr: {
3691 auto &GlobalConstArgSet = getGlobalConstArgSet();
3692
3693 //
3694 // Generate OpAccessChain.
3695 //
3696 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3697
3698 //
3699 // Generate OpAccessChain.
3700 //
3701
3702 // Ops[0] = Result Type ID
3703 // Ops[1] = Base ID
3704 // Ops[2] ... Ops[n] = Indexes ID
3705 SPIRVOperandList Ops;
3706
alan-bakerb6b09dc2018-11-08 16:59:28 -05003707 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003708 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3709 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3710 // Use pointer type with private address space for global constant.
3711 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003712 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003713 }
David Neto257c3892018-04-11 13:19:45 -04003714
3715 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003716
David Neto862b7d82018-06-14 18:48:37 -04003717 // Generate the base pointer.
3718 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003719
David Neto862b7d82018-06-14 18:48:37 -04003720 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003721
3722 //
3723 // Follows below rules for gep.
3724 //
David Neto862b7d82018-06-14 18:48:37 -04003725 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3726 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003727 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3728 // first index.
3729 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3730 // use gep's first index.
3731 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
3732 // gep's first index.
3733 //
3734 spv::Op Opcode = spv::OpAccessChain;
3735 unsigned offset = 0;
3736 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003737 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003738 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003739 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003740 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003741 }
David Neto862b7d82018-06-14 18:48:37 -04003742 } else {
David Neto22f144c2017-06-12 14:26:21 -04003743 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003744 }
3745
3746 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003747 // Do we need to generate ArrayStride? Check against the GEP result type
3748 // rather than the pointer type of the base because when indexing into
3749 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3750 // for something else in the SPIR-V.
3751 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003752 auto address_space = ResultType->getAddressSpace();
3753 setVariablePointersCapabilities(address_space);
3754 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003755 case spv::StorageClassStorageBuffer:
3756 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003757 // Save the need to generate an ArrayStride decoration. But defer
3758 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003759 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003760 break;
3761 default:
3762 break;
David Neto1a1a0582017-07-07 12:01:44 -04003763 }
David Neto22f144c2017-06-12 14:26:21 -04003764 }
3765
3766 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003767 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003768 }
3769
David Neto87846742018-04-11 17:36:22 -04003770 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003771 SPIRVInstList.push_back(Inst);
3772 break;
3773 }
3774 case Instruction::ExtractValue: {
3775 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3776 // Ops[0] = Result Type ID
3777 // Ops[1] = Composite ID
3778 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3779 SPIRVOperandList Ops;
3780
David Neto257c3892018-04-11 13:19:45 -04003781 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003782
3783 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003784 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003785
3786 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003787 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003788 }
3789
David Neto87846742018-04-11 17:36:22 -04003790 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003791 SPIRVInstList.push_back(Inst);
3792 break;
3793 }
3794 case Instruction::InsertValue: {
3795 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3796 // Ops[0] = Result Type ID
3797 // Ops[1] = Object ID
3798 // Ops[2] = Composite ID
3799 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3800 SPIRVOperandList Ops;
3801
3802 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003803 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003804
3805 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003806 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003807
3808 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003809 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003810
3811 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003812 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003813 }
3814
David Neto87846742018-04-11 17:36:22 -04003815 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003816 SPIRVInstList.push_back(Inst);
3817 break;
3818 }
3819 case Instruction::Select: {
3820 //
3821 // Generate OpSelect.
3822 //
3823
3824 // Ops[0] = Result Type ID
3825 // Ops[1] = Condition ID
3826 // Ops[2] = True Constant ID
3827 // Ops[3] = False Constant ID
3828 SPIRVOperandList Ops;
3829
3830 // Find SPIRV instruction for parameter type.
3831 auto Ty = I.getType();
3832 if (Ty->isPointerTy()) {
3833 auto PointeeTy = Ty->getPointerElementType();
3834 if (PointeeTy->isStructTy() &&
3835 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3836 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003837 } else {
3838 // Selecting between pointers requires variable pointers.
3839 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3840 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3841 setVariablePointers(true);
3842 }
David Neto22f144c2017-06-12 14:26:21 -04003843 }
3844 }
3845
David Neto257c3892018-04-11 13:19:45 -04003846 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3847 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003848
David Neto87846742018-04-11 17:36:22 -04003849 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003850 SPIRVInstList.push_back(Inst);
3851 break;
3852 }
3853 case Instruction::ExtractElement: {
3854 // Handle <4 x i8> type manually.
3855 Type *CompositeTy = I.getOperand(0)->getType();
3856 if (is4xi8vec(CompositeTy)) {
3857 //
3858 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3859 // <4 x i8>.
3860 //
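// For illustration, extracting byte i from the packed 32-bit word is emitted
// roughly as (ids illustrative; %shift holds i * 8):
//   %shifted = OpShiftRightLogical %uint %vec %shift
//   %byte    = OpBitwiseAnd %uint %shifted %uint_255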
3861
3862 //
3863 // Generate OpShiftRightLogical
3864 //
3865 // Ops[0] = Result Type ID
3866 // Ops[1] = Operand 0
3867 // Ops[2] = Operand 1
3868 //
3869 SPIRVOperandList Ops;
3870
David Neto257c3892018-04-11 13:19:45 -04003871 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04003872
3873 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003874 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04003875
3876 uint32_t Op1ID = 0;
3877 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3878 // Handle constant index.
3879 uint64_t Idx = CI->getZExtValue();
3880 Value *ShiftAmount =
3881 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3882 Op1ID = VMap[ShiftAmount];
3883 } else {
3884 // Handle variable index.
3885 SPIRVOperandList TmpOps;
3886
David Neto257c3892018-04-11 13:19:45 -04003887 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3888 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003889
3890 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003891 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003892
3893 Op1ID = nextID;
3894
David Neto87846742018-04-11 17:36:22 -04003895 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003896 SPIRVInstList.push_back(TmpInst);
3897 }
David Neto257c3892018-04-11 13:19:45 -04003898 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04003899
3900 uint32_t ShiftID = nextID;
3901
David Neto87846742018-04-11 17:36:22 -04003902 auto *Inst =
3903 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003904 SPIRVInstList.push_back(Inst);
3905
3906 //
3907 // Generate OpBitwiseAnd
3908 //
3909 // Ops[0] = Result Type ID
3910 // Ops[1] = Operand 0
3911 // Ops[2] = Operand 1
3912 //
3913 Ops.clear();
3914
David Neto257c3892018-04-11 13:19:45 -04003915 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04003916
3917 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04003918 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04003919
David Neto9b2d6252017-09-06 15:47:37 -04003920 // Reset mapping for this value to the result of the bitwise and.
3921 VMap[&I] = nextID;
3922
David Neto87846742018-04-11 17:36:22 -04003923 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003924 SPIRVInstList.push_back(Inst);
3925 break;
3926 }
3927
3928 // Ops[0] = Result Type ID
3929 // Ops[1] = Composite ID
3930 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3931 SPIRVOperandList Ops;
3932
David Neto257c3892018-04-11 13:19:45 -04003933 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003934
3935 spv::Op Opcode = spv::OpCompositeExtract;
3936 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04003937 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04003938 } else {
David Neto257c3892018-04-11 13:19:45 -04003939 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003940 Opcode = spv::OpVectorExtractDynamic;
3941 }
3942
David Neto87846742018-04-11 17:36:22 -04003943 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003944 SPIRVInstList.push_back(Inst);
3945 break;
3946 }
3947 case Instruction::InsertElement: {
3948 // Handle <4 x i8> type manually.
3949 Type *CompositeTy = I.getOperand(0)->getType();
3950 if (is4xi8vec(CompositeTy)) {
3951 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
3952 uint32_t CstFFID = VMap[CstFF];
3953
3954 uint32_t ShiftAmountID = 0;
3955 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
3956 // Handle constant index.
3957 uint64_t Idx = CI->getZExtValue();
3958 Value *ShiftAmount =
3959 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3960 ShiftAmountID = VMap[ShiftAmount];
3961 } else {
3962 // Handle variable index.
3963 SPIRVOperandList TmpOps;
3964
David Neto257c3892018-04-11 13:19:45 -04003965 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3966 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003967
3968 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003969 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003970
3971 ShiftAmountID = nextID;
3972
David Neto87846742018-04-11 17:36:22 -04003973 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003974 SPIRVInstList.push_back(TmpInst);
3975 }
3976
3977 //
3978 // Generate mask operations.
3979 //
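// For illustration, inserting byte %val at byte index i is emitted roughly
// as (ids illustrative; %shift holds i * 8):
//   %mask    = OpShiftLeftLogical %uint %uint_255 %shift
//   %invmask = OpNot %uint %mask
//   %cleared = OpBitwiseAnd %uint %vec %invmask
//   %newval  = OpShiftLeftLogical %uint %val %shift
//   %result  = OpBitwiseOr %uint %cleared %newval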
3980
3981 // ShiftLeft mask according to index of insertelement.
3982 SPIRVOperandList Ops;
3983
David Neto257c3892018-04-11 13:19:45 -04003984 const uint32_t ResTyID = lookupType(CompositeTy);
3985 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04003986
3987 uint32_t MaskID = nextID;
3988
David Neto87846742018-04-11 17:36:22 -04003989 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003990 SPIRVInstList.push_back(Inst);
3991
3992 // Inverse mask.
3993 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003994 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04003995
3996 uint32_t InvMaskID = nextID;
3997
David Neto87846742018-04-11 17:36:22 -04003998 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003999 SPIRVInstList.push_back(Inst);
4000
4001 // Apply mask.
4002 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004003 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004004
4005 uint32_t OrgValID = nextID;
4006
David Neto87846742018-04-11 17:36:22 -04004007 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004008 SPIRVInstList.push_back(Inst);
4009
4010 // Create correct value according to index of insertelement.
4011 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004012 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4013 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004014
4015 uint32_t InsertValID = nextID;
4016
David Neto87846742018-04-11 17:36:22 -04004017 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004018 SPIRVInstList.push_back(Inst);
4019
4020 // Insert value to original value.
4021 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004022 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004023
David Netoa394f392017-08-26 20:45:29 -04004024 VMap[&I] = nextID;
4025
David Neto87846742018-04-11 17:36:22 -04004026 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004027 SPIRVInstList.push_back(Inst);
4028
4029 break;
4030 }
4031
David Neto22f144c2017-06-12 14:26:21 -04004032 SPIRVOperandList Ops;
4033
James Priced26efea2018-06-09 23:28:32 +01004034 // Ops[0] = Result Type ID
4035 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004036
4037 spv::Op Opcode = spv::OpCompositeInsert;
4038 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004039 const auto value = CI->getZExtValue();
4040 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004041 // Ops[1] = Object ID
4042 // Ops[2] = Composite ID
4043 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004044 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004045 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004046 } else {
James Priced26efea2018-06-09 23:28:32 +01004047 // Ops[1] = Composite ID
4048 // Ops[2] = Object ID
4049 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004050 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004051 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004052 Opcode = spv::OpVectorInsertDynamic;
4053 }
4054
David Neto87846742018-04-11 17:36:22 -04004055 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004056 SPIRVInstList.push_back(Inst);
4057 break;
4058 }
4059 case Instruction::ShuffleVector: {
4060 // Ops[0] = Result Type ID
4061 // Ops[1] = Vector 1 ID
4062 // Ops[2] = Vector 2 ID
4063 // Ops[3] ... Ops[n] = Components (Literal Number)
4064 SPIRVOperandList Ops;
4065
David Neto257c3892018-04-11 13:19:45 -04004066 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4067 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004068
4069 uint64_t NumElements = 0;
4070 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4071 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4072
4073 if (Cst->isNullValue()) {
4074 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004075 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004076 }
4077 } else if (const ConstantDataSequential *CDS =
4078 dyn_cast<ConstantDataSequential>(Cst)) {
4079 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4080 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004081 const auto value = CDS->getElementAsInteger(i);
4082 assert(value <= UINT32_MAX);
4083 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004084 }
4085 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4086 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4087 auto Op = CV->getOperand(i);
4088
4089 uint32_t literal = 0;
4090
4091 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4092 literal = static_cast<uint32_t>(CI->getZExtValue());
4093 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4094 literal = 0xFFFFFFFFu;
4095 } else {
4096 Op->print(errs());
4097 llvm_unreachable("Unsupported element in ConstantVector!");
4098 }
4099
David Neto257c3892018-04-11 13:19:45 -04004100 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004101 }
4102 } else {
4103 Cst->print(errs());
4104 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4105 }
4106 }
4107
David Neto87846742018-04-11 17:36:22 -04004108 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004109 SPIRVInstList.push_back(Inst);
4110 break;
4111 }
4112 case Instruction::ICmp:
4113 case Instruction::FCmp: {
4114 CmpInst *CmpI = cast<CmpInst>(&I);
4115
David Netod4ca2e62017-07-06 18:47:35 -04004116 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004117 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004118 if (isa<PointerType>(ArgTy)) {
4119 CmpI->print(errs());
4120 std::string name = I.getParent()->getParent()->getName();
4121 errs()
4122 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4123 << "in function " << name << "\n";
4124 llvm_unreachable("Pointer equality check is invalid");
4125 break;
4126 }
4127
David Neto257c3892018-04-11 13:19:45 -04004128 // Ops[0] = Result Type ID
4129 // Ops[1] = Operand 1 ID
4130 // Ops[2] = Operand 2 ID
4131 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004132
David Neto257c3892018-04-11 13:19:45 -04004133 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4134 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004135
4136 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004137 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004138 SPIRVInstList.push_back(Inst);
4139 break;
4140 }
4141 case Instruction::Br: {
4142 // A branch instruction is deferred because it needs its target labels' IDs.
4143 // Record the slot's location in the SPIRVInstructionList.
4144 DeferredInsts.push_back(
4145 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4146 break;
4147 }
4148 case Instruction::Switch: {
4149 I.print(errs());
4150 llvm_unreachable("Unsupported instruction???");
4151 break;
4152 }
4153 case Instruction::IndirectBr: {
4154 I.print(errs());
4155 llvm_unreachable("Unsupported instruction???");
4156 break;
4157 }
4158 case Instruction::PHI: {
4159 // A PHI instruction is deferred because it needs the IDs of its incoming
4160 // values and blocks. Record the slot's location in the SPIRVInstructionList.
4161 DeferredInsts.push_back(
4162 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4163 break;
4164 }
4165 case Instruction::Alloca: {
4166 //
4167 // Generate OpVariable.
4168 //
4169 // Ops[0] : Result Type ID
4170 // Ops[1] : Storage Class
4171 SPIRVOperandList Ops;
4172
David Neto257c3892018-04-11 13:19:45 -04004173 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004174
David Neto87846742018-04-11 17:36:22 -04004175 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004176 SPIRVInstList.push_back(Inst);
4177 break;
4178 }
4179 case Instruction::Load: {
4180 LoadInst *LD = cast<LoadInst>(&I);
4181 //
4182 // Generate OpLoad.
4183 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004184
alan-baker5b86ed72019-02-15 08:26:50 -05004185 if (LD->getType()->isPointerTy()) {
4186 // Loading a pointer requires variable pointers.
4187 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4188 }
David Neto22f144c2017-06-12 14:26:21 -04004189
David Neto0a2f98d2017-09-15 19:38:40 -04004190 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004191 uint32_t PointerID = VMap[LD->getPointerOperand()];
4192
4193 // This is a hack to work around what looks like a driver bug.
4194 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004195 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4196 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004197 // TODO(dneto): Remove this awful hack once drivers are fixed.
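// For illustration, instead of "%r = OpLoad %v3uint %wgsize_var" this path
// emits roughly "%r = OpBitwiseAnd %v3uint %wgsize_val %wgsize_val"
// (names illustrative).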
David Netoa60b00b2017-09-15 16:34:09 -04004198 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004199 // Generate a bitwise-and of the original value with itself.
4200 // We should have been able to get away with just an OpCopyObject,
4201 // but we need something more complex to get past certain driver bugs.
4202 // This is ridiculous, but necessary.
4203 // TODO(dneto): Revisit this once drivers fix their bugs.
4204
4205 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004206 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4207 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004208
David Neto87846742018-04-11 17:36:22 -04004209 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004210 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004211 break;
4212 }
4213
4214 // This is the normal path. Generate a load.
4215
David Neto22f144c2017-06-12 14:26:21 -04004216 // Ops[0] = Result Type ID
4217 // Ops[1] = Pointer ID
4218 // Ops[2] ... Ops[n] = Optional Memory Access
4219 //
4220 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004221
David Neto22f144c2017-06-12 14:26:21 -04004222 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004223 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004224
David Neto87846742018-04-11 17:36:22 -04004225 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004226 SPIRVInstList.push_back(Inst);
4227 break;
4228 }
4229 case Instruction::Store: {
4230 StoreInst *ST = cast<StoreInst>(&I);
4231 //
4232 // Generate OpStore.
4233 //
4234
alan-baker5b86ed72019-02-15 08:26:50 -05004235 if (ST->getValueOperand()->getType()->isPointerTy()) {
4236 // Storing a pointer requires variable pointers.
4237 setVariablePointersCapabilities(
4238 ST->getValueOperand()->getType()->getPointerAddressSpace());
4239 }
4240
David Neto22f144c2017-06-12 14:26:21 -04004241 // Ops[0] = Pointer ID
4242 // Ops[1] = Object ID
4243 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4244 //
4245 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004246 SPIRVOperandList Ops;
4247 Ops << MkId(VMap[ST->getPointerOperand()])
4248 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004249
David Neto87846742018-04-11 17:36:22 -04004250 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004251 SPIRVInstList.push_back(Inst);
4252 break;
4253 }
4254 case Instruction::AtomicCmpXchg: {
4255 I.print(errs());
4256 llvm_unreachable("Unsupported instruction???");
4257 break;
4258 }
4259 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004260 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4261
4262 spv::Op opcode;
4263
4264 switch (AtomicRMW->getOperation()) {
4265 default:
4266 I.print(errs());
4267 llvm_unreachable("Unsupported instruction???");
4268 case llvm::AtomicRMWInst::Add:
4269 opcode = spv::OpAtomicIAdd;
4270 break;
4271 case llvm::AtomicRMWInst::Sub:
4272 opcode = spv::OpAtomicISub;
4273 break;
4274 case llvm::AtomicRMWInst::Xchg:
4275 opcode = spv::OpAtomicExchange;
4276 break;
4277 case llvm::AtomicRMWInst::Min:
4278 opcode = spv::OpAtomicSMin;
4279 break;
4280 case llvm::AtomicRMWInst::Max:
4281 opcode = spv::OpAtomicSMax;
4282 break;
4283 case llvm::AtomicRMWInst::UMin:
4284 opcode = spv::OpAtomicUMin;
4285 break;
4286 case llvm::AtomicRMWInst::UMax:
4287 opcode = spv::OpAtomicUMax;
4288 break;
4289 case llvm::AtomicRMWInst::And:
4290 opcode = spv::OpAtomicAnd;
4291 break;
4292 case llvm::AtomicRMWInst::Or:
4293 opcode = spv::OpAtomicOr;
4294 break;
4295 case llvm::AtomicRMWInst::Xor:
4296 opcode = spv::OpAtomicXor;
4297 break;
4298 }
4299
4300 //
4301 // Generate OpAtomic*.
4302 //
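// For illustration, an atomic add is emitted roughly as (ids illustrative):
//   %r = OpAtomicIAdd %uint %ptr %scope %semantics %val
// where %scope is the i32 constant ScopeDevice and %semantics is the i32
// constant for UniformMemory | SequentiallyConsistent, both looked up via
// VMap below.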
4303 SPIRVOperandList Ops;
4304
David Neto257c3892018-04-11 13:19:45 -04004305 Ops << MkId(lookupType(I.getType()))
4306 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004307
4308 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004309 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004310 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004311
4312 const auto ConstantMemorySemantics = ConstantInt::get(
4313 IntTy, spv::MemorySemanticsUniformMemoryMask |
4314 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004315 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004316
David Neto257c3892018-04-11 13:19:45 -04004317 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004318
4319 VMap[&I] = nextID;
4320
David Neto87846742018-04-11 17:36:22 -04004321 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004322 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004323 break;
4324 }
4325 case Instruction::Fence: {
4326 I.print(errs());
4327 llvm_unreachable("Unsupported instruction???");
4328 break;
4329 }
4330 case Instruction::Call: {
4331 CallInst *Call = dyn_cast<CallInst>(&I);
4332 Function *Callee = Call->getCalledFunction();
4333
Alan Baker202c8c72018-08-13 13:47:44 -04004334 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004335 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4336 // Generate an OpLoad
4337 SPIRVOperandList Ops;
4338 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004339
David Neto862b7d82018-06-14 18:48:37 -04004340 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4341 << MkId(ResourceVarDeferredLoadCalls[Call]);
4342
4343 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4344 SPIRVInstList.push_back(Inst);
4345 VMap[Call] = load_id;
4346 break;
4347
4348 } else {
4349 // This maps to an OpVariable we've already generated.
4350 // No code is generated for the call.
4351 }
4352 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004353 } else if (Callee->getName().startswith(
4354 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004355 // Don't codegen an instruction here, but instead map this call directly
4356 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004357 int spec_id = static_cast<int>(
4358 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004359 const auto &info = LocalSpecIdInfoMap[spec_id];
4360 VMap[Call] = info.variable_id;
4361 break;
David Neto862b7d82018-06-14 18:48:37 -04004362 }
4363
4364 // Sampler initializers become a load of the corresponding sampler.
4365
Kévin Petitdf71de32019-04-09 14:09:50 +01004366 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004367 // Map this to a load from the variable.
4368 const auto index_into_sampler_map =
4369 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4370
4371 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004372 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004373 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004374
David Neto257c3892018-04-11 13:19:45 -04004375 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004376 << MkId(SamplerMapIndexToIDMap[static_cast<unsigned>(
4377 index_into_sampler_map)]);
David Neto22f144c2017-06-12 14:26:21 -04004378
David Neto862b7d82018-06-14 18:48:37 -04004379 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004380 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004381 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004382 break;
4383 }
4384
Kévin Petit349c9502019-03-28 17:24:14 +00004385 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004386 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4387 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4388 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004389
Kévin Petit617a76d2019-04-04 13:54:16 +01004390 // If the switch above didn't have an entry, the intrinsic may be using
4391 // the name mangling logic.
4392 bool usesMangler = false;
4393 if (opcode == spv::OpNop) {
4394 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4395 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4396 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4397 usesMangler = true;
4398 }
4399 }
4400
Kévin Petit349c9502019-03-28 17:24:14 +00004401 if (opcode != spv::OpNop) {
4402
David Neto22f144c2017-06-12 14:26:21 -04004403 SPIRVOperandList Ops;
4404
Kévin Petit349c9502019-03-28 17:24:14 +00004405 if (!I.getType()->isVoidTy()) {
4406 Ops << MkId(lookupType(I.getType()));
4407 }
David Neto22f144c2017-06-12 14:26:21 -04004408
Kévin Petit617a76d2019-04-04 13:54:16 +01004409 unsigned firstOperand = usesMangler ? 1 : 0;
4410 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004411 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004412 }
4413
Kévin Petit349c9502019-03-28 17:24:14 +00004414 if (!I.getType()->isVoidTy()) {
4415 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004416 }
4417
Kévin Petit349c9502019-03-28 17:24:14 +00004418 SPIRVInstruction *Inst;
4419 if (!I.getType()->isVoidTy()) {
4420 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4421 } else {
4422 Inst = new SPIRVInstruction(opcode, Ops);
4423 }
Kévin Petit8a560882019-03-21 15:24:34 +00004424 SPIRVInstList.push_back(Inst);
4425 break;
4426 }
4427
David Neto22f144c2017-06-12 14:26:21 -04004428 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4429 if (Callee->getName().startswith("spirv.copy_memory")) {
4430 //
4431 // Generate OpCopyMemory.
4432 //
4433
4434 // Ops[0] = Dst ID
4435 // Ops[1] = Src ID
4436 // Ops[2] = Memory Access
4437 // Ops[3] = Alignment
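// For illustration (operands illustrative):
//   OpCopyMemory %dst %src Volatile|Aligned 4
// where the Volatile bit is only present when the volatile argument is
// non-zero.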
4438
4439 auto IsVolatile =
4440 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4441
4442 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4443 : spv::MemoryAccessMaskNone;
4444
4445 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4446
4447 auto Alignment =
4448 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4449
David Neto257c3892018-04-11 13:19:45 -04004450 SPIRVOperandList Ops;
4451 Ops << MkId(VMap[Call->getArgOperand(0)])
4452 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4453 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004454
David Neto87846742018-04-11 17:36:22 -04004455 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004456
4457 SPIRVInstList.push_back(Inst);
4458
4459 break;
4460 }
4461
David Neto22f144c2017-06-12 14:26:21 -04004462 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4463 // Additionally, OpTypeSampledImage is generated.
4464 if (Callee->getName().equals(
4465 "_Z11read_imagef14ocl_image2d_ro11ocl_samplerDv2_f") ||
4466 Callee->getName().equals(
4467 "_Z11read_imagef14ocl_image3d_ro11ocl_samplerDv4_f")) {
4468 //
4469 // Generate OpSampledImage.
4470 //
4471 // Ops[0] = Result Type ID
4472 // Ops[1] = Image ID
4473 // Ops[2] = Sampler ID
4474 //
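// For illustration, the overall lowering of a read_imagef call is roughly
// (ids illustrative):
//   %si = OpSampledImage %sampled_image_ty %image %sampler
//   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0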
4475 SPIRVOperandList Ops;
4476
4477 Value *Image = Call->getArgOperand(0);
4478 Value *Sampler = Call->getArgOperand(1);
4479 Value *Coordinate = Call->getArgOperand(2);
4480
4481 TypeMapType &OpImageTypeMap = getImageTypeMap();
4482 Type *ImageTy = Image->getType()->getPointerElementType();
4483 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004484 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004485 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004486
4487 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004488
4489 uint32_t SampledImageID = nextID;
4490
David Neto87846742018-04-11 17:36:22 -04004491 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004492 SPIRVInstList.push_back(Inst);
4493
4494 //
4495 // Generate OpImageSampleExplicitLod.
4496 //
4497 // Ops[0] = Result Type ID
4498 // Ops[1] = Sampled Image ID
4499 // Ops[2] = Coordinate ID
4500 // Ops[3] = Image Operands Type ID
4501 // Ops[4] ... Ops[n] = Operands ID
4502 //
4503 Ops.clear();
4504
David Neto257c3892018-04-11 13:19:45 -04004505 Ops << MkId(lookupType(Call->getType())) << MkId(SampledImageID)
4506 << MkId(VMap[Coordinate]) << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004507
4508 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004509 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004510
4511 VMap[&I] = nextID;
4512
David Neto87846742018-04-11 17:36:22 -04004513 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004514 SPIRVInstList.push_back(Inst);
4515 break;
4516 }
4517
4518 // write_imagef is mapped to OpImageWrite.
4519 if (Callee->getName().equals(
4520 "_Z12write_imagef14ocl_image2d_woDv2_iDv4_f") ||
4521 Callee->getName().equals(
4522 "_Z12write_imagef14ocl_image3d_woDv4_iDv4_f")) {
4523 //
4524 // Generate OpImageWrite.
4525 //
4526 // Ops[0] = Image ID
4527 // Ops[1] = Coordinate ID
4528 // Ops[2] = Texel ID
4529 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4530 // Ops[4] ... Ops[n] = (Optional) Operands ID
4531 //
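// For illustration (ids illustrative): OpImageWrite %image %coord %texel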
4532 SPIRVOperandList Ops;
4533
4534 Value *Image = Call->getArgOperand(0);
4535 Value *Coordinate = Call->getArgOperand(1);
4536 Value *Texel = Call->getArgOperand(2);
4537
4538 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004539 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004540 uint32_t TexelID = VMap[Texel];
David Neto257c3892018-04-11 13:19:45 -04004541 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004542
David Neto87846742018-04-11 17:36:22 -04004543 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004544 SPIRVInstList.push_back(Inst);
4545 break;
4546 }
4547
David Neto5c22a252018-03-15 16:07:41 -04004548 // get_image_width and get_image_height are mapped to OpImageQuerySize
4549 if (Callee->getName().equals("_Z15get_image_width14ocl_image2d_ro") ||
4550 Callee->getName().equals("_Z15get_image_width14ocl_image2d_wo") ||
4551 Callee->getName().equals("_Z16get_image_height14ocl_image2d_ro") ||
4552 Callee->getName().equals("_Z16get_image_height14ocl_image2d_wo")) {
4553 //
4554 // Generate OpImageQuerySize, then pull out the right component.
4555 // Assume 2D image for now.
4556 //
4557 // Ops[0] = Image ID
4558 //
4559 // %sizes = OpImageQuerySize %uint2 %im
4560 // %result = OpCompositeExtract %uint %sizes 0-or-1
4561 SPIRVOperandList Ops;
4562
4563 // Implement:
4564 // %sizes = OpImageQuerySize %uint2 %im
4565 uint32_t SizesTypeID =
4566 TypeMap[VectorType::get(Type::getInt32Ty(Context), 2)];
David Neto5c22a252018-03-15 16:07:41 -04004567 Value *Image = Call->getArgOperand(0);
4568 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004569 Ops << MkId(SizesTypeID) << MkId(ImageID);
David Neto5c22a252018-03-15 16:07:41 -04004570
4571 uint32_t SizesID = nextID++;
David Neto87846742018-04-11 17:36:22 -04004572 auto *QueryInst =
4573 new SPIRVInstruction(spv::OpImageQuerySize, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004574 SPIRVInstList.push_back(QueryInst);
4575
4576 // Reset value map entry since we generated an intermediate instruction.
4577 VMap[&I] = nextID;
4578
4579 // Implement:
4580 // %result = OpCompositeExtract %uint %sizes 0-or-1
4581 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004582 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004583
4584 uint32_t component = Callee->getName().contains("height") ? 1 : 0;
David Neto257c3892018-04-11 13:19:45 -04004585 Ops << MkNum(component);
David Neto5c22a252018-03-15 16:07:41 -04004586
David Neto87846742018-04-11 17:36:22 -04004587 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004588 SPIRVInstList.push_back(Inst);
4589 break;
4590 }
4591
David Neto22f144c2017-06-12 14:26:21 -04004592 // A call instruction is deferred because it needs the callee function's ID.
4593 // Record the slot's location in the SPIRVInstructionList.
4594 DeferredInsts.push_back(
4595 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4596
David Neto3fbb4072017-10-16 11:28:14 -04004597 // Check whether the implementation of this call uses an extended
4598 // instruction plus one more value-producing instruction. If so, then
4599 // reserve the id for the extra value-producing slot.
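// For example, clz maps to FindUMsb plus an extra OpISub from 31, and
// acospi maps to Acos plus an extra OpFMul by 1/pi, so such calls consume
// two result ids.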
4600 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4601 if (EInst != kGlslExtInstBad) {
4602 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004603 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004604 VMap[&I] = nextID;
4605 nextID++;
4606 }
4607 break;
4608 }
4609 case Instruction::Ret: {
4610 unsigned NumOps = I.getNumOperands();
4611 if (NumOps == 0) {
4612 //
4613 // Generate OpReturn.
4614 //
David Neto87846742018-04-11 17:36:22 -04004615 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004616 } else {
4617 //
4618 // Generate OpReturnValue.
4619 //
4620
4621 // Ops[0] = Return Value ID
4622 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004623
4624 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004625
David Neto87846742018-04-11 17:36:22 -04004626 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004627 SPIRVInstList.push_back(Inst);
4628 break;
4629 }
4630 break;
4631 }
4632 }
4633}
4634
4635void SPIRVProducerPass::GenerateFuncEpilogue() {
4636 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4637
4638 //
4639 // Generate OpFunctionEnd
4640 //
4641
David Neto87846742018-04-11 17:36:22 -04004642 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004643 SPIRVInstList.push_back(Inst);
4644}
4645
4646bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004647 // Don't specialize <4 x i8> if i8 is generally supported.
4648 if (clspv::Option::Int8Support())
4649 return false;
4650
David Neto22f144c2017-06-12 14:26:21 -04004651 LLVMContext &Context = Ty->getContext();
4652 if (Ty->isVectorTy()) {
4653 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4654 Ty->getVectorNumElements() == 4) {
4655 return true;
4656 }
4657 }
4658
4659 return false;
4660}
4661
4662void SPIRVProducerPass::HandleDeferredInstruction() {
4663 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4664 ValueMapType &VMap = getValueMap();
4665 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4666
4667 for (auto DeferredInst = DeferredInsts.rbegin();
4668 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4669 Value *Inst = std::get<0>(*DeferredInst);
4670 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4671 if (InsertPoint != SPIRVInstList.end()) {
4672 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4673 ++InsertPoint;
4674 }
4675 }
4676
4677 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
4678 // Check whether the basic block containing this branch instruction is a
4679 // loop header. If it is a loop header, generate OpLoopMerge and
4680 // OpBranchConditional.
4681 Function *Func = Br->getParent()->getParent();
4682 DominatorTree &DT =
4683 getAnalysis<DominatorTreeWrapperPass>(*Func).getDomTree();
4684 const LoopInfo &LI =
4685 getAnalysis<LoopInfoWrapperPass>(*Func).getLoopInfo();
4686
4687 BasicBlock *BrBB = Br->getParent();
alan-baker49531082019-06-05 17:30:56 -04004688 Loop *L = LI.getLoopFor(BrBB);
David Neto22f144c2017-06-12 14:26:21 -04004689 if (LI.isLoopHeader(BrBB)) {
4690 Value *ContinueBB = nullptr;
4691 Value *MergeBB = nullptr;
4692
David Neto22f144c2017-06-12 14:26:21 -04004693 MergeBB = L->getExitBlock();
4694 if (!MergeBB) {
4695 // The StructurizeCFG pass converts the CFG into a triangle shape in which
4696 // each region has a single entry and a single exit. As a result, a loop
4697 // should not have multiple exits.
4698 llvm_unreachable("Loop has multiple exits???");
4699 }
4700
4701 if (L->isLoopLatch(BrBB)) {
4702 ContinueBB = BrBB;
4703 } else {
4704 // Per SPIR-V spec section 2.11, the Continue Target must dominate the
4705 // back-edge block.
4706 BasicBlock *Header = L->getHeader();
4707 BasicBlock *Latch = L->getLoopLatch();
4708 for (BasicBlock *BB : L->blocks()) {
4709 if (BB == Header) {
4710 continue;
4711 }
4712
4713 // Check whether block dominates block with back-edge.
4714 if (DT.dominates(BB, Latch)) {
4715 ContinueBB = BB;
4716 }
4717 }
4718
4719 if (!ContinueBB) {
4720 llvm_unreachable("Wrong continue block from loop");
4721 }
4722 }
4723
4724 //
4725 // Generate OpLoopMerge.
4726 //
4727 // Ops[0] = Merge Block ID
4728 // Ops[1] = Continue Target ID
4729 // Ops[2] = Loop Control
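// For illustration, the loop header ends up roughly as (labels illustrative):
//   OpLoopMerge %merge %continue None
//   OpBranchConditional %cond %body %merge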
4730 SPIRVOperandList Ops;
4731
4732 // The StructurizeCFG pass has already restructured the CFG. Use the
4733 // loop's exit block as the merge block.
4734 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004735 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004736 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
4737 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004738
David Neto87846742018-04-11 17:36:22 -04004739 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004740 SPIRVInstList.insert(InsertPoint, MergeInst);
4741
4742 } else if (Br->isConditional()) {
alan-baker49531082019-06-05 17:30:56 -04004743 // Generate a selection merge unless this is a back-edge block.
4744 bool HasBackedge = false;
4745 while (L && !HasBackedge) {
4746 if (L->isLoopLatch(BrBB)) {
4747 HasBackedge = true;
David Neto22f144c2017-06-12 14:26:21 -04004748 }
alan-baker49531082019-06-05 17:30:56 -04004749 L = L->getParentLoop();
David Neto22f144c2017-06-12 14:26:21 -04004750 }
alan-baker49531082019-06-05 17:30:56 -04004751 if (!HasBackedge) {
David Neto22f144c2017-06-12 14:26:21 -04004752 //
4753 // Generate OpSelectionMerge.
4754 //
4755 // Ops[0] = Merge Block ID
4756 // Ops[1] = Selection Control
4757 SPIRVOperandList Ops;
4758
4759 // The StructurizeCFG pass has already restructured the CFG. Just use
4760 // the false block of the branch instruction as the merge block.
4761 uint32_t MergeBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004762 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004763
David Neto87846742018-04-11 17:36:22 -04004764 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004765 SPIRVInstList.insert(InsertPoint, MergeInst);
4766 }
4767 }
4768
4769 if (Br->isConditional()) {
4770 //
4771 // Generate OpBranchConditional.
4772 //
4773 // Ops[0] = Condition ID
4774 // Ops[1] = True Label ID
4775 // Ops[2] = False Label ID
4776 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4777 SPIRVOperandList Ops;
4778
4779 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004780 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004781 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004782
4783 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004784
David Neto87846742018-04-11 17:36:22 -04004785 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004786 SPIRVInstList.insert(InsertPoint, BrInst);
4787 } else {
4788 //
4789 // Generate OpBranch.
4790 //
4791 // Ops[0] = Target Label ID
4792 SPIRVOperandList Ops;
4793
4794 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004795 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004796
David Neto87846742018-04-11 17:36:22 -04004797 SPIRVInstList.insert(InsertPoint,
4798 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004799 }
4800 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004801 if (PHI->getType()->isPointerTy()) {
4802 // OpPhi on pointers requires variable pointers.
4803 setVariablePointersCapabilities(
4804 PHI->getType()->getPointerAddressSpace());
4805 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4806 setVariablePointers(true);
4807 }
4808 }
4809
David Neto22f144c2017-06-12 14:26:21 -04004810 //
4811 // Generate OpPhi.
4812 //
4813 // Ops[0] = Result Type ID
4814 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
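// For illustration (ids illustrative):
//   %r = OpPhi %ty %val0 %pred0 %val1 %pred1 ...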
4815 SPIRVOperandList Ops;
4816
David Neto257c3892018-04-11 13:19:45 -04004817 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004818
David Neto22f144c2017-06-12 14:26:21 -04004819 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4820 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04004821 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04004822 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004823 }
4824
4825 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004826 InsertPoint,
4827 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004828 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4829 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004830 auto callee_name = Callee->getName();
4831 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004832
4833 if (EInst) {
4834 uint32_t &ExtInstImportID = getOpExtInstImportID();
4835
4836 //
4837 // Generate OpExtInst.
4838 //
4839
4840 // Ops[0] = Result Type ID
4841 // Ops[1] = Set ID (OpExtInstImport ID)
4842 // Ops[2] = Instruction Number (Literal Number)
4843 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
4844 SPIRVOperandList Ops;
4845
David Neto862b7d82018-06-14 18:48:37 -04004846 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
4847 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004848
David Neto22f144c2017-06-12 14:26:21 -04004849 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4850 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004851 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004852 }
4853
David Neto87846742018-04-11 17:36:22 -04004854 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
4855 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004856 SPIRVInstList.insert(InsertPoint, ExtInst);
4857
David Neto3fbb4072017-10-16 11:28:14 -04004858 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
4859 if (IndirectExtInst != kGlslExtInstBad) {
4860 // Generate one more instruction that uses the result of the extended
4861 // instruction. Its result id is one more than the id of the
4862 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04004863 LLVMContext &Context =
4864 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04004865
David Neto3fbb4072017-10-16 11:28:14 -04004866 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
4867 &VMap, &SPIRVInstList, &InsertPoint](
4868 spv::Op opcode, Constant *constant) {
4869 //
4870 // Generate instruction like:
4871 // result = opcode constant <extinst-result>
4872 //
4873 // Ops[0] = Result Type ID
4874 // Ops[1] = Operand 0 ;; the constant, suitably splatted
4875 // Ops[2] = Operand 1 ;; the result of the extended instruction
4876 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004877
David Neto3fbb4072017-10-16 11:28:14 -04004878 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04004879 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04004880
4881 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
4882 constant = ConstantVector::getSplat(
4883 static_cast<unsigned>(vectorTy->getNumElements()), constant);
4884 }
David Neto257c3892018-04-11 13:19:45 -04004885 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04004886
4887 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004888 InsertPoint, new SPIRVInstruction(
4889 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04004890 };
4891
4892 switch (IndirectExtInst) {
4893 case glsl::ExtInstFindUMsb: // Implementing clz
4894 generate_extra_inst(
4895 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
4896 break;
4897 case glsl::ExtInstAcos: // Implementing acospi
4898 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01004899 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04004900 case glsl::ExtInstAtan2: // Implementing atan2pi
4901 generate_extra_inst(
4902 spv::OpFMul,
4903 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
4904 break;
4905
4906 default:
4907 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04004908 }
David Neto22f144c2017-06-12 14:26:21 -04004909 }
David Neto3fbb4072017-10-16 11:28:14 -04004910
alan-bakerb39c8262019-03-08 14:03:37 -05004911 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04004912 //
4913 // Generate OpBitCount
4914 //
4915 // Ops[0] = Result Type ID
4916 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04004917 SPIRVOperandList Ops;
4918 Ops << MkId(lookupType(Call->getType()))
4919 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004920
4921 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004922 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04004923 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04004924
David Neto862b7d82018-06-14 18:48:37 -04004925 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04004926
4927 // Generate an OpCompositeConstruct
4928 SPIRVOperandList Ops;
4929
4930 // The result type.
David Neto257c3892018-04-11 13:19:45 -04004931 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04004932
4933 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04004934 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04004935 }
4936
4937 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004938 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
4939 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04004940
Alan Baker202c8c72018-08-13 13:47:44 -04004941 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
4942
4943 // We have already mapped the call's result value to an ID.
4944 // Don't generate any code now.
4945
4946 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004947
4948 // We have already mapped the call's result value to an ID.
4949 // Don't generate any code now.
4950
David Neto22f144c2017-06-12 14:26:21 -04004951 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004952 if (Call->getType()->isPointerTy()) {
4953 // Functions returning pointers require variable pointers.
4954 setVariablePointersCapabilities(
4955 Call->getType()->getPointerAddressSpace());
4956 }
4957
David Neto22f144c2017-06-12 14:26:21 -04004958 //
4959 // Generate OpFunctionCall.
4960 //
4961
4962 // Ops[0] = Result Type ID
4963 // Ops[1] = Callee Function ID
4964 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
4965 SPIRVOperandList Ops;
4966
David Neto862b7d82018-06-14 18:48:37 -04004967 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004968
4969 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04004970 if (CalleeID == 0) {
4971 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004972 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004973 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4974 // causes an infinite loop. Instead, go ahead and generate
4975 // the bad function call. A validator will catch the 0-Id.
4976 // llvm_unreachable("Can't translate function call");
4977 }
David Neto22f144c2017-06-12 14:26:21 -04004978
David Neto257c3892018-04-11 13:19:45 -04004979 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04004980
David Neto22f144c2017-06-12 14:26:21 -04004981 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4982 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05004983 auto *operand = Call->getOperand(i);
4984 if (operand->getType()->isPointerTy()) {
4985 auto sc =
4986 GetStorageClass(operand->getType()->getPointerAddressSpace());
4987 if (sc == spv::StorageClassStorageBuffer) {
4988 // Passing SSBO by reference requires variable pointers storage
4989 // buffer.
4990 setVariablePointersStorageBuffer(true);
4991 } else if (sc == spv::StorageClassWorkgroup) {
4992 // Workgroup references require variable pointers if they are not
4993 // memory object declarations.
4994 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4995 // Workgroup accessor represents a variable reference.
4996 if (!operand_call->getCalledFunction()->getName().startswith(
4997 clspv::WorkgroupAccessorFunction()))
4998 setVariablePointers(true);
4999 } else {
5000 // Arguments are function parameters.
5001 if (!isa<Argument>(operand))
5002 setVariablePointers(true);
5003 }
5004 }
5005 }
5006 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005007 }
5008
David Neto87846742018-04-11 17:36:22 -04005009 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5010 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005011 SPIRVInstList.insert(InsertPoint, CallInst);
5012 }
5013 }
5014 }
5015}
5016
David Neto1a1a0582017-07-07 12:01:44 -04005017void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005018 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005019 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005020 }
David Neto1a1a0582017-07-07 12:01:44 -04005021
5022 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005023
5024 // Find an iterator pointing just past the last decoration.
5025 bool seen_decorations = false;
5026 auto DecoInsertPoint =
5027 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5028 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5029 const bool is_decoration =
5030 Inst->getOpcode() == spv::OpDecorate ||
5031 Inst->getOpcode() == spv::OpMemberDecorate;
5032 if (is_decoration) {
5033 seen_decorations = true;
5034 return false;
5035 } else {
5036 return seen_decorations;
5037 }
5038 });
5039
David Netoc6f3ab22018-04-06 18:02:31 -04005040 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5041 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005042 for (auto *type : getTypesNeedingArrayStride()) {
5043 Type *elemTy = nullptr;
5044 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5045 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005046 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005047 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005048 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005049 elemTy = seqTy->getSequentialElementType();
5050 } else {
5051 errs() << "Unhandled strided type " << *type << "\n";
5052 llvm_unreachable("Unhandled strided type");
5053 }
David Neto1a1a0582017-07-07 12:01:44 -04005054
5055 // Ops[0] = Target ID
5056 // Ops[1] = Decoration (ArrayStride)
5057 // Ops[2] = Stride number (Literal Number)
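// For illustration (id and stride illustrative):
//   OpDecorate %ptr_type ArrayStride 16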
5058 SPIRVOperandList Ops;
5059
David Neto85082642018-03-24 06:55:20 -07005060 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005061 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005062
5063 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5064 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005065
David Neto87846742018-04-11 17:36:22 -04005066 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005067 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5068 }
David Netoc6f3ab22018-04-06 18:02:31 -04005069
5070 // Emit SpecId decorations targeting the array size value.
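// For illustration (id and number illustrative):
//   OpDecorate %workgroup_array_size SpecId 3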
Alan Baker202c8c72018-08-13 13:47:44 -04005071 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5072 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005073 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005074 SPIRVOperandList Ops;
5075 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5076 << MkNum(arg_info.spec_id);
5077 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005078 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005079 }
David Neto1a1a0582017-07-07 12:01:44 -04005080}
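
// A worked example of the ArrayStride computation above (illustrative; real
// ids and strides depend on the module and its data layout): for a strided
// pointer whose element type is float4, GetTypeAllocSize returns 16, so the
// emitted decoration disassembles to something like
//
//   OpDecorate %_ptr_StorageBuffer_v4float ArrayStride 16
//
// while an element type of { float, int } would typically yield ArrayStride 8
// under the default layout.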
5081
David Neto22f144c2017-06-12 14:26:21 -04005082glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5083 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005084 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5085 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5086 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5087 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005088 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5089 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5090 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5091 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005092 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5093 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5094 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5095 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005096 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5097 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5098 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5099 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005100 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5101 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5102 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5103 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5104 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5105 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5106 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5107 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005108 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5109 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5110 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5111 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5112 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5113 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5114 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5115 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005116 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5117 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5118 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5119 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5120 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5121 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5122 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5123 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005124 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5125 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5126 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5127 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5128 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5129 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5130 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5131 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005132 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5133 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5134 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5135 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005136 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5137 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5138 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5139 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5140 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5141 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5142 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5143 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005144 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5145 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5146 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5147 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5148 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5149 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5150 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5151 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005152 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5153 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5154 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5155 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5156 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5157 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5158 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5159 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005160 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5161 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5162 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5163 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5164 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5165 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5166 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5167 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005168 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5169 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5170 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5171 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5172 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005173 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5174 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5175 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5176 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5177 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5178 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5179 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5180 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005181 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5182 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5183 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5184 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5185 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5186 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5187 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5188 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005189 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5190 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5191 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5192 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5193 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5194 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5195 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5196 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005197 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5198 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5199 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5200 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5201 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5202 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5203 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5204 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005205 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5206 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5207 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5208 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5209 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5210 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5211 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5212 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5213 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5214 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5215 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5216 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5217 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5218 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5219 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5220 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5221 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5222 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5223 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5224 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5225 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5226 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5227 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5228 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5229 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5230 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5231 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5232 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5233 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5234 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5235 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5236 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5237 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5238 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5239 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5240 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5241 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5242 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5243 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5244 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5245 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005246 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005247 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5248 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5249 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5250 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5251 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5252 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5253 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5254 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5255 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5256 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5257 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5258 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5259 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5260 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5261 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5262 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5263 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005264 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005265 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005266 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005267 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005268 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005269 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5270 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005271 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005272 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5273 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5274 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005275 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5276 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5277 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5278 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005279 .Default(kGlslExtInstBad);
5280}
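
// The keys above are Itanium-mangled OpenCL builtin names: "_Z3maxii" is
// max(int, int), "Dv2_j" encodes a 2-element uint vector, and the parameter
// codes distinguish signedness (c/s/i/l for signed char/short/int/long,
// h/t/j/m for their unsigned counterparts), which is why the same builtin
// maps to SMax for one suffix and UMax for another. Illustrative lookups
// (member calls on this pass):
//
//   getExtInstEnum("_Z3maxDv2_jS_");  // uint2 overload -> ExtInstUMax
//   getExtInstEnum("_Z9half_sqrtf");  // StartsWith("_Z9half_sqrt") -> ExtInstSqrt
//   getExtInstEnum("_Z6foobarf");     // no match -> kGlslExtInstBad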
5281
5282glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5283 // Check indirect cases.
5284 return StringSwitch<glsl::ExtInst>(Name)
5285 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5286      // Use exact match on the float argument because these need a multiply
5287      // by a constant of the right floating point type.
5288 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5289 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5290 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5291 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5292 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5293 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5294 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5295 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005296 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5297 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5298 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5299 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005300 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5301 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5302 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5303 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5304 .Default(kGlslExtInstBad);
5305}
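
// "Indirect" here means the mangled builtin maps to the listed extended
// instruction plus extra arithmetic emitted where the call is translated,
// rather than to a single OpExtInst: acospi(x), for example, is acos(x)
// scaled by 1/pi, and clz() is derived from FindUMsb. A sketch of the
// relationship (illustrative only, not what this lookup itself emits):
//
//   acospi(x) == acos(x) * (1.0f / float(M_PI))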
5306
alan-bakerb6b09dc2018-11-08 16:59:28 -05005307glsl::ExtInst
5308SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005309 auto direct = getExtInstEnum(Name);
5310 if (direct != kGlslExtInstBad)
5311 return direct;
5312 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005313}
5314
David Neto22f144c2017-06-12 14:26:21 -04005315void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005316 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005317}
5318
5319void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5320 WriteOneWord(Inst->getResultID());
5321}
5322
5323void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5324  // High 16 bits: word count
5325  // Low 16 bits: opcode
5326 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005327 const uint32_t count = Inst->getWordCount();
5328 if (count > 65535) {
5329 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5330 llvm_unreachable("Word count too high");
5331 }
David Neto22f144c2017-06-12 14:26:21 -04005332 Word |= Inst->getWordCount() << 16;
5333 WriteOneWord(Word);
5334}
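
// A worked example of the packing above: OpTypeFloat has opcode 22 (0x0016),
// and "%float = OpTypeFloat 32" occupies three words (opcode word, result id,
// width), so the word emitted for it here is (3 << 16) | 22 = 0x00030016.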
5335
5336void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
5337 SPIRVOperandType OpTy = Op->getType();
5338 switch (OpTy) {
5339 default: {
5340 llvm_unreachable("Unsupported SPIRV Operand Type???");
5341 break;
5342 }
5343 case SPIRVOperandType::NUMBERID: {
5344 WriteOneWord(Op->getNumID());
5345 break;
5346 }
5347 case SPIRVOperandType::LITERAL_STRING: {
5348 std::string Str = Op->getLiteralStr();
5349 const char *Data = Str.c_str();
5350 size_t WordSize = Str.size() / 4;
5351 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5352 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5353 }
5354
5355 uint32_t Remainder = Str.size() % 4;
5356 uint32_t LastWord = 0;
5357 if (Remainder) {
5358 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5359 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5360 }
5361 }
5362
5363 WriteOneWord(LastWord);
5364 break;
5365 }
5366 case SPIRVOperandType::LITERAL_INTEGER:
5367 case SPIRVOperandType::LITERAL_FLOAT: {
5368 auto LiteralNum = Op->getLiteralNum();
5369    // TODO: Handle LiteralNum carefully.
5370 for (auto Word : LiteralNum) {
5371 WriteOneWord(Word);
5372 }
5373 break;
5374 }
5375 }
5376}
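
// A worked example of the LITERAL_STRING case above: for Str == "abc",
// WordSize is 0 and Remainder is 3, so the single emitted word is
// 'a' | 'b' << 8 | 'c' << 16 = 0x00636261, and its zero high byte doubles as
// the NUL terminator SPIR-V requires. When the length is an exact multiple of
// four (e.g. "abcd"), the trailing LastWord of zero supplies that terminator
// as a separate word.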
5377
5378void SPIRVProducerPass::WriteSPIRVBinary() {
5379 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5380
5381 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005382 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005383 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5384
5385 switch (Opcode) {
5386 default: {
David Neto5c22a252018-03-15 16:07:41 -04005387 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005388 llvm_unreachable("Unsupported SPIRV instruction");
5389 break;
5390 }
5391 case spv::OpCapability:
5392 case spv::OpExtension:
5393 case spv::OpMemoryModel:
5394 case spv::OpEntryPoint:
5395 case spv::OpExecutionMode:
5396 case spv::OpSource:
5397 case spv::OpDecorate:
5398 case spv::OpMemberDecorate:
5399 case spv::OpBranch:
5400 case spv::OpBranchConditional:
5401 case spv::OpSelectionMerge:
5402 case spv::OpLoopMerge:
5403 case spv::OpStore:
5404 case spv::OpImageWrite:
5405 case spv::OpReturnValue:
5406 case spv::OpControlBarrier:
5407 case spv::OpMemoryBarrier:
5408 case spv::OpReturn:
5409 case spv::OpFunctionEnd:
5410 case spv::OpCopyMemory: {
5411 WriteWordCountAndOpcode(Inst);
5412 for (uint32_t i = 0; i < Ops.size(); i++) {
5413 WriteOperand(Ops[i]);
5414 }
5415 break;
5416 }
5417 case spv::OpTypeBool:
5418 case spv::OpTypeVoid:
5419 case spv::OpTypeSampler:
5420 case spv::OpLabel:
5421 case spv::OpExtInstImport:
5422 case spv::OpTypePointer:
5423 case spv::OpTypeRuntimeArray:
5424 case spv::OpTypeStruct:
5425 case spv::OpTypeImage:
5426 case spv::OpTypeSampledImage:
5427 case spv::OpTypeInt:
5428 case spv::OpTypeFloat:
5429 case spv::OpTypeArray:
5430 case spv::OpTypeVector:
5431 case spv::OpTypeFunction: {
5432 WriteWordCountAndOpcode(Inst);
5433 WriteResultID(Inst);
5434 for (uint32_t i = 0; i < Ops.size(); i++) {
5435 WriteOperand(Ops[i]);
5436 }
5437 break;
5438 }
5439 case spv::OpFunction:
5440 case spv::OpFunctionParameter:
5441 case spv::OpAccessChain:
5442 case spv::OpPtrAccessChain:
5443 case spv::OpInBoundsAccessChain:
5444 case spv::OpUConvert:
5445 case spv::OpSConvert:
5446 case spv::OpConvertFToU:
5447 case spv::OpConvertFToS:
5448 case spv::OpConvertUToF:
5449 case spv::OpConvertSToF:
5450 case spv::OpFConvert:
5451 case spv::OpConvertPtrToU:
5452 case spv::OpConvertUToPtr:
5453 case spv::OpBitcast:
5454 case spv::OpIAdd:
5455 case spv::OpFAdd:
5456 case spv::OpISub:
5457 case spv::OpFSub:
5458 case spv::OpIMul:
5459 case spv::OpFMul:
5460 case spv::OpUDiv:
5461 case spv::OpSDiv:
5462 case spv::OpFDiv:
5463 case spv::OpUMod:
5464 case spv::OpSRem:
5465 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005466 case spv::OpUMulExtended:
5467 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005468 case spv::OpBitwiseOr:
5469 case spv::OpBitwiseXor:
5470 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005471 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005472 case spv::OpShiftLeftLogical:
5473 case spv::OpShiftRightLogical:
5474 case spv::OpShiftRightArithmetic:
5475 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005476 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005477 case spv::OpCompositeExtract:
5478 case spv::OpVectorExtractDynamic:
5479 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005480 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005481 case spv::OpVectorInsertDynamic:
5482 case spv::OpVectorShuffle:
5483 case spv::OpIEqual:
5484 case spv::OpINotEqual:
5485 case spv::OpUGreaterThan:
5486 case spv::OpUGreaterThanEqual:
5487 case spv::OpULessThan:
5488 case spv::OpULessThanEqual:
5489 case spv::OpSGreaterThan:
5490 case spv::OpSGreaterThanEqual:
5491 case spv::OpSLessThan:
5492 case spv::OpSLessThanEqual:
5493 case spv::OpFOrdEqual:
5494 case spv::OpFOrdGreaterThan:
5495 case spv::OpFOrdGreaterThanEqual:
5496 case spv::OpFOrdLessThan:
5497 case spv::OpFOrdLessThanEqual:
5498 case spv::OpFOrdNotEqual:
5499 case spv::OpFUnordEqual:
5500 case spv::OpFUnordGreaterThan:
5501 case spv::OpFUnordGreaterThanEqual:
5502 case spv::OpFUnordLessThan:
5503 case spv::OpFUnordLessThanEqual:
5504 case spv::OpFUnordNotEqual:
5505 case spv::OpExtInst:
5506 case spv::OpIsInf:
5507 case spv::OpIsNan:
5508 case spv::OpAny:
5509 case spv::OpAll:
5510 case spv::OpUndef:
5511 case spv::OpConstantNull:
5512 case spv::OpLogicalOr:
5513 case spv::OpLogicalAnd:
5514 case spv::OpLogicalNot:
5515 case spv::OpLogicalNotEqual:
5516 case spv::OpConstantComposite:
5517 case spv::OpSpecConstantComposite:
5518 case spv::OpConstantTrue:
5519 case spv::OpConstantFalse:
5520 case spv::OpConstant:
5521 case spv::OpSpecConstant:
5522 case spv::OpVariable:
5523 case spv::OpFunctionCall:
5524 case spv::OpSampledImage:
5525 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005526 case spv::OpImageQuerySize:
David Neto22f144c2017-06-12 14:26:21 -04005527 case spv::OpSelect:
5528 case spv::OpPhi:
5529 case spv::OpLoad:
5530 case spv::OpAtomicIAdd:
5531 case spv::OpAtomicISub:
5532 case spv::OpAtomicExchange:
5533 case spv::OpAtomicIIncrement:
5534 case spv::OpAtomicIDecrement:
5535 case spv::OpAtomicCompareExchange:
5536 case spv::OpAtomicUMin:
5537 case spv::OpAtomicSMin:
5538 case spv::OpAtomicUMax:
5539 case spv::OpAtomicSMax:
5540 case spv::OpAtomicAnd:
5541 case spv::OpAtomicOr:
5542 case spv::OpAtomicXor:
5543 case spv::OpDot: {
5544 WriteWordCountAndOpcode(Inst);
5545 WriteOperand(Ops[0]);
5546 WriteResultID(Inst);
5547 for (uint32_t i = 1; i < Ops.size(); i++) {
5548 WriteOperand(Ops[i]);
5549 }
5550 break;
5551 }
5552 }
5553 }
5554}
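
// The three groups of cases above correspond to the three operand layouts in
// the binary encoding: instructions with no result id write only their
// operands; type-style instructions write the result id first and then their
// operands; value-producing instructions write Ops[0] (the result type id),
// then the result id, then the remaining operands. For example, an OpIAdd
// whose operand list was built as {result type, lhs, rhs} is serialized as:
//
//   word-count|opcode, %result_type, %result_id, %lhs, %rhs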
Alan Baker9bf93fb2018-08-28 16:59:26 -04005555
alan-bakerb6b09dc2018-11-08 16:59:28 -05005556bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005557 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005558 case Type::HalfTyID:
5559 case Type::FloatTyID:
5560 case Type::DoubleTyID:
5561 case Type::IntegerTyID:
5562 case Type::VectorTyID:
5563 return true;
5564 case Type::PointerTyID: {
5565 const PointerType *pointer_type = cast<PointerType>(type);
5566 if (pointer_type->getPointerAddressSpace() !=
5567 AddressSpace::UniformConstant) {
5568 auto pointee_type = pointer_type->getPointerElementType();
5569 if (pointee_type->isStructTy() &&
5570 cast<StructType>(pointee_type)->isOpaque()) {
5571 // Images and samplers are not nullable.
5572 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005573 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005574 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005575 return true;
5576 }
5577 case Type::ArrayTyID:
5578 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5579 case Type::StructTyID: {
5580 const StructType *struct_type = cast<StructType>(type);
5581 // Images and samplers are not nullable.
5582 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005583 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005584 for (const auto element : struct_type->elements()) {
5585 if (!IsTypeNullable(element))
5586 return false;
5587 }
5588 return true;
5589 }
5590 default:
5591 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005592 }
5593}
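
// Illustrative consequences of the rule above: scalars, vectors, and plain
// structs or arrays of them (e.g. { float4, int }) are nullable, so an
// OpConstantNull can stand in for them, while opaque image and sampler types,
// and any aggregate that contains one, are not.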
Alan Bakerfcda9482018-10-02 17:09:59 -04005594
5595void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5596 if (auto *offsets_md =
5597 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5598    // Metadata is stored as key-value pair operands. The first element of each
5599 // operand is the type and the second is a vector of offsets.
5600 for (const auto *operand : offsets_md->operands()) {
5601 const auto *pair = cast<MDTuple>(operand);
5602 auto *type =
5603 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5604 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5605 std::vector<uint32_t> offsets;
5606 for (const Metadata *offset_md : offset_vector->operands()) {
5607 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005608 offsets.push_back(static_cast<uint32_t>(
5609 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005610 }
5611 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5612 }
5613 }
5614
5615 if (auto *sizes_md =
5616 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5617 // Metadata is stored as key-value pair operands. The first element of each
5618 // operand is the type and the second is a triple of sizes: type size in
5619 // bits, store size and alloc size.
5620 for (const auto *operand : sizes_md->operands()) {
5621 const auto *pair = cast<MDTuple>(operand);
5622 auto *type =
5623 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5624 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5625 uint64_t type_size_in_bits =
5626 cast<ConstantInt>(
5627 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5628 ->getZExtValue();
5629 uint64_t type_store_size =
5630 cast<ConstantInt>(
5631 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5632 ->getZExtValue();
5633 uint64_t type_alloc_size =
5634 cast<ConstantInt>(
5635 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5636 ->getZExtValue();
5637 RemappedUBOTypeSizes.insert(std::make_pair(
5638 type, std::make_tuple(type_size_in_bits, type_store_size,
5639 type_alloc_size)));
5640 }
5641 }
5642}
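
// A minimal usage sketch (assuming the metadata was present and the maps were
// populated): the size helpers below prefer the recorded triple and fall back
// to DataLayout only for types that were never remapped, e.g.
//
//   uint64_t sz = GetTypeAllocSize(some_remapped_struct_ty, DL);
//   // -> std::get<2>(RemappedUBOTypeSizes[some_remapped_struct_ty]) if the
//   //    type was recorded, DL.getTypeAllocSize(...) otherwise.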
5643
5644uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5645 const DataLayout &DL) {
5646 auto iter = RemappedUBOTypeSizes.find(type);
5647 if (iter != RemappedUBOTypeSizes.end()) {
5648 return std::get<0>(iter->second);
5649 }
5650
5651 return DL.getTypeSizeInBits(type);
5652}
5653
5654uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5655 auto iter = RemappedUBOTypeSizes.find(type);
5656 if (iter != RemappedUBOTypeSizes.end()) {
5657 return std::get<1>(iter->second);
5658 }
5659
5660 return DL.getTypeStoreSize(type);
5661}
5662
5663uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5664 auto iter = RemappedUBOTypeSizes.find(type);
5665 if (iter != RemappedUBOTypeSizes.end()) {
5666 return std::get<2>(iter->second);
5667 }
5668
5669 return DL.getTypeAllocSize(type);
5670}
alan-baker5b86ed72019-02-15 08:26:50 -05005671
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005672void SPIRVProducerPass::setVariablePointersCapabilities(
5673 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005674 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5675 setVariablePointersStorageBuffer(true);
5676 } else {
5677 setVariablePointers(true);
5678 }
5679}
5680
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005681Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005682 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5683 return GetBasePointer(gep->getPointerOperand());
5684 }
5685
5686 // Conservatively return |v|.
5687 return v;
5688}
5689
5690bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5691 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5692 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5693 if (lhs_call->getCalledFunction()->getName().startswith(
5694 clspv::ResourceAccessorFunction()) &&
5695 rhs_call->getCalledFunction()->getName().startswith(
5696 clspv::ResourceAccessorFunction())) {
5697 // For resource accessors, match descriptor set and binding.
5698 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5699 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5700 return true;
5701 } else if (lhs_call->getCalledFunction()->getName().startswith(
5702 clspv::WorkgroupAccessorFunction()) &&
5703 rhs_call->getCalledFunction()->getName().startswith(
5704 clspv::WorkgroupAccessorFunction())) {
5705 // For workgroup resources, match spec id.
5706 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5707 return true;
5708 }
5709 }
5710 }
5711
5712 return false;
5713}
5714
5715bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5716 assert(inst->getType()->isPointerTy());
5717 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5718 spv::StorageClassStorageBuffer);
5719 const bool hack_undef = clspv::Option::HackUndef();
5720 if (auto *select = dyn_cast<SelectInst>(inst)) {
5721 auto *true_base = GetBasePointer(select->getTrueValue());
5722 auto *false_base = GetBasePointer(select->getFalseValue());
5723
5724 if (true_base == false_base)
5725 return true;
5726
5727    // If either the true or false operand is null, then we satisfy the
5728    // same-object constraint.
5729 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5730 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5731 return true;
5732 }
5733
5734 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5735 if (false_cst->isNullValue() ||
5736 (hack_undef && isa<UndefValue>(false_base)))
5737 return true;
5738 }
5739
5740 if (sameResource(true_base, false_base))
5741 return true;
5742 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5743 Value *value = nullptr;
5744 bool ok = true;
5745 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5746 auto *base = GetBasePointer(phi->getIncomingValue(i));
5747      // Null values satisfy the constraint of selecting from the same
5748      // object.
5749 if (!value) {
5750 if (auto *cst = dyn_cast<Constant>(base)) {
5751 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5752 value = base;
5753 } else {
5754 value = base;
5755 }
5756 } else if (base != value) {
5757 if (auto *base_cst = dyn_cast<Constant>(base)) {
5758 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5759 continue;
5760 }
5761
5762 if (sameResource(value, base))
5763 continue;
5764
5765 // Values don't represent the same base.
5766 ok = false;
5767 }
5768 }
5769
5770 return ok;
5771 }
5772
5773 // Conservatively return false.
5774 return false;
5775}
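
// Illustrative consequence of the check above: a select or phi between two
// pointers rooted at the same storage buffer (or between a real pointer and a
// null/undef placeholder) satisfies the "selection from the same object"
// rule, whereas mixing pointers derived from two different descriptor
// bindings does not, and the function then conservatively returns false.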
alan-bakere9308012019-03-15 10:25:13 -04005776
5777bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5778 if (!Arg.getType()->isPointerTy() ||
5779 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5780 // Only SSBOs need to be annotated as coherent.
5781 return false;
5782 }
5783
5784 DenseSet<Value *> visited;
5785 std::vector<Value *> stack;
5786 for (auto *U : Arg.getParent()->users()) {
5787 if (auto *call = dyn_cast<CallInst>(U)) {
5788 stack.push_back(call->getOperand(Arg.getArgNo()));
5789 }
5790 }
5791
5792 while (!stack.empty()) {
5793 Value *v = stack.back();
5794 stack.pop_back();
5795
5796 if (!visited.insert(v).second)
5797 continue;
5798
5799 auto *resource_call = dyn_cast<CallInst>(v);
5800 if (resource_call &&
5801 resource_call->getCalledFunction()->getName().startswith(
5802 clspv::ResourceAccessorFunction())) {
5803 // If this is a resource accessor function, check if the coherent operand
5804 // is set.
5805 const auto coherent =
5806 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5807 ->getZExtValue());
5808 if (coherent == 1)
5809 return true;
5810 } else if (auto *arg = dyn_cast<Argument>(v)) {
5811 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005812 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005813 if (auto *call = dyn_cast<CallInst>(U)) {
5814 stack.push_back(call->getOperand(arg->getArgNo()));
5815 }
5816 }
5817 } else if (auto *user = dyn_cast<User>(v)) {
5818 // If this is a user, traverse all operands that could lead to resource
5819 // variables.
5820 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5821 Value *operand = user->getOperand(i);
5822 if (operand->getType()->isPointerTy() &&
5823 operand->getType()->getPointerAddressSpace() ==
5824 clspv::AddressSpace::Global) {
5825 stack.push_back(operand);
5826 }
5827 }
5828 }
5829 }
5830
5831 // No coherent resource variables encountered.
5832 return false;
5833}