// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

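// Operand kinds: a NUMBERID operand references another instruction's result
// <id>, while the LITERAL_* kinds hold immediate values encoded directly into
// the instruction words (floats are carried as their 32-bit word patterns).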
enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() { return Type; };
  uint32_t getNumID() { return LiteralNum[0]; };
  std::string getLiteralStr() { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };

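  // Returns the number of 32-bit words this operand occupies in the SPIR-V
  // binary. For example, a LITERAL_STRING of length 5 plus its terminating
  // null packs into (5 + 4) / 4 = 2 words.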
David Neto87846742018-04-11 17:36:22 -0400105 uint32_t GetNumWords() const {
106 switch (Type) {
107 case NUMBERID:
108 return 1;
109 case LITERAL_INTEGER:
110 case LITERAL_FLOAT:
David Netoee2660d2018-06-28 16:31:29 -0400111 return uint32_t(LiteralNum.size());
David Neto87846742018-04-11 17:36:22 -0400112 case LITERAL_STRING:
113 // Account for the terminating null character.
David Netoee2660d2018-06-28 16:31:29 -0400114 return uint32_t((LiteralStr.size() + 4) / 4);
David Neto87846742018-04-11 17:36:22 -0400115 }
116 llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
117 }
118
David Neto22f144c2017-06-12 14:26:21 -0400119private:
120 SPIRVOperandType Type;
121 std::string LiteralStr;
122 SmallVector<uint32_t, 4> LiteralNum;
123};
124
David Netoc6f3ab22018-04-06 18:02:31 -0400125class SPIRVOperandList {
126public:
127 SPIRVOperandList() {}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500128 SPIRVOperandList(const SPIRVOperandList &other) = delete;
129 SPIRVOperandList(SPIRVOperandList &&other) {
David Netoc6f3ab22018-04-06 18:02:31 -0400130 contents_ = std::move(other.contents_);
131 other.contents_.clear();
132 }
133 SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
134 : contents_(init.begin(), init.end()) {}
135 operator ArrayRef<SPIRVOperand *>() { return contents_; }
136 void push_back(SPIRVOperand *op) { contents_.push_back(op); }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500137 void clear() { contents_.clear(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400138 size_t size() const { return contents_.size(); }
139 SPIRVOperand *&operator[](size_t i) { return contents_[i]; }
140
David Neto87846742018-04-11 17:36:22 -0400141 const SmallVector<SPIRVOperand *, 8> &getOperands() const {
142 return contents_;
143 }
144
David Netoc6f3ab22018-04-06 18:02:31 -0400145private:
alan-bakerb6b09dc2018-11-08 16:59:28 -0500146 SmallVector<SPIRVOperand *, 8> contents_;
David Netoc6f3ab22018-04-06 18:02:31 -0400147};
148
149SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
150 list.push_back(elem);
151 return list;
152}
153
alan-bakerb6b09dc2018-11-08 16:59:28 -0500154SPIRVOperand *MkNum(uint32_t num) {
David Netoc6f3ab22018-04-06 18:02:31 -0400155 return new SPIRVOperand(LITERAL_INTEGER, num);
156}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500157SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
David Neto257c3892018-04-11 13:19:45 -0400158 return new SPIRVOperand(LITERAL_INTEGER, num_vec);
159}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500160SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
David Neto257c3892018-04-11 13:19:45 -0400161 return new SPIRVOperand(LITERAL_FLOAT, num_vec);
162}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500163SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
164SPIRVOperand *MkString(StringRef str) {
David Neto257c3892018-04-11 13:19:45 -0400165 return new SPIRVOperand(LITERAL_STRING, str);
166}
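// The Mk* helpers above and operator<< let call sites build an operand list
// by streaming, e.g. (illustrative only; the names are made up):
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkNum(literal_value);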

struct SPIRVInstruction {
  // Create an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
        Operands(Ops.begin(), Ops.end()) {
    for (auto *operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
  }
  // Create an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                            ArrayRef<SPIRVOperand *> Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
        Operands(Ops.begin(), Ops.end()) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    for (auto *operand : Ops) {
      WordCount += operand->GetNumWords();
    }
  }
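  // Note: in a SPIR-V binary the word count and opcode share the instruction's
  // first word, which is why WordCount starts at 1 above, or at 2 when the
  // result ID adds a second mandatory word.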

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<SPIRVOperand *, 4> Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0), constant_i32_zero_id_(0) {
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

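  // Returns the SPIR-V id previously assigned to |Ty|. A pointer to an opaque
  // struct outside the UniformConstant address space is looked up via its
  // pointee type, matching how such types are registered. Aborts if the type
  // was never registered.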
  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  // Return the SPIR-V Id for 32-bit constant zero. The constant must already
  // have been created.
  uint32_t GetI32Zero();
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, uint32_t> SamplerMapIndexToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // The ID of 32-bit integer zero constant. This is only valid after
  // GenerateSPIRVConstants has run.
  uint32_t constant_i32_zero_id_;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  constant_i32_zero_id_ = 0; // Reset, for the benefit of validity checks.

  PopulateUBOTypeMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // If we have a sampler map, we might have literal samplers to generate.
  if (0 < getSamplerMap().size()) {
    GenerateSamplers(module);
  }

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }
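  // (Each group of four bytes from the temporary binary stream is reassembled
  // above into a little-endian 32-bit word before being printed as a C
  // initializer element.)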

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
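// The five words written above form the SPIR-V module header: magic number,
// version, generator magic (vendor ID in the upper 16 bits), the id bound,
// and the reserved schema word. The bound written here is only a placeholder;
// patchHeader() overwrites it once nextID is final.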

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR such as global variables for arguments,
  // constants, and pointer types for argument access. This information is
  // artificial because we need Vulkan SPIR-V output. This function is
  // executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  // These function calls need a <2 x i32> as an intermediate result but not
  // the final result.
  std::unordered_set<std::string> NeedsIVec2{
      "_Z15get_image_width14ocl_image2d_ro",
      "_Z15get_image_width14ocl_image2d_wo",
      "_Z16get_image_height14ocl_image2d_ro",
      "_Z16get_image_height14ocl_image2d_wo",
  };
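  // (The <2 x i32> is needed because the SPIR-V size query on a 2D image
  // yields a two-component vector, from which the scalar width or height is
  // then extracted.)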

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constants 0 and 1, so the constants are added
          // here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (callee_name.equals(
                  "_Z11read_imagef14ocl_image2d_ro11ocl_samplerDv2_f") ||
              callee_name.equals(
                  "_Z11read_imagef14ocl_image3d_ro11ocl_samplerDv4_f")) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;

            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (NeedsIVec2.find(callee_name) != NeedsIVec2.end()) {
            FindType(VectorType::get(Type::getInt32Ty(Context), 2));
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    if (M.getTypeByName("opencl.image2d_ro_t") ||
        M.getTypeByName("opencl.image2d_wo_t") ||
        M.getTypeByName("opencl.image3d_ro_t") ||
        M.getTypeByName("opencl.image3d_wo_t")) {
      // Assume Image type's sampled type is float type.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

bool SPIRVProducerPass::FindExtInst(Module &M) {
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
          // Check whether this call is for extended instructions.
          auto callee_name = Callee->getName();
          const glsl::ExtInst EInst = getExtInstEnum(callee_name);
          const glsl::ExtInst IndirectEInst =
              getIndirectExtInstEnum(callee_name);

          HasExtInst |=
              (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);

          if (IndirectEInst) {
            // Register extra constants if needed.

            // Registers a type and constant for computing the result of the
            // given instruction. If the result of the instruction is a vector,
            // then make a splat vector constant with the same number of
            // elements.
            auto register_constant = [this, &I](Constant *constant) {
              FindType(constant->getType());
              FindConstant(constant);
              if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
                // Register the splat vector of the value with the same
                // width as the result of the instruction.
                auto *vec_constant = ConstantVector::getSplat(
                    static_cast<unsigned>(vectorTy->getNumElements()),
                    constant);
                FindConstant(vec_constant);
                FindType(vec_constant->getType());
              }
            };
            switch (IndirectEInst) {
            case glsl::ExtInstFindUMsb:
              // clz needs OpExtInst and OpISub with constant 31, or splat
              // vector of 31. Add it to the constant list here.
              register_constant(
                  ConstantInt::get(Type::getInt32Ty(Context), 31));
              break;
            case glsl::ExtInstAcos:
            case glsl::ExtInstAsin:
            case glsl::ExtInstAtan:
            case glsl::ExtInstAtan2:
              // We need 1/pi for acospi, asinpi, atan2pi.
              register_constant(
                  ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
              break;
            default:
              assert(false && "internally inconsistent");
            }
          }
        }
      }
    }
  }

  return HasExtInst;
}

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

void SPIRVProducerPass::FindTypePerFunc(Function &F) {
  // Investigate function's type.
  FunctionType *FTy = F.getFunctionType();

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    // Handle a regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
      SmallVector<Type *, 4> NewFuncParamTys;
      for (unsigned i = 0; i < FTy->getNumParams(); i++) {
        Type *ParamTy = FTy->getParamType(i);
        if (i == GVCstArgIdx) {
          Type *EleTy = ParamTy->getPointerElementType();
          ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
        }

        NewFuncParamTys.push_back(ParamTy);
      }

      FunctionType *NewFTy =
          FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
      GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
      FTy = NewFTy;
    }

    FindType(FTy);
  } else {
    // As kernel functions do not have parameters, create new function type and
    // add it to type map.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FindType(NewFTy);
  }

  // Investigate instructions' type in function body.
  for (BasicBlock &BB : F) {
    for (Instruction &I : BB) {
      if (isa<ShuffleVectorInst>(I)) {
        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore type for mask of shuffle vector instruction.
          if (i == 2) {
            continue;
          }

          Value *Op = I.getOperand(i);
          if (!isa<MetadataAsValue>(Op)) {
            FindType(Op->getType());
          }
        }

        FindType(I.getType());
        continue;
      }

      CallInst *Call = dyn_cast<CallInst>(&I);

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::ResourceAccessorFunction())) {
        // This is a fake call representing access to a resource variable.
        // We handle that elsewhere.
        continue;
      }

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::WorkgroupAccessorFunction())) {
        // This is a fake call representing access to a workgroup variable.
        // We handle that elsewhere.
        continue;
      }

      // Work through the operands of the instruction.
      for (unsigned i = 0; i < I.getNumOperands(); i++) {
        Value *const Op = I.getOperand(i);
        // If any of the operands is a constant, find the type!
        if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
          FindType(Op->getType());
        }
      }

      for (Use &Op : I.operands()) {
        if (isa<CallInst>(&I)) {
          // Avoid checking the call instruction's type.
1169 break;
1170 }
Alan Baker202c8c72018-08-13 13:47:44 -04001171 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1172 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1173 clspv::WorkgroupAccessorFunction())) {
1174 // This is a fake call representing access to a workgroup variable.
1175 // We handle that elsewhere.
1176 continue;
1177 }
1178 }
1179 if (!isa<MetadataAsValue>(&Op)) {
1180 FindType(Op->getType());
1181 continue;
1182 }
1183 }
1184
1185 // We don't want to track the type of this call as we are going to replace
1186 // it.
1187 if (Call && (clspv::LiteralSamplerFunction() ==
1188 Call->getCalledFunction()->getName())) {
1189 continue;
1190 }
1191
1192 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1193 // If gep's base operand has ModuleScopePrivate address space, make gep
1194 // return ModuleScopePrivate address space.
1195 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1196 // Add pointer type with private address space for global constant to
1197 // type list.
1198 Type *EleTy = I.getType()->getPointerElementType();
1199 Type *NewPTy =
1200 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1201
1202 FindType(NewPTy);
1203 continue;
1204 }
1205 }
1206
1207 FindType(I.getType());
1208 }
1209 }
1210}
1211
1212void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1213 // If we are using a sampler map, find the type of the sampler.
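// The sampler type used below is a pointer to the opaque "opencl.sampler_t"
// struct in the UniformConstant address space.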
1214 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
1215 0 < getSamplerMap().size()) {
1216 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1217 if (!SamplerStructTy) {
1218 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1219 }
1220
1221 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1222
1223 FindType(SamplerTy);
1224 }
1225}
1226
1227void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1228 // Record types so they are generated.
1229 TypesNeedingLayout.reset();
1230 StructTypesNeedingBlock.reset();
1231
1232 // To match older clspv codegen, generate the float type first if required
1233 // for images.
1234 for (const auto *info : ModuleOrderedResourceVars) {
1235 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1236 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
1237 // We need "float" for the sampled component type.
1238 FindType(Type::getFloatTy(M.getContext()));
1239 // We only need to find it once.
1240 break;
1241 }
1242 }
1243
1244 for (const auto *info : ModuleOrderedResourceVars) {
1245 Type *type = info->var_fn->getReturnType();
1246
1247 switch (info->arg_kind) {
1248 case clspv::ArgKind::Buffer:
1249 case clspv::ArgKind::BufferUBO:
1250 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1251 StructTypesNeedingBlock.insert(sty);
1252 } else {
1253 errs() << *type << "\n";
1254 llvm_unreachable("Buffer arguments must map to structures!");
1255 }
1256 break;
1257 case clspv::ArgKind::Pod:
1258 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1259 StructTypesNeedingBlock.insert(sty);
1260 } else {
1261 errs() << *type << "\n";
1262 llvm_unreachable("POD arguments must map to structures!");
1263 }
1264 break;
1265 case clspv::ArgKind::ReadOnlyImage:
1266 case clspv::ArgKind::WriteOnlyImage:
1267 case clspv::ArgKind::Sampler:
1268 // Sampler and image types map to the pointee type but
1269 // in the uniform constant address space.
1270 type = PointerType::get(type->getPointerElementType(),
1271 clspv::AddressSpace::UniformConstant);
1272 break;
1273 default:
1274 break;
1275 }
1276
1277 // The converted type is the type of the OpVariable we will generate.
1278 // If the pointee type is an array of size zero, FindType will convert it
1279 // to a runtime array.
1280 FindType(type);
1281 }
1282
1283 // Traverse the arrays and structures underneath each Block, and
1284 // mark them as needing layout.
1285 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1286 StructTypesNeedingBlock.end());
1287 while (!work_list.empty()) {
1288 Type *type = work_list.back();
1289 work_list.pop_back();
1290 TypesNeedingLayout.insert(type);
1291 switch (type->getTypeID()) {
1292 case Type::ArrayTyID:
1293 work_list.push_back(type->getArrayElementType());
1294 if (!Hack_generate_runtime_array_stride_early) {
1295 // Remember this array type for deferred decoration.
1296 TypesNeedingArrayStride.insert(type);
1297 }
1298 break;
1299 case Type::StructTyID:
1300 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1301 work_list.push_back(elem_ty);
1302 }
1303 default:
1304 // This type and its contained types don't get layout.
1305 break;
1306 }
1307 }
1308}
1309
1310void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1311 // The SpecId assignment for pointer-to-local arguments is recorded in
1312 // module-level metadata. Translate that information into local argument
1313 // information.
1314 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
1315 if (!nmd)
1316 return;
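// Each operand is a metadata tuple of (kernel function, argument index,
// SpecId) describing one pointer-to-local kernel argument.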
1317 for (auto operand : nmd->operands()) {
1318 MDTuple *tuple = cast<MDTuple>(operand);
1319 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1320 Function *func = cast<Function>(fn_md->getValue());
1321 ConstantAsMetadata *arg_index_md =
1322 cast<ConstantAsMetadata>(tuple->getOperand(1));
1323 int arg_index = static_cast<int>(
1324 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1325 Argument *arg = &*(func->arg_begin() + arg_index);
1326
1327 ConstantAsMetadata *spec_id_md =
1328 cast<ConstantAsMetadata>(tuple->getOperand(2));
1329 int spec_id = static_cast<int>(
1330 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
1331
1332 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1333 LocalArgSpecIds[arg] = spec_id;
1334 if (LocalSpecIdInfoMap.count(spec_id))
1335 continue;
1336
1337 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1338 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1339 nextID + 1, nextID + 2,
1340 nextID + 3, spec_id};
1341 LocalSpecIdInfoMap[spec_id] = info;
1342 nextID += 4;
1343
1344 // Ensure the types necessary for this argument get generated.
1345 Type *IdxTy = Type::getInt32Ty(M.getContext());
1346 FindConstant(ConstantInt::get(IdxTy, 0));
1347 FindType(IdxTy);
1348 FindType(arg->getType());
1349 }
1350}
1351
1352void SPIRVProducerPass::FindType(Type *Ty) {
1353 TypeList &TyList = getTypeList();
1354
1355 if (0 != TyList.idFor(Ty)) {
1356 return;
1357 }
1358
1359 if (Ty->isPointerTy()) {
1360 auto AddrSpace = Ty->getPointerAddressSpace();
1361 if ((AddressSpace::Constant == AddrSpace) ||
1362 (AddressSpace::Global == AddrSpace)) {
1363 auto PointeeTy = Ty->getPointerElementType();
1364
1365 if (PointeeTy->isStructTy() &&
1366 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1367 FindType(PointeeTy);
1368 auto ActualPointerTy =
1369 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1370 FindType(ActualPointerTy);
1371 return;
1372 }
1373 }
1374 }
1375
1376 // By convention, LLVM array type with 0 elements will map to
1377 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1378 // has a constant number of elements. We also need the type used for that
1379 // element-count constant.
1380 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1381 if (arrayTy->getNumElements() > 0) {
1382 LLVMContext &Context = Ty->getContext();
1383 FindType(Type::getInt32Ty(Context));
1384 }
1385 }
1386
1387 for (Type *SubTy : Ty->subtypes()) {
1388 FindType(SubTy);
1389 }
1390
1391 TyList.insert(Ty);
1392}
1393
1394void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1395 // If the global variable has a (non undef) initializer.
1396 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
1397 // Generate the constant if it's not the initializer to a module scope
1398 // constant that we will expect in a storage buffer.
1399 const bool module_scope_constant_external_init =
1400 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1401 clspv::Option::ModuleConstantsInStorageBuffer();
1402 if (!module_scope_constant_external_init) {
1403 FindConstant(GV.getInitializer());
1404 }
1405 }
1406}
1407
1408void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1409 // Investigate constants in function body.
1410 for (BasicBlock &BB : F) {
1411 for (Instruction &I : BB) {
1412 if (auto *call = dyn_cast<CallInst>(&I)) {
1413 auto name = call->getCalledFunction()->getName();
1414 if (name == clspv::LiteralSamplerFunction()) {
1415 // We've handled these constants elsewhere, so skip it.
1416 continue;
1417 }
1418 if (name.startswith(clspv::ResourceAccessorFunction())) {
1419 continue;
1420 }
1421 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
1422 continue;
1423 }
1424 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1425 // Skip the first operand that has the SPIR-V Opcode
1426 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1427 if (isa<Constant>(I.getOperand(i)) &&
1428 !isa<GlobalValue>(I.getOperand(i))) {
1429 FindConstant(I.getOperand(i));
1430 }
1431 }
1432 continue;
1433 }
1434 }
1435
1436 if (isa<AllocaInst>(I)) {
1437 // An alloca instruction has a constant element count. Ignore it.
1438 continue;
1439 } else if (isa<ShuffleVectorInst>(I)) {
1440 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1441 // Ignore constant for mask of shuffle vector instruction.
1442 if (i == 2) {
1443 continue;
1444 }
1445
1446 if (isa<Constant>(I.getOperand(i)) &&
1447 !isa<GlobalValue>(I.getOperand(i))) {
1448 FindConstant(I.getOperand(i));
1449 }
1450 }
1451
1452 continue;
1453 } else if (isa<InsertElementInst>(I)) {
1454 // Handle InsertElement with <4 x i8> specially.
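// A <4 x i8> value is represented as a packed 32-bit integer, so the 0xFF
// mask and the per-element shift amounts used below must be available as
// constants.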
1455 Type *CompositeTy = I.getOperand(0)->getType();
1456 if (is4xi8vec(CompositeTy)) {
1457 LLVMContext &Context = CompositeTy->getContext();
1458 if (isa<Constant>(I.getOperand(0))) {
1459 FindConstant(I.getOperand(0));
1460 }
1461
1462 if (isa<Constant>(I.getOperand(1))) {
1463 FindConstant(I.getOperand(1));
1464 }
1465
1466 // Add mask constant 0xFF.
1467 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1468 FindConstant(CstFF);
1469
1470 // Add shift amount constant.
1471 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1472 uint64_t Idx = CI->getZExtValue();
1473 Constant *CstShiftAmount =
1474 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1475 FindConstant(CstShiftAmount);
1476 }
1477
1478 continue;
1479 }
1480
1481 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1482 // Ignore constant for index of InsertElement instruction.
1483 if (i == 2) {
1484 continue;
1485 }
1486
1487 if (isa<Constant>(I.getOperand(i)) &&
1488 !isa<GlobalValue>(I.getOperand(i))) {
1489 FindConstant(I.getOperand(i));
1490 }
1491 }
1492
1493 continue;
1494 } else if (isa<ExtractElementInst>(I)) {
1495 // Handle ExtractElement with <4 x i8> specially.
1496 Type *CompositeTy = I.getOperand(0)->getType();
1497 if (is4xi8vec(CompositeTy)) {
1498 LLVMContext &Context = CompositeTy->getContext();
1499 if (isa<Constant>(I.getOperand(0))) {
1500 FindConstant(I.getOperand(0));
1501 }
1502
1503 // Add mask constant 0xFF.
1504 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1505 FindConstant(CstFF);
1506
1507 // Add shift amount constant.
1508 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1509 uint64_t Idx = CI->getZExtValue();
1510 Constant *CstShiftAmount =
1511 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1512 FindConstant(CstShiftAmount);
1513 } else {
1514 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1515 FindConstant(Cst8);
1516 }
1517
1518 continue;
1519 }
1520
1521 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1522 // Ignore constant for index of ExtractElement instruction.
1523 if (i == 1) {
1524 continue;
1525 }
1526
1527 if (isa<Constant>(I.getOperand(i)) &&
1528 !isa<GlobalValue>(I.getOperand(i))) {
1529 FindConstant(I.getOperand(i));
1530 }
1531 }
1532
1533 continue;
1534 } else if ((Instruction::Xor == I.getOpcode()) &&
1535 I.getType()->isIntegerTy(1)) {
1536 // Special case: an i1 Xor where one argument is the constant 1 (true)
1537 // maps to OpLogicalNot in SPIR-V, so that constant does not need to be
1538 // generated.
1539 bool foundConstantTrue = false;
1540 for (Use &Op : I.operands()) {
1541 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1542 auto CI = cast<ConstantInt>(Op);
1543
1544 if (CI->isZero() || foundConstantTrue) {
1545 // If we already found the true constant, we might (probably only
1546 // on -O0) have an OpLogicalNot which is taking a constant
1547 // argument, so discover it anyway.
1548 FindConstant(Op);
1549 } else {
1550 foundConstantTrue = true;
1551 }
1552 }
1553 }
1554
1555 continue;
1556 } else if (isa<TruncInst>(I)) {
1557 // Special case if i8 is not generally handled.
1558 if (!clspv::Option::Int8Support()) {
1559 // For truncation to i8 we mask against 255.
1560 Type *ToTy = I.getType();
1561 if (8u == ToTy->getPrimitiveSizeInBits()) {
1562 LLVMContext &Context = ToTy->getContext();
1563 Constant *Cst255 =
1564 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1565 FindConstant(Cst255);
1566 }
1567 }
1568 } else if (isa<AtomicRMWInst>(I)) {
1569 LLVMContext &Context = I.getContext();
1570
1571 FindConstant(
1572 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1573 FindConstant(ConstantInt::get(
1574 Type::getInt32Ty(Context),
1575 spv::MemorySemanticsUniformMemoryMask |
1576 spv::MemorySemanticsSequentiallyConsistentMask));
1577 }
1578
1579 for (Use &Op : I.operands()) {
1580 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1581 FindConstant(Op);
1582 }
1583 }
1584 }
1585 }
1586}
1587
1588void SPIRVProducerPass::FindConstant(Value *V) {
1589 ValueList &CstList = getConstantList();
1590
1591 // If V is already tracked, ignore it.
1592 if (0 != CstList.idFor(V)) {
1593 return;
1594 }
1595
1596 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1597 return;
1598 }
1599
1600 Constant *Cst = cast<Constant>(V);
1601 Type *CstTy = Cst->getType();
1602
1603 // Handle constant with <4 x i8> type specially.
1604 if (is4xi8vec(CstTy)) {
1605 if (!isa<GlobalValue>(V)) {
1606 CstList.insert(V);
1607 }
1608 }
1609
1610 if (Cst->getNumOperands()) {
1611 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1612 ++I) {
1613 FindConstant(*I);
1614 }
1615
1616 CstList.insert(Cst);
1617 return;
1618 } else if (const ConstantDataSequential *CDS =
1619 dyn_cast<ConstantDataSequential>(Cst)) {
1620 // Add constants for each element to constant list.
1621 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1622 Constant *EleCst = CDS->getElementAsConstant(i);
1623 FindConstant(EleCst);
1624 }
1625 }
1626
1627 if (!isa<GlobalValue>(V)) {
1628 CstList.insert(V);
1629 }
1630}
1631
1632spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1633 switch (AddrSpace) {
1634 default:
1635 llvm_unreachable("Unsupported OpenCL address space");
1636 case AddressSpace::Private:
1637 return spv::StorageClassFunction;
1638 case AddressSpace::Global:
1639 return spv::StorageClassStorageBuffer;
1640 case AddressSpace::Constant:
1641 return clspv::Option::ConstantArgsInUniformBuffer()
1642 ? spv::StorageClassUniform
1643 : spv::StorageClassStorageBuffer;
1644 case AddressSpace::Input:
1645 return spv::StorageClassInput;
1646 case AddressSpace::Local:
1647 return spv::StorageClassWorkgroup;
1648 case AddressSpace::UniformConstant:
1649 return spv::StorageClassUniformConstant;
1650 case AddressSpace::Uniform:
1651 return spv::StorageClassUniform;
1652 case AddressSpace::ModuleScopePrivate:
1653 return spv::StorageClassPrivate;
1654 }
1655}
1656
1657spv::StorageClass
1658SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1659 switch (arg_kind) {
1660 case clspv::ArgKind::Buffer:
1661 return spv::StorageClassStorageBuffer;
1662 case clspv::ArgKind::BufferUBO:
1663 return spv::StorageClassUniform;
1664 case clspv::ArgKind::Pod:
1665 return clspv::Option::PodArgsInUniformBuffer()
1666 ? spv::StorageClassUniform
1667 : spv::StorageClassStorageBuffer;
1668 case clspv::ArgKind::Local:
1669 return spv::StorageClassWorkgroup;
1670 case clspv::ArgKind::ReadOnlyImage:
1671 case clspv::ArgKind::WriteOnlyImage:
1672 case clspv::ArgKind::Sampler:
1673 return spv::StorageClassUniformConstant;
1674 default:
1675 llvm_unreachable("Unsupported storage class for argument kind");
1676 }
1677}
1678
1679spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1680 return StringSwitch<spv::BuiltIn>(Name)
1681 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1682 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1683 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1684 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1685 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1686 .Default(spv::BuiltInMax);
1687}
1688
1689void SPIRVProducerPass::GenerateExtInstImport() {
1690 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1691 uint32_t &ExtInstImportID = getOpExtInstImportID();
1692
1693 //
1694 // Generate OpExtInstImport.
1695 //
1696 // Ops[0] ... Ops[n] = Name (Literal String)
1697 ExtInstImportID = nextID;
1698 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1699 MkString("GLSL.std.450")));
1700}
1701
1702void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1703 Module &module) {
1704 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1705 ValueMapType &VMap = getValueMap();
1706 ValueMapType &AllocatedVMap = getAllocatedValueMap();
1707 const auto &DL = module.getDataLayout();
1708
1709 // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
1710 // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
1711 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1712
1713 for (Type *Ty : getTypeList()) {
1714 // Update TypeMap with nextID for reference later.
1715 TypeMap[Ty] = nextID;
1716
1717 switch (Ty->getTypeID()) {
1718 default: {
1719 Ty->print(errs());
1720 llvm_unreachable("Unsupported type???");
1721 break;
1722 }
1723 case Type::MetadataTyID:
1724 case Type::LabelTyID: {
1725 // Ignore these types.
1726 break;
1727 }
1728 case Type::PointerTyID: {
1729 PointerType *PTy = cast<PointerType>(Ty);
1730 unsigned AddrSpace = PTy->getAddressSpace();
1731
1732 // For the purposes of our Vulkan SPIR-V type system, constant and global
1733 // are conflated.
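// Only one OpTypePointer is emitted when the same pointee type appears in
// both the constant and global address spaces; TypeMap aliases the second
// one to the first.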
1734 bool UseExistingOpTypePointer = false;
1735 if (AddressSpace::Constant == AddrSpace) {
1736 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1737 AddrSpace = AddressSpace::Global;
1738 // Check to see if we already created this type (for instance, if we
1739 // had a constant <type>* and a global <type>*, the type would be
1740 // created by one of these types, and shared by both).
1741 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1742 if (0 < TypeMap.count(GlobalTy)) {
1743 TypeMap[PTy] = TypeMap[GlobalTy];
1744 UseExistingOpTypePointer = true;
1745 break;
1746 }
1747 }
1748 } else if (AddressSpace::Global == AddrSpace) {
1749 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1750 AddrSpace = AddressSpace::Constant;
1751
1752 // Check to see if we already created this type (for instance, if we
1753 // had a constant <type>* and a global <type>*, the type would be
1754 // created by one of these types, and shared by both).
1755 auto ConstantTy =
1756 PTy->getPointerElementType()->getPointerTo(AddrSpace);
1757 if (0 < TypeMap.count(ConstantTy)) {
1758 TypeMap[PTy] = TypeMap[ConstantTy];
1759 UseExistingOpTypePointer = true;
1760 }
1761 }
1762 }
1763
1764 const bool HasArgUser = true;
1765
1766 if (HasArgUser && !UseExistingOpTypePointer) {
1767 //
1768 // Generate OpTypePointer.
1769 //
1770
1771 // OpTypePointer
1772 // Ops[0] = Storage Class
1773 // Ops[1] = Element Type ID
1774 SPIRVOperandList Ops;
1775
1776 Ops << MkNum(GetStorageClass(AddrSpace))
1777 << MkId(lookupType(PTy->getElementType()));
1778
1779 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
1780 SPIRVInstList.push_back(Inst);
1781 }
1782 break;
1783 }
1784 case Type::StructTyID: {
1785 StructType *STy = cast<StructType>(Ty);
1786
1787 // Handle sampler type.
1788 if (STy->isOpaque()) {
1789 if (STy->getName().equals("opencl.sampler_t")) {
1790 //
1791 // Generate OpTypeSampler
1792 //
1793 // Empty Ops.
1794 SPIRVOperandList Ops;
1795
1796 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
1797 SPIRVInstList.push_back(Inst);
1798 break;
1799 } else if (STy->getName().equals("opencl.image2d_ro_t") ||
1800 STy->getName().equals("opencl.image2d_wo_t") ||
1801 STy->getName().equals("opencl.image3d_ro_t") ||
1802 STy->getName().equals("opencl.image3d_wo_t")) {
1803 //
1804 // Generate OpTypeImage
1805 //
1806 // Ops[0] = Sampled Type ID
1807 // Ops[1] = Dim ID
1808 // Ops[2] = Depth (Literal Number)
1809 // Ops[3] = Arrayed (Literal Number)
1810 // Ops[4] = MS (Literal Number)
1811 // Ops[5] = Sampled (Literal Number)
1812 // Ops[6] = Image Format ID
1813 //
1814 SPIRVOperandList Ops;
1815
1816 // TODO: Choose the Sampled Type based on the image's element type.
1817 uint32_t SampledTyID = lookupType(Type::getFloatTy(Context));
1818 Ops << MkId(SampledTyID);
1819
1820 spv::Dim DimID = spv::Dim2D;
1821 if (STy->getName().equals("opencl.image3d_ro_t") ||
1822 STy->getName().equals("opencl.image3d_wo_t")) {
1823 DimID = spv::Dim3D;
1824 }
1825 Ops << MkNum(DimID);
1826
1827 // TODO: Set up Depth.
1828 Ops << MkNum(0);
1829
1830 // TODO: Set up Arrayed.
1831 Ops << MkNum(0);
1832
1833 // TODO: Set up MS.
1834 Ops << MkNum(0);
1835
1836 // TODO: Set up Sampled.
1837 //
1838 // From Spec
1839 //
1840 // 0 indicates this is only known at run time, not at compile time
1841 // 1 indicates will be used with sampler
1842 // 2 indicates will be used without a sampler (a storage image)
1843 uint32_t Sampled = 1;
1844 if (STy->getName().equals("opencl.image2d_wo_t") ||
1845 STy->getName().equals("opencl.image3d_wo_t")) {
1846 Sampled = 2;
1847 }
1848 Ops << MkNum(Sampled);
1849
1850 // TODO: Set up Image Format.
1851 Ops << MkNum(spv::ImageFormatUnknown);
1852
1853 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, nextID++, Ops);
1854 SPIRVInstList.push_back(Inst);
1855 break;
1856 }
1857 }
1858
1859 //
1860 // Generate OpTypeStruct
1861 //
1862 // Ops[0] ... Ops[n] = Member IDs
1863 SPIRVOperandList Ops;
1864
1865 for (auto *EleTy : STy->elements()) {
1866 Ops << MkId(lookupType(EleTy));
1867 }
1868
1869 uint32_t STyID = nextID;
1870
1871 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
1872 SPIRVInstList.push_back(Inst);
1873
1874 // Generate OpMemberDecorate.
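// New decorations are inserted before the first instruction that is not an
// OpDecorate, OpMemberDecorate, or OpExtInstImport.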
1875 auto DecoInsertPoint =
1876 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1877 [](SPIRVInstruction *Inst) -> bool {
1878 return Inst->getOpcode() != spv::OpDecorate &&
1879 Inst->getOpcode() != spv::OpMemberDecorate &&
1880 Inst->getOpcode() != spv::OpExtInstImport;
1881 });
1882
1883 const auto StructLayout = DL.getStructLayout(STy);
1884 // Search for the correct offsets if this type was remapped.
1885 std::vector<uint32_t> *offsets = nullptr;
1886 auto iter = RemappedUBOTypeOffsets.find(STy);
1887 if (iter != RemappedUBOTypeOffsets.end()) {
1888 offsets = &iter->second;
1889 }
1890
1891 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
1892 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1893 MemberIdx++) {
1894 // Ops[0] = Structure Type ID
1895 // Ops[1] = Member Index(Literal Number)
1896 // Ops[2] = Decoration (Offset)
1897 // Ops[3] = Byte Offset (Literal Number)
1898 Ops.clear();
1899
1900 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
1901
1902 auto ByteOffset =
1903 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
1904 if (offsets) {
1905 ByteOffset = (*offsets)[MemberIdx];
1906 }
1907 // const auto ByteOffset =
1908 // uint32_t(StructLayout->getElementOffset(MemberIdx));
1909 Ops << MkNum(ByteOffset);
1910
1911 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
1912 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
1913 }
1914
1915 // Generate OpDecorate.
1916 if (StructTypesNeedingBlock.idFor(STy)) {
1917 Ops.clear();
1918 // Use Block decorations with StorageBuffer storage class.
1919 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
1920
1921 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
1922 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
1923 }
1924 break;
1925 }
1926 case Type::IntegerTyID: {
1927 unsigned BitWidth = Ty->getPrimitiveSizeInBits();
1928
1929 if (BitWidth == 1) {
1930 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
1931 SPIRVInstList.push_back(Inst);
1932 } else {
1933 if (!clspv::Option::Int8Support()) {
1934 // i8 is added to TypeMap as i32.
1935 // No matter what LLVM type is requested first, always alias the
1936 // second one's SPIR-V type to be the same as the one we generated
1937 // first.
1938 unsigned aliasToWidth = 0;
1939 if (BitWidth == 8) {
1940 aliasToWidth = 32;
1941 BitWidth = 32;
1942 } else if (BitWidth == 32) {
1943 aliasToWidth = 8;
1944 }
1945 if (aliasToWidth) {
1946 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
1947 auto where = TypeMap.find(otherType);
1948 if (where == TypeMap.end()) {
1949 // Go ahead and make it, but also map the other type to it.
1950 TypeMap[otherType] = nextID;
1951 } else {
1952 // Alias this SPIR-V type the existing type.
1953 TypeMap[Ty] = where->second;
1954 break;
1955 }
1956 }
1957 }
1958
1959 SPIRVOperandList Ops;
1960 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
1961
1962 SPIRVInstList.push_back(
1963 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
1964 }
1965 break;
1966 }
1967 case Type::HalfTyID:
1968 case Type::FloatTyID:
1969 case Type::DoubleTyID: {
1970 SPIRVOperand *WidthOp = new SPIRVOperand(
1971 SPIRVOperandType::LITERAL_INTEGER, Ty->getPrimitiveSizeInBits());
1972
1973 SPIRVInstList.push_back(
1974 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
1975 break;
1976 }
1977 case Type::ArrayTyID: {
1978 ArrayType *ArrTy = cast<ArrayType>(Ty);
1979 const uint64_t Length = ArrTy->getArrayNumElements();
1980 if (Length == 0) {
1981 // By convention, map it to a RuntimeArray.
1982
1983 // Only generate the type once.
1984 // TODO(dneto): Can it ever be generated more than once?
1985 // Doesn't LLVM type uniqueness guarantee we'll only see this
1986 // once?
1987 Type *EleTy = ArrTy->getArrayElementType();
1988 if (OpRuntimeTyMap.count(EleTy) == 0) {
1989 uint32_t OpTypeRuntimeArrayID = nextID;
1990 OpRuntimeTyMap[Ty] = nextID;
1991
1992 //
1993 // Generate OpTypeRuntimeArray.
1994 //
1995
1996 // OpTypeRuntimeArray
1997 // Ops[0] = Element Type ID
1998 SPIRVOperandList Ops;
1999 Ops << MkId(lookupType(EleTy));
2000
2001 SPIRVInstList.push_back(
2002 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
2003
2004 if (Hack_generate_runtime_array_stride_early) {
2005 // Generate OpDecorate.
2006 auto DecoInsertPoint = std::find_if(
2007 SPIRVInstList.begin(), SPIRVInstList.end(),
2008 [](SPIRVInstruction *Inst) -> bool {
2009 return Inst->getOpcode() != spv::OpDecorate &&
2010 Inst->getOpcode() != spv::OpMemberDecorate &&
2011 Inst->getOpcode() != spv::OpExtInstImport;
2012 });
2013
2014 // Ops[0] = Target ID
2015 // Ops[1] = Decoration (ArrayStride)
2016 // Ops[2] = Stride Number(Literal Number)
2017 Ops.clear();
2018
2019 Ops << MkId(OpTypeRuntimeArrayID)
2020 << MkNum(spv::DecorationArrayStride)
2021 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
2022
2023 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2024 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2025 }
2026 }
2027
2028 } else {
2029
2030 //
2031 // Generate OpConstant and OpTypeArray.
2032 //
2033
2034 //
2035 // Generate OpConstant for array length.
2036 //
2037 // Ops[0] = Result Type ID
2038 // Ops[1] .. Ops[n] = Values LiteralNumber
2039 SPIRVOperandList Ops;
2040
2041 Type *LengthTy = Type::getInt32Ty(Context);
2042 uint32_t ResTyID = lookupType(LengthTy);
2043 Ops << MkId(ResTyID);
2044
2045 assert(Length < UINT32_MAX);
2046 Ops << MkNum(static_cast<uint32_t>(Length));
2047
2048 // Add constant for length to constant list.
2049 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2050 AllocatedVMap[CstLength] = nextID;
2051 VMap[CstLength] = nextID;
2052 uint32_t LengthID = nextID;
2053
2054 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2055 SPIRVInstList.push_back(CstInst);
2056
2057 // Remember to generate ArrayStride later
2058 getTypesNeedingArrayStride().insert(Ty);
2059
2060 //
2061 // Generate OpTypeArray.
2062 //
2063 // Ops[0] = Element Type ID
2064 // Ops[1] = Array Length Constant ID
2065 Ops.clear();
2066
2067 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2068 Ops << MkId(EleTyID) << MkId(LengthID);
2069
2070 // Update TypeMap with nextID.
2071 TypeMap[Ty] = nextID;
2072
2073 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2074 SPIRVInstList.push_back(ArrayInst);
2075 }
2076 break;
2077 }
2078 case Type::VectorTyID: {
2079 // <4 x i8> is changed to i32 if i8 is not generally supported.
2080 if (!clspv::Option::Int8Support() &&
2081 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
2082 if (Ty->getVectorNumElements() == 4) {
2083 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2084 break;
2085 } else {
2086 Ty->print(errs());
2087 llvm_unreachable("Support above i8 vector type");
2088 }
2089 }
2090
2091 // Ops[0] = Component Type ID
2092 // Ops[1] = Component Count (Literal Number)
2093 SPIRVOperandList Ops;
2094 Ops << MkId(lookupType(Ty->getVectorElementType()))
2095 << MkNum(Ty->getVectorNumElements());
2096
2097 SPIRVInstruction *inst =
2098 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
2099 SPIRVInstList.push_back(inst);
2100 break;
2101 }
2102 case Type::VoidTyID: {
2103 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
2104 SPIRVInstList.push_back(Inst);
2105 break;
2106 }
2107 case Type::FunctionTyID: {
2108 // Generate SPIRV instruction for function type.
2109 FunctionType *FTy = cast<FunctionType>(Ty);
2110
2111 // Ops[0] = Return Type ID
2112 // Ops[1] ... Ops[n] = Parameter Type IDs
2113 SPIRVOperandList Ops;
2114
2115 // Find SPIRV instruction for return type
2116 Ops << MkId(lookupType(FTy->getReturnType()));
2117
2118 // Find SPIRV instructions for parameter types
2119 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2120 // Find SPIRV instruction for parameter type.
2121 auto ParamTy = FTy->getParamType(k);
2122 if (ParamTy->isPointerTy()) {
2123 auto PointeeTy = ParamTy->getPointerElementType();
2124 if (PointeeTy->isStructTy() &&
2125 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2126 ParamTy = PointeeTy;
2127 }
2128 }
2129
2130 Ops << MkId(lookupType(ParamTy));
2131 }
2132
2133 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
2134 SPIRVInstList.push_back(Inst);
2135 break;
2136 }
2137 }
2138 }
2139
2140 // Generate OpTypeSampledImage.
2141 TypeMapType &OpImageTypeMap = getImageTypeMap();
2142 for (auto &ImageType : OpImageTypeMap) {
2143 //
2144 // Generate OpTypeSampledImage.
2145 //
2146 // Ops[0] = Image Type ID
2147 //
2148 SPIRVOperandList Ops;
2149
2150 Type *ImgTy = ImageType.first;
2151 Ops << MkId(TypeMap[ImgTy]);
2152
2153 // Update OpImageTypeMap.
2154 ImageType.second = nextID;
2155
2156 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
2157 SPIRVInstList.push_back(Inst);
2158 }
2159
2160 // Generate types for pointer-to-local arguments.
2161 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2162 ++spec_id) {
2163 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
2164
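// For each SpecId the loop emits an OpSpecConstant for the array length, an
// OpTypeArray sized by it, and an OpTypePointer to that array in Workgroup
// storage.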
2165 // Generate the spec constant.
2166 SPIRVOperandList Ops;
2167 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
2168 SPIRVInstList.push_back(
2169 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
2170
2171 // Generate the array type.
2172 Ops.clear();
2173 // The element type must have been created.
2174 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2175 assert(elem_ty_id);
2176 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2177
2178 SPIRVInstList.push_back(
2179 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
2180
2181 Ops.clear();
2182 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
2183 SPIRVInstList.push_back(new SPIRVInstruction(
2184 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
2185 }
2186}
2187
2188void SPIRVProducerPass::GenerateSPIRVConstants() {
2189 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2190 ValueMapType &VMap = getValueMap();
2191 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2192 ValueList &CstList = getConstantList();
2193 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002194
2195 for (uint32_t i = 0; i < CstList.size(); i++) {
2196 // UniqueVector ids are 1-based.
2197 Constant *Cst = cast<Constant>(CstList[i + 1]);
2198
2199 // OpTypeArray's constant was already generated.
2200 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
2201 continue;
2202 }
2203
2204 // Set ValueMap with nextID for reference later.
2205 VMap[Cst] = nextID;
2206
2207 //
2208 // Generate OpConstant.
2209 //
2210
2211 // Ops[0] = Result Type ID
2212 // Ops[1] .. Ops[n] = Values LiteralNumber
2213 SPIRVOperandList Ops;
2214
2215 Ops << MkId(lookupType(Cst->getType()));
2216
2217 std::vector<uint32_t> LiteralNum;
2218 spv::Op Opcode = spv::OpNop;
2219
2220 if (isa<UndefValue>(Cst)) {
2221 // Ops[0] = Result Type ID
2222 Opcode = spv::OpUndef;
2223 if (hack_undef && IsTypeNullable(Cst->getType())) {
2224 Opcode = spv::OpConstantNull;
2225 }
2226 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2227 unsigned BitWidth = CI->getBitWidth();
2228 if (BitWidth == 1) {
2229 // If the bitwidth of constant is 1, generate OpConstantTrue or
2230 // OpConstantFalse.
2231 if (CI->getZExtValue()) {
2232 // Ops[0] = Result Type ID
2233 Opcode = spv::OpConstantTrue;
2234 } else {
2235 // Ops[0] = Result Type ID
2236 Opcode = spv::OpConstantFalse;
2237 }
2238 } else {
2239 auto V = CI->getZExtValue();
2240 LiteralNum.push_back(V & 0xFFFFFFFF);
2241
2242 if (BitWidth > 32) {
2243 LiteralNum.push_back(V >> 32);
2244 }
2245
2246 Opcode = spv::OpConstant;
2247
2248 Ops << MkInteger(LiteralNum);
2249
2250 if (BitWidth == 32 && V == 0) {
2251 constant_i32_zero_id_ = nextID;
2252 }
David Neto22f144c2017-06-12 14:26:21 -04002253 }
2254 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2255 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2256 Type *CFPTy = CFP->getType();
2257 if (CFPTy->isFloatTy()) {
2258 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2259 } else if (CFPTy->isDoubleTy()) {
2260 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2261 LiteralNum.push_back(FPVal >> 32);
2262 } else {
2263 CFPTy->print(errs());
2264 llvm_unreachable("Implement this ConstantFP Type");
2265 }
2266
2267 Opcode = spv::OpConstant;
2268
2269 Ops << MkFloat(LiteralNum);
2270 } else if (isa<ConstantDataSequential>(Cst) &&
2271 cast<ConstantDataSequential>(Cst)->isString()) {
2272 Cst->print(errs());
2273 llvm_unreachable("Implement this Constant");
2274
2275 } else if (const ConstantDataSequential *CDS =
2276 dyn_cast<ConstantDataSequential>(Cst)) {
2277 // Let's convert <4 x i8> constant to int constant specially.
2278 // This case occurs when all the values are specified as constant
2279 // ints.
2280 Type *CstTy = Cst->getType();
2281 if (is4xi8vec(CstTy)) {
2282 LLVMContext &Context = CstTy->getContext();
2283
2284 //
2285 // Generate OpConstant with OpTypeInt 32 0.
2286 //
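// Pack the four byte values into a single 32-bit word; element 0 ends up
// in the most significant byte.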
2287 uint32_t IntValue = 0;
2288 for (unsigned k = 0; k < 4; k++) {
2289 const uint64_t Val = CDS->getElementAsInteger(k);
2290 IntValue = (IntValue << 8) | (Val & 0xffu);
2291 }
2292
2293 Type *i32 = Type::getInt32Ty(Context);
2294 Constant *CstInt = ConstantInt::get(i32, IntValue);
2295 // If this constant is already registered on VMap, use it.
2296 if (VMap.count(CstInt)) {
2297 uint32_t CstID = VMap[CstInt];
2298 VMap[Cst] = CstID;
2299 continue;
2300 }
2301
2302 Ops << MkNum(IntValue);
2303
2304 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2305 SPIRVInstList.push_back(CstInst);
2306
2307 continue;
2308 }
2309
2310 // A normal constant-data-sequential case.
2311 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2312 Constant *EleCst = CDS->getElementAsConstant(k);
2313 uint32_t EleCstID = VMap[EleCst];
2314 Ops << MkId(EleCstID);
2315 }
2316
2317 Opcode = spv::OpConstantComposite;
2318 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2319 // Let's convert <4 x i8> constant to int constant specially.
2320 // This case occurs when at least one of the values is an undef.
2321 Type *CstTy = Cst->getType();
2322 if (is4xi8vec(CstTy)) {
2323 LLVMContext &Context = CstTy->getContext();
2324
2325 //
2326 // Generate OpConstant with OpTypeInt 32 0.
2327 //
2328 uint32_t IntValue = 0;
2329 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2330 I != E; ++I) {
2331 uint64_t Val = 0;
2332 const Value *CV = *I;
2333 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2334 Val = CI2->getZExtValue();
2335 }
2336 IntValue = (IntValue << 8) | (Val & 0xffu);
2337 }
2338
2339 Type *i32 = Type::getInt32Ty(Context);
2340 Constant *CstInt = ConstantInt::get(i32, IntValue);
2341 // If this constant is already registered on VMap, use it.
2342 if (VMap.count(CstInt)) {
2343 uint32_t CstID = VMap[CstInt];
2344 VMap[Cst] = CstID;
2345 continue;
2346 }
2347
2348 Ops << MkNum(IntValue);
2349
2350 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2351 SPIRVInstList.push_back(CstInst);
2352
2353 continue;
2354 }
2355
2356 // We use a constant composite in SPIR-V for our constant aggregate in
2357 // LLVM.
2358 Opcode = spv::OpConstantComposite;
2359
2360 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2361 // Look up the ID of the element of this aggregate (which we will
2362 // previously have created a constant for).
2363 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2364
2365 // And add an operand to the composite we are constructing
2366 Ops << MkId(ElementConstantID);
2367 }
2368 } else if (Cst->isNullValue()) {
2369 Opcode = spv::OpConstantNull;
2370 } else {
2371 Cst->print(errs());
2372 llvm_unreachable("Unsupported Constant???");
2373 }
2374
2375 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2376 // Null pointer requires variable pointers.
2377 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2378 }
2379
2380 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
2381 SPIRVInstList.push_back(CstInst);
2382 }
2383}
2384
2385void SPIRVProducerPass::GenerateSamplers(Module &M) {
2386 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2387
2388 auto &sampler_map = getSamplerMap();
2389 SamplerMapIndexToIDMap.clear();
2390 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
2391 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2392 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
2393
2394 // We might have samplers in the sampler map that are not used
2395 // in the translation unit. We need to allocate variables
2396 // for them and bindings too.
2397 DenseSet<unsigned> used_bindings;
2398
2399 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
2400 if (!var_fn)
2401 return;
2402 for (auto user : var_fn->users()) {
2403 // Populate SamplerLiteralToDescriptorSetMap and
2404 // SamplerLiteralToBindingMap.
2405 //
2406 // Look for calls like
2407 // call %opencl.sampler_t addrspace(2)*
2408 // @clspv.sampler.var.literal(
2409 // i32 descriptor,
2410 // i32 binding,
2411 // i32 index-into-sampler-map)
2412 if (auto *call = dyn_cast<CallInst>(user)) {
2413 const size_t index_into_sampler_map = static_cast<size_t>(
2414 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2415 if (index_into_sampler_map >= sampler_map.size()) {
2416 errs() << "Out of bounds index to sampler map: "
2417 << index_into_sampler_map;
2418 llvm_unreachable("bad sampler init: out of bounds");
2419 }
2420
2421 auto sampler_value = sampler_map[index_into_sampler_map].first;
2422 const auto descriptor_set = static_cast<unsigned>(
2423 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2424 const auto binding = static_cast<unsigned>(
2425 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2426
2427 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2428 SamplerLiteralToBindingMap[sampler_value] = binding;
2429 used_bindings.insert(binding);
2430 }
2431 }
2432
2433 unsigned index = 0;
2434 for (auto SamplerLiteral : sampler_map) {
2435 // Generate OpVariable.
2436 //
2437 // GIDOps[0] : Result Type ID
2438 // GIDOps[1] : Storage Class
2439 SPIRVOperandList Ops;
2440
2441 Ops << MkId(lookupType(SamplerTy))
2442 << MkNum(spv::StorageClassUniformConstant);
2443
2444 auto sampler_var_id = nextID++;
2445 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
2446 SPIRVInstList.push_back(Inst);
2447
2448 SamplerMapIndexToIDMap[index] = sampler_var_id;
2449 SamplerLiteralToIDMap[SamplerLiteral.first] = sampler_var_id;
2450
2451 // Find Insert Point for OpDecorate.
2452 auto DecoInsertPoint =
2453 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2454 [](SPIRVInstruction *Inst) -> bool {
2455 return Inst->getOpcode() != spv::OpDecorate &&
2456 Inst->getOpcode() != spv::OpMemberDecorate &&
2457 Inst->getOpcode() != spv::OpExtInstImport;
2458 });
2459
2460 // Ops[0] = Target ID
2461 // Ops[1] = Decoration (DescriptorSet)
2462 // Ops[2] = LiteralNumber according to Decoration
2463 Ops.clear();
2464
2465 unsigned descriptor_set;
2466 unsigned binding;
2467 if (SamplerLiteralToBindingMap.find(SamplerLiteral.first) ==
2468 SamplerLiteralToBindingMap.end()) {
2469 // This sampler is not actually used. Find the next one.
2470 for (binding = 0; used_bindings.count(binding); binding++)
2471 ;
2472 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2473 used_bindings.insert(binding);
2474 } else {
2475 descriptor_set = SamplerLiteralToDescriptorSetMap[SamplerLiteral.first];
2476 binding = SamplerLiteralToBindingMap[SamplerLiteral.first];
2477
2478 version0::DescriptorMapEntry::SamplerData sampler_data = {
2479 SamplerLiteral.first};
2480 descriptorMapEntries->emplace_back(std::move(sampler_data),
2481 descriptor_set, binding);
2482 }
2483
2484 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2485 << MkNum(descriptor_set);
2486
2487 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2488 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2489
2490 // Ops[0] = Target ID
2491 // Ops[1] = Decoration (Binding)
2492 // Ops[2] = LiteralNumber according to Decoration
2493 Ops.clear();
2494 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2495 << MkNum(binding);
2496
2497 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2498 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2499
2500 index++;
2501 }
2502}
David Neto22f144c2017-06-12 14:26:21 -04002503
2504void SPIRVProducerPass::GenerateResourceVars(Module &) {
2505 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2506 ValueMapType &VMap = getValueMap();
2507
2508 // Generate variables. Make one for each resource var info object.
2509 for (auto *info : ModuleOrderedResourceVars) {
2510 Type *type = info->var_fn->getReturnType();
2511 // Remap the address space for opaque types.
2512 switch (info->arg_kind) {
2513 case clspv::ArgKind::Sampler:
2514 case clspv::ArgKind::ReadOnlyImage:
2515 case clspv::ArgKind::WriteOnlyImage:
2516 type = PointerType::get(type->getPointerElementType(),
2517 clspv::AddressSpace::UniformConstant);
2518 break;
2519 default:
2520 break;
2521 }
2522
2523 info->var_id = nextID++;
2524
2525 const auto type_id = lookupType(type);
2526 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2527 SPIRVOperandList Ops;
2528 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002529
David Neto862b7d82018-06-14 18:48:37 -04002530 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2531 SPIRVInstList.push_back(Inst);
2532
2533 // Map calls to the variable-builtin-function.
2534 for (auto &U : info->var_fn->uses()) {
2535 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2536 const auto set = unsigned(
2537 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2538 const auto binding = unsigned(
2539 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2540 if (set == info->descriptor_set && binding == info->binding) {
2541 switch (info->arg_kind) {
2542 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002543 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002544 case clspv::ArgKind::Pod:
2545 // The call maps to the variable directly.
2546 VMap[call] = info->var_id;
2547 break;
2548 case clspv::ArgKind::Sampler:
2549 case clspv::ArgKind::ReadOnlyImage:
2550 case clspv::ArgKind::WriteOnlyImage:
2551 // The call maps to a load we generate later.
2552 ResourceVarDeferredLoadCalls[call] = info->var_id;
2553 break;
2554 default:
2555 llvm_unreachable("Unhandled arg kind");
2556 }
2557 }
David Neto22f144c2017-06-12 14:26:21 -04002558 }
David Neto862b7d82018-06-14 18:48:37 -04002559 }
2560 }
David Neto22f144c2017-06-12 14:26:21 -04002561
David Neto862b7d82018-06-14 18:48:37 -04002562 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002563
David Neto862b7d82018-06-14 18:48:37 -04002564 // Find Insert Point for OpDecorate.
2565 auto DecoInsertPoint =
2566 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2567 [](SPIRVInstruction *Inst) -> bool {
2568 return Inst->getOpcode() != spv::OpDecorate &&
2569 Inst->getOpcode() != spv::OpMemberDecorate &&
2570 Inst->getOpcode() != spv::OpExtInstImport;
2571 });
2572
2573 SPIRVOperandList Ops;
2574 for (auto *info : ModuleOrderedResourceVars) {
2575 // Decorate with DescriptorSet and Binding.
2576 Ops.clear();
2577 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2578 << MkNum(info->descriptor_set);
2579 SPIRVInstList.insert(DecoInsertPoint,
2580 new SPIRVInstruction(spv::OpDecorate, Ops));
2581
2582 Ops.clear();
2583 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2584 << MkNum(info->binding);
2585 SPIRVInstList.insert(DecoInsertPoint,
2586 new SPIRVInstruction(spv::OpDecorate, Ops));
2587
alan-bakere9308012019-03-15 10:25:13 -04002588 if (info->coherent) {
2589 // Decorate with Coherent if required for the variable.
2590 Ops.clear();
2591 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2592 SPIRVInstList.insert(DecoInsertPoint,
2593 new SPIRVInstruction(spv::OpDecorate, Ops));
2594 }
2595
David Neto862b7d82018-06-14 18:48:37 -04002596 // Generate NonWritable and NonReadable
2597 switch (info->arg_kind) {
2598 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002599 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002600 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2601 clspv::AddressSpace::Constant) {
2602 Ops.clear();
2603 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2604 SPIRVInstList.insert(DecoInsertPoint,
2605 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002606 }
David Neto862b7d82018-06-14 18:48:37 -04002607 break;
David Neto862b7d82018-06-14 18:48:37 -04002608 case clspv::ArgKind::WriteOnlyImage:
2609 Ops.clear();
2610 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2611 SPIRVInstList.insert(DecoInsertPoint,
2612 new SPIRVInstruction(spv::OpDecorate, Ops));
2613 break;
2614 default:
2615 break;
David Neto22f144c2017-06-12 14:26:21 -04002616 }
2617 }
2618}
2619
2620void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002621 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002622 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2623 ValueMapType &VMap = getValueMap();
2624 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002625 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002626
2627 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2628 Type *Ty = GV.getType();
2629 PointerType *PTy = cast<PointerType>(Ty);
2630
2631 uint32_t InitializerID = 0;
2632
2633 // Workgroup size is handled differently (it goes into a constant)
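// For example (illustrative): if every kernel carries
// __attribute__((reqd_work_group_size(8, 4, 2))), the builtin is backed by a
// constant roughly like
//   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_2
// and that constant (not the variable used to access it) is later decorated
// BuiltIn WorkgroupSize. Without the attribute, spec constants are emitted
// instead, as handled further below.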
2634 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2635 std::vector<bool> HasMDVec;
2636 uint32_t PrevXDimCst = 0xFFFFFFFF;
2637 uint32_t PrevYDimCst = 0xFFFFFFFF;
2638 uint32_t PrevZDimCst = 0xFFFFFFFF;
2639 for (Function &Func : *GV.getParent()) {
2640 if (Func.isDeclaration()) {
2641 continue;
2642 }
2643
2644 // We only need to check kernels.
2645 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2646 continue;
2647 }
2648
2649 if (const MDNode *MD =
2650 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2651 uint32_t CurXDimCst = static_cast<uint32_t>(
2652 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2653 uint32_t CurYDimCst = static_cast<uint32_t>(
2654 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2655 uint32_t CurZDimCst = static_cast<uint32_t>(
2656 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2657
2658 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2659 PrevZDimCst == 0xFFFFFFFF) {
2660 PrevXDimCst = CurXDimCst;
2661 PrevYDimCst = CurYDimCst;
2662 PrevZDimCst = CurZDimCst;
2663 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2664 CurZDimCst != PrevZDimCst) {
2665 llvm_unreachable(
2666 "reqd_work_group_size must be the same across all kernels");
2667 } else {
2668 continue;
2669 }
2670
2671 //
2672 // Generate OpConstantComposite.
2673 //
2674 // Ops[0] : Result Type ID
2675 // Ops[1] : Constant size for x dimension.
2676 // Ops[2] : Constant size for y dimension.
2677 // Ops[3] : Constant size for z dimension.
2678 SPIRVOperandList Ops;
2679
2680 uint32_t XDimCstID =
2681 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2682 uint32_t YDimCstID =
2683 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2684 uint32_t ZDimCstID =
2685 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2686
2687 InitializerID = nextID;
2688
David Neto257c3892018-04-11 13:19:45 -04002689 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2690 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002691
David Neto87846742018-04-11 17:36:22 -04002692 auto *Inst =
2693 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002694 SPIRVInstList.push_back(Inst);
2695
2696 HasMDVec.push_back(true);
2697 } else {
2698 HasMDVec.push_back(false);
2699 }
2700 }
2701
2702 // Check that all kernels have the same reqd_work_group_size definition.
2703 bool HasMD = false;
2704 if (!HasMDVec.empty()) {
2705 HasMD = HasMDVec[0];
2706 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2707 if (HasMD != HasMDVec[i]) {
2708 llvm_unreachable(
2709 "Kernels should have consistent work group size definition");
2710 }
2711 }
2712 }
2713
2714 // If the kernels do not have reqd_work_group_size metadata, generate
2715 // OpSpecConstants for the x/y/z dimensions.
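// A rough sketch of the intended output (illustrative):
//   %wgs_x = OpSpecConstant %uint 1
//   %wgs_y = OpSpecConstant %uint 1
//   %wgs_z = OpSpecConstant %uint 1
// Each is later decorated with SpecId 0, 1, or 2 in GenerateModuleInfo so the
// host can override the workgroup size at pipeline creation time.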
2716 if (!HasMD) {
2717 //
2718 // Generate OpSpecConstants for x/y/z dimension.
2719 //
2720 // Ops[0] : Result Type ID
2721 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2722 uint32_t XDimCstID = 0;
2723 uint32_t YDimCstID = 0;
2724 uint32_t ZDimCstID = 0;
2725
David Neto22f144c2017-06-12 14:26:21 -04002726 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002727 uint32_t result_type_id =
2728 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002729
David Neto257c3892018-04-11 13:19:45 -04002730 // X Dimension
2731 Ops << MkId(result_type_id) << MkNum(1);
2732 XDimCstID = nextID++;
2733 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002734 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002735
2736 // Y Dimension
2737 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002738 Ops << MkId(result_type_id) << MkNum(1);
2739 YDimCstID = nextID++;
2740 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002741 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002742
2743 // Z Dimension
2744 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002745 Ops << MkId(result_type_id) << MkNum(1);
2746 ZDimCstID = nextID++;
2747 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002748 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002749
David Neto257c3892018-04-11 13:19:45 -04002750 BuiltinDimVec.push_back(XDimCstID);
2751 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002752 BuiltinDimVec.push_back(ZDimCstID);
2753
David Neto22f144c2017-06-12 14:26:21 -04002754 //
2755 // Generate OpSpecConstantComposite.
2756 //
2757 // Ops[0] : Result Type ID
2758 // Ops[1] : Constant size for x dimension.
2759 // Ops[2] : Constant size for y dimension.
2760 // Ops[3] : Constant size for z dimension.
2761 InitializerID = nextID;
2762
2763 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002764 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2765 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002766
David Neto87846742018-04-11 17:36:22 -04002767 auto *Inst =
2768 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002769 SPIRVInstList.push_back(Inst);
2770 }
2771 }
2772
David Neto22f144c2017-06-12 14:26:21 -04002773 VMap[&GV] = nextID;
2774
2775 //
2776 // Generate OpVariable.
2777 //
2778 // Ops[0] : Result Type ID
2779 // Ops[1] : Storage Class
2780 SPIRVOperandList Ops;
2781
David Neto85082642018-03-24 06:55:20 -07002782 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002783 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002784
David Neto85082642018-03-24 06:55:20 -07002785 if (GV.hasInitializer()) {
2786 InitializerID = VMap[GV.getInitializer()];
David Neto22f144c2017-06-12 14:26:21 -04002787 }
2788
David Neto85082642018-03-24 06:55:20 -07002789 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002790 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002791 clspv::Option::ModuleConstantsInStorageBuffer();
2792
2793 if (0 != InitializerID) {
2794 if (!module_scope_constant_external_init) {
2795 // Emit the ID of the initializer as part of the variable definition.
David Netoc6f3ab22018-04-06 18:02:31 -04002796 Ops << MkId(InitializerID);
David Neto85082642018-03-24 06:55:20 -07002797 }
2798 }
2799 const uint32_t var_id = nextID++;
2800
David Neto87846742018-04-11 17:36:22 -04002801 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002802 SPIRVInstList.push_back(Inst);
2803
2804 // If we have a builtin.
2805 if (spv::BuiltInMax != BuiltinType) {
2806 // Find Insert Point for OpDecorate.
2807 auto DecoInsertPoint =
2808 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2809 [](SPIRVInstruction *Inst) -> bool {
2810 return Inst->getOpcode() != spv::OpDecorate &&
2811 Inst->getOpcode() != spv::OpMemberDecorate &&
2812 Inst->getOpcode() != spv::OpExtInstImport;
2813 });
2814 //
2815 // Generate OpDecorate.
2816 //
2817 // DOps[0] = Target ID
2818 // DOps[1] = Decoration (Builtin)
2819 // DOps[2] = BuiltIn ID
2820 uint32_t ResultID;
2821
2822 // WorkgroupSize is different: we decorate the constant composite that holds
2823 // its value, rather than the variable that we use to access the value.
2824 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2825 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002826 // Save both the value and variable IDs for later.
2827 WorkgroupSizeValueID = InitializerID;
2828 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002829 } else {
2830 ResultID = VMap[&GV];
2831 }
2832
2833 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002834 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2835 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002836
David Neto87846742018-04-11 17:36:22 -04002837 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002838 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002839 } else if (module_scope_constant_external_init) {
2840 // This module scope constant is initialized from a storage buffer with data
2841 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002842 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002843
David Neto862b7d82018-06-14 18:48:37 -04002844 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002845 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2846 // that later to other types, like uniform buffer.
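// For example (illustrative): a program-scope "__constant int c[2] = {1, 2}"
// would have its flattened initializer emitted as hex bytes by the
// ConstantEmitter, and the backing variable is bound at binding 0 of a
// dedicated descriptor set so the host can upload the data before dispatch.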
alan-bakerf5e5f692018-11-27 08:33:24 -05002847 std::string hexbytes;
2848 llvm::raw_string_ostream str(hexbytes);
2849 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002850 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2851 str.str()};
2852 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2853 0);
David Neto85082642018-03-24 06:55:20 -07002854
2855 // Find Insert Point for OpDecorate.
2856 auto DecoInsertPoint =
2857 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2858 [](SPIRVInstruction *Inst) -> bool {
2859 return Inst->getOpcode() != spv::OpDecorate &&
2860 Inst->getOpcode() != spv::OpMemberDecorate &&
2861 Inst->getOpcode() != spv::OpExtInstImport;
2862 });
2863
David Neto257c3892018-04-11 13:19:45 -04002864 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002865 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002866 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2867 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002868 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002869
2870 // OpDecorate %var DescriptorSet <descriptor_set>
2871 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002872 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2873 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002874 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04002875 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04002876 }
2877}
2878
David Netoc6f3ab22018-04-06 18:02:31 -04002879void SPIRVProducerPass::GenerateWorkgroupVars() {
2880 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04002881 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2882 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002883 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002884
2885 // Generate OpVariable.
2886 //
2887 // Ops[0] : Result Type ID
2888 // Ops[1] : Storage Class
2889 SPIRVOperandList Ops;
2890 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
2891
2892 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002893 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002894 }
2895}
2896
David Neto862b7d82018-06-14 18:48:37 -04002897void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
2898 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04002899 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
2900 return;
2901 }
David Neto862b7d82018-06-14 18:48:37 -04002902 // Gather the list of resources that are used by this function's arguments.
2903 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
2904
alan-bakerf5e5f692018-11-27 08:33:24 -05002905 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
2906 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04002907 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05002908 std::string kind =
2909 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
2910 ? "pod_ubo"
2911 : argKind;
2912 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04002913 };
2914
2915 auto *fty = F.getType()->getPointerElementType();
2916 auto *func_ty = dyn_cast<FunctionType>(fty);
2917
alan-baker038e9242019-04-19 22:14:41 -04002918 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04002919 // If an argument maps to a resource variable, then get descriptor set and
2920 // binding from the resource variable. Other info comes from the metadata.
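// Each "kernel_arg_map" entry is expected to carry 7 operands, roughly
// (illustrative IR spelling, matching the extraction code below):
//   !{!"arg_name", i32 old_index, i32 new_index, i32 offset, i32 arg_size,
//     !"arg_kind", i32 spec_id}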
2921 const auto *arg_map = F.getMetadata("kernel_arg_map");
2922 if (arg_map) {
2923 for (const auto &arg : arg_map->operands()) {
2924 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00002925 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04002926 const auto name =
2927 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
2928 const auto old_index =
2929 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
2930 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05002931 const size_t new_index = static_cast<size_t>(
2932 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002933 const auto offset =
2934 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00002935 const auto arg_size =
2936 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04002937 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00002938 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04002939 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00002940 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05002941
2942 uint32_t descriptor_set = 0;
2943 uint32_t binding = 0;
2944 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002945 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05002946 static_cast<uint32_t>(spec_id),
2947 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002948 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04002949 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05002950 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
2951 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
2952 DL));
David Neto862b7d82018-06-14 18:48:37 -04002953 } else {
2954 auto *info = resource_var_at_index[new_index];
2955 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05002956 descriptor_set = info->descriptor_set;
2957 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04002958 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002959 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
2960 binding);
David Neto862b7d82018-06-14 18:48:37 -04002961 }
2962 } else {
2963 // There is no argument map.
2964 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00002965 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04002966
2967 SmallVector<Argument *, 4> arguments;
2968 for (auto &arg : F.args()) {
2969 arguments.push_back(&arg);
2970 }
2971
2972 unsigned arg_index = 0;
2973 for (auto *info : resource_var_at_index) {
2974 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00002975 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05002976 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00002977 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002978 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00002979 }
2980
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002981 // Local pointer arguments are unused in this case. Offset is always
2982 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05002983 version0::DescriptorMapEntry::KernelArgData kernel_data = {
2984 F.getName(), arg->getName(),
2985 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
2986 0, 0,
2987 0, arg_size};
2988 descriptorMapEntries->emplace_back(std::move(kernel_data),
2989 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04002990 }
2991 arg_index++;
2992 }
2993 // Generate mappings for pointer-to-local arguments.
2994 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
2995 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04002996 auto where = LocalArgSpecIds.find(arg);
2997 if (where != LocalArgSpecIds.end()) {
2998 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05002999 // Pod argument members are unused in this case.
3000 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3001 F.getName(),
3002 arg->getName(),
3003 arg_index,
3004 ArgKind::Local,
3005 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003006 static_cast<uint32_t>(
3007 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003008 0,
3009 0};
3010 // Pointer-to-local arguments do not utilize descriptor set and binding.
3011 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003012 }
3013 }
3014 }
3015}
3016
David Neto22f144c2017-06-12 14:26:21 -04003017void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3018 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3019 ValueMapType &VMap = getValueMap();
3020 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003021 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3022 auto &GlobalConstArgSet = getGlobalConstArgSet();
3023
3024 FunctionType *FTy = F.getFunctionType();
3025
3026 //
David Neto22f144c2017-06-12 14:26:21 -04003027 // Generate OpFunction.
3028 //
3029
3030 // FOps[0] : Result Type ID
3031 // FOps[1] : Function Control
3032 // FOps[2] : Function Type ID
3033 SPIRVOperandList FOps;
3034
3035 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003036 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003037
3038 // Check function attributes for SPIRV Function Control.
3039 uint32_t FuncControl = spv::FunctionControlMaskNone;
3040 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3041 FuncControl |= spv::FunctionControlInlineMask;
3042 }
3043 if (F.hasFnAttribute(Attribute::NoInline)) {
3044 FuncControl |= spv::FunctionControlDontInlineMask;
3045 }
3046 // TODO: Check llvm attribute for Function Control Pure.
3047 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3048 FuncControl |= spv::FunctionControlPureMask;
3049 }
3050 // TODO: Check llvm attribute for Function Control Const.
3051 if (F.hasFnAttribute(Attribute::ReadNone)) {
3052 FuncControl |= spv::FunctionControlConstMask;
3053 }
3054
David Neto257c3892018-04-11 13:19:45 -04003055 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003056
3057 uint32_t FTyID;
3058 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3059 SmallVector<Type *, 4> NewFuncParamTys;
3060 FunctionType *NewFTy =
3061 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3062 FTyID = lookupType(NewFTy);
3063 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003064 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003065 if (GlobalConstFuncTyMap.count(FTy)) {
3066 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3067 } else {
3068 FTyID = lookupType(FTy);
3069 }
3070 }
3071
David Neto257c3892018-04-11 13:19:45 -04003072 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003073
3074 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3075 EntryPoints.push_back(std::make_pair(&F, nextID));
3076 }
3077
3078 VMap[&F] = nextID;
3079
David Neto482550a2018-03-24 05:21:07 -07003080 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003081 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3082 }
David Neto22f144c2017-06-12 14:26:21 -04003083 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003084 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003085 SPIRVInstList.push_back(FuncInst);
3086
3087 //
3088 // Generate OpFunctionParameter for Normal function.
3089 //
3090
3091 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003092
3093 // Find Insert Point for OpDecorate.
3094 auto DecoInsertPoint =
3095 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3096 [](SPIRVInstruction *Inst) -> bool {
3097 return Inst->getOpcode() != spv::OpDecorate &&
3098 Inst->getOpcode() != spv::OpMemberDecorate &&
3099 Inst->getOpcode() != spv::OpExtInstImport;
3100 });
3101
David Neto22f144c2017-06-12 14:26:21 -04003102 // Iterate Argument for name instead of param type from function type.
3103 unsigned ArgIdx = 0;
3104 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003105 uint32_t param_id = nextID++;
3106 VMap[&Arg] = param_id;
3107
3108 if (CalledWithCoherentResource(Arg)) {
3109 // If the arg is passed a coherent resource ever, then decorate this
3110 // parameter with Coherent too.
3111 SPIRVOperandList decoration_ops;
3112 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003113 SPIRVInstList.insert(
3114 DecoInsertPoint,
3115 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003116 }
David Neto22f144c2017-06-12 14:26:21 -04003117
3118 // ParamOps[0] : Result Type ID
3119 SPIRVOperandList ParamOps;
3120
3121 // Find SPIRV instruction for parameter type.
3122 uint32_t ParamTyID = lookupType(Arg.getType());
3123 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3124 if (GlobalConstFuncTyMap.count(FTy)) {
3125 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3126 Type *EleTy = PTy->getPointerElementType();
3127 Type *ArgTy =
3128 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3129 ParamTyID = lookupType(ArgTy);
3130 GlobalConstArgSet.insert(&Arg);
3131 }
3132 }
3133 }
David Neto257c3892018-04-11 13:19:45 -04003134 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003135
3136 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003137 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003138 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003139 SPIRVInstList.push_back(ParamInst);
3140
3141 ArgIdx++;
3142 }
3143 }
3144}
3145
alan-bakerb6b09dc2018-11-08 16:59:28 -05003146void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003147 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3148 EntryPointVecType &EntryPoints = getEntryPointVec();
3149 ValueMapType &VMap = getValueMap();
3150 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3151 uint32_t &ExtInstImportID = getOpExtInstImportID();
3152 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3153
3154 // Set up insert point.
3155 auto InsertPoint = SPIRVInstList.begin();
3156
3157 //
3158 // Generate OpCapability
3159 //
3160 // TODO: Which llvm information is mapped to SPIRV Capability?
3161
3162 // Ops[0] = Capability
3163 SPIRVOperandList Ops;
3164
David Neto87846742018-04-11 17:36:22 -04003165 auto *CapInst =
3166 new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
David Neto22f144c2017-06-12 14:26:21 -04003167 SPIRVInstList.insert(InsertPoint, CapInst);
3168
3169 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003170 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3171 // Generate OpCapability for i8 type.
3172 SPIRVInstList.insert(InsertPoint,
3173 new SPIRVInstruction(spv::OpCapability,
3174 {MkNum(spv::CapabilityInt8)}));
3175 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003176 // Generate OpCapability for i16 type.
David Neto87846742018-04-11 17:36:22 -04003177 SPIRVInstList.insert(InsertPoint,
3178 new SPIRVInstruction(spv::OpCapability,
3179 {MkNum(spv::CapabilityInt16)}));
David Neto22f144c2017-06-12 14:26:21 -04003180 } else if (Ty->isIntegerTy(64)) {
3181 // Generate OpCapability for i64 type.
David Neto87846742018-04-11 17:36:22 -04003182 SPIRVInstList.insert(InsertPoint,
3183 new SPIRVInstruction(spv::OpCapability,
3184 {MkNum(spv::CapabilityInt64)}));
David Neto22f144c2017-06-12 14:26:21 -04003185 } else if (Ty->isHalfTy()) {
3186 // Generate OpCapability for half type.
3187 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003188 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3189 {MkNum(spv::CapabilityFloat16)}));
David Neto22f144c2017-06-12 14:26:21 -04003190 } else if (Ty->isDoubleTy()) {
3191 // Generate OpCapability for double type.
3192 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003193 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3194 {MkNum(spv::CapabilityFloat64)}));
David Neto22f144c2017-06-12 14:26:21 -04003195 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3196 if (STy->isOpaque()) {
David Neto565571c2017-08-21 12:00:05 -04003197 if (STy->getName().equals("opencl.image2d_wo_t") ||
3198 STy->getName().equals("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04003199 // Generate OpCapability for write only image type.
3200 SPIRVInstList.insert(
3201 InsertPoint,
3202 new SPIRVInstruction(
David Neto87846742018-04-11 17:36:22 -04003203 spv::OpCapability,
3204 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
David Neto22f144c2017-06-12 14:26:21 -04003205 }
3206 }
3207 }
3208 }
3209
David Neto5c22a252018-03-15 16:07:41 -04003210 { // OpCapability ImageQuery
3211 bool hasImageQuery = false;
3212 for (const char *imageQuery : {
3213 "_Z15get_image_width14ocl_image2d_ro",
3214 "_Z15get_image_width14ocl_image2d_wo",
3215 "_Z16get_image_height14ocl_image2d_ro",
3216 "_Z16get_image_height14ocl_image2d_wo",
3217 }) {
3218 if (module.getFunction(imageQuery)) {
3219 hasImageQuery = true;
3220 break;
3221 }
3222 }
3223 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003224 auto *ImageQueryCapInst = new SPIRVInstruction(
3225 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003226 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3227 }
3228 }
3229
David Neto22f144c2017-06-12 14:26:21 -04003230 if (hasVariablePointers()) {
3231 //
David Neto22f144c2017-06-12 14:26:21 -04003232 // Generate OpCapability.
3233 //
3234 // Ops[0] = Capability
3235 //
3236 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003237 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003238
David Neto87846742018-04-11 17:36:22 -04003239 SPIRVInstList.insert(InsertPoint,
3240 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003241 } else if (hasVariablePointersStorageBuffer()) {
3242 //
3243 // Generate OpCapability.
3244 //
3245 // Ops[0] = Capability
3246 //
3247 Ops.clear();
3248 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003249
alan-baker5b86ed72019-02-15 08:26:50 -05003250 SPIRVInstList.insert(InsertPoint,
3251 new SPIRVInstruction(spv::OpCapability, Ops));
3252 }
3253
3254 // Always add the storage buffer extension
3255 {
David Neto22f144c2017-06-12 14:26:21 -04003256 //
3257 // Generate OpExtension.
3258 //
3259 // Ops[0] = Name (Literal String)
3260 //
alan-baker5b86ed72019-02-15 08:26:50 -05003261 auto *ExtensionInst = new SPIRVInstruction(
3262 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3263 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3264 }
David Neto22f144c2017-06-12 14:26:21 -04003265
alan-baker5b86ed72019-02-15 08:26:50 -05003266 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3267 //
3268 // Generate OpExtension.
3269 //
3270 // Ops[0] = Name (Literal String)
3271 //
3272 auto *ExtensionInst = new SPIRVInstruction(
3273 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3274 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003275 }
3276
3277 if (ExtInstImportID) {
3278 ++InsertPoint;
3279 }
3280
3281 //
3282 // Generate OpMemoryModel
3283 //
3284 // Memory model for Vulkan will always be GLSL450.
3285
3286 // Ops[0] = Addressing Model
3287 // Ops[1] = Memory Model
3288 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003289 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003290
David Neto87846742018-04-11 17:36:22 -04003291 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003292 SPIRVInstList.insert(InsertPoint, MemModelInst);
3293
3294 //
3295 // Generate OpEntryPoint
3296 //
3297 for (auto EntryPoint : EntryPoints) {
3298 // Ops[0] = Execution Model
3299 // Ops[1] = EntryPoint ID
3300 // Ops[2] = Name (Literal String)
3301 // ...
3302 //
3303 // TODO: Do we need to consider Interface ID for forward references???
3304 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003305 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003306 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3307 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003308
David Neto22f144c2017-06-12 14:26:21 -04003309 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003310 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003311 }
3312
David Neto87846742018-04-11 17:36:22 -04003313 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003314 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3315 }
3316
3317 for (auto EntryPoint : EntryPoints) {
3318 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3319 ->getMetadata("reqd_work_group_size")) {
3320
3321 if (!BuiltinDimVec.empty()) {
3322 llvm_unreachable(
3323 "Kernels should have consistent work group size definition");
3324 }
3325
3326 //
3327 // Generate OpExecutionMode
3328 //
3329
3330 // Ops[0] = Entry Point ID
3331 // Ops[1] = Execution Mode
3332 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3333 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003334 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003335
3336 uint32_t XDim = static_cast<uint32_t>(
3337 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3338 uint32_t YDim = static_cast<uint32_t>(
3339 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3340 uint32_t ZDim = static_cast<uint32_t>(
3341 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3342
David Neto257c3892018-04-11 13:19:45 -04003343 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003344
David Neto87846742018-04-11 17:36:22 -04003345 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003346 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3347 }
3348 }
3349
3350 //
3351 // Generate OpSource.
3352 //
3353 // Ops[0] = SourceLanguage ID
3354 // Ops[1] = Version (LiteralNum)
3355 //
3356 Ops.clear();
Kévin Petit0fc88042019-04-09 23:25:02 +01003357 if (clspv::Option::CPlusPlus()) {
3358 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3359 } else {
3360 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
3361 }
David Neto22f144c2017-06-12 14:26:21 -04003362
David Neto87846742018-04-11 17:36:22 -04003363 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003364 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3365
3366 if (!BuiltinDimVec.empty()) {
3367 //
3368 // Generate OpDecorates for x/y/z dimension.
3369 //
3370 // Ops[0] = Target ID
3371 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003372 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003373
3374 // X Dimension
3375 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003376 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003377 SPIRVInstList.insert(InsertPoint,
3378 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003379
3380 // Y Dimension
3381 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003382 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003383 SPIRVInstList.insert(InsertPoint,
3384 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003385
3386 // Z Dimension
3387 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003388 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003389 SPIRVInstList.insert(InsertPoint,
3390 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003391 }
3392}
3393
David Netob6e2e062018-04-25 10:32:06 -04003394void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3395 // Work around a driver bug. Initializers on Private variables might not
3396 // work. So the start of the kernel should store the initializer value to the
3397 // variables. Yes, *every* entry point pays this cost if *any* entry point
3398 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3399 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003400 // TODO(dneto): Remove this at some point once fixed drivers are widely
3401 // available.
David Netob6e2e062018-04-25 10:32:06 -04003402 if (WorkgroupSizeVarID) {
3403 assert(WorkgroupSizeValueID);
3404
3405 SPIRVOperandList Ops;
3406 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3407
3408 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3409 getSPIRVInstList().push_back(Inst);
3410 }
3411}
3412
David Neto22f144c2017-06-12 14:26:21 -04003413void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3414 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3415 ValueMapType &VMap = getValueMap();
3416
David Netob6e2e062018-04-25 10:32:06 -04003417 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003418
3419 for (BasicBlock &BB : F) {
3420 // Register BasicBlock to ValueMap.
3421 VMap[&BB] = nextID;
3422
3423 //
3424 // Generate OpLabel for Basic Block.
3425 //
3426 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003427 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003428 SPIRVInstList.push_back(Inst);
3429
David Neto6dcd4712017-06-23 11:06:47 -04003430 // OpVariable instructions must come first.
3431 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003432 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3433 // Allocating a pointer requires variable pointers.
3434 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003435 setVariablePointersCapabilities(
3436 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003437 }
David Neto6dcd4712017-06-23 11:06:47 -04003438 GenerateInstruction(I);
3439 }
3440 }
3441
David Neto22f144c2017-06-12 14:26:21 -04003442 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003443 if (clspv::Option::HackInitializers()) {
3444 GenerateEntryPointInitialStores();
3445 }
David Neto22f144c2017-06-12 14:26:21 -04003446 }
3447
3448 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003449 if (!isa<AllocaInst>(I)) {
3450 GenerateInstruction(I);
3451 }
David Neto22f144c2017-06-12 14:26:21 -04003452 }
3453 }
3454}
3455
3456spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3457 const std::map<CmpInst::Predicate, spv::Op> Map = {
3458 {CmpInst::ICMP_EQ, spv::OpIEqual},
3459 {CmpInst::ICMP_NE, spv::OpINotEqual},
3460 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3461 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3462 {CmpInst::ICMP_ULT, spv::OpULessThan},
3463 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3464 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3465 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3466 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3467 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3468 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3469 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3470 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3471 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3472 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3473 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3474 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3475 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3476 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3477 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3478 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3479 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3480
3481 assert(0 != Map.count(I->getPredicate()));
3482
3483 return Map.at(I->getPredicate());
3484}
3485
3486spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3487 const std::map<unsigned, spv::Op> Map{
3488 {Instruction::Trunc, spv::OpUConvert},
3489 {Instruction::ZExt, spv::OpUConvert},
3490 {Instruction::SExt, spv::OpSConvert},
3491 {Instruction::FPToUI, spv::OpConvertFToU},
3492 {Instruction::FPToSI, spv::OpConvertFToS},
3493 {Instruction::UIToFP, spv::OpConvertUToF},
3494 {Instruction::SIToFP, spv::OpConvertSToF},
3495 {Instruction::FPTrunc, spv::OpFConvert},
3496 {Instruction::FPExt, spv::OpFConvert},
3497 {Instruction::BitCast, spv::OpBitcast}};
3498
3499 assert(0 != Map.count(I.getOpcode()));
3500
3501 return Map.at(I.getOpcode());
3502}
3503
3504spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003505 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003506 switch (I.getOpcode()) {
3507 default:
3508 break;
3509 case Instruction::Or:
3510 return spv::OpLogicalOr;
3511 case Instruction::And:
3512 return spv::OpLogicalAnd;
3513 case Instruction::Xor:
3514 return spv::OpLogicalNotEqual;
3515 }
3516 }
3517
alan-bakerb6b09dc2018-11-08 16:59:28 -05003518 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003519 {Instruction::Add, spv::OpIAdd},
3520 {Instruction::FAdd, spv::OpFAdd},
3521 {Instruction::Sub, spv::OpISub},
3522 {Instruction::FSub, spv::OpFSub},
3523 {Instruction::Mul, spv::OpIMul},
3524 {Instruction::FMul, spv::OpFMul},
3525 {Instruction::UDiv, spv::OpUDiv},
3526 {Instruction::SDiv, spv::OpSDiv},
3527 {Instruction::FDiv, spv::OpFDiv},
3528 {Instruction::URem, spv::OpUMod},
3529 {Instruction::SRem, spv::OpSRem},
3530 {Instruction::FRem, spv::OpFRem},
3531 {Instruction::Or, spv::OpBitwiseOr},
3532 {Instruction::Xor, spv::OpBitwiseXor},
3533 {Instruction::And, spv::OpBitwiseAnd},
3534 {Instruction::Shl, spv::OpShiftLeftLogical},
3535 {Instruction::LShr, spv::OpShiftRightLogical},
3536 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3537
3538 assert(0 != Map.count(I.getOpcode()));
3539
3540 return Map.at(I.getOpcode());
3541}
3542
3543void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3544 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3545 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003546 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3547 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3548
3549 // Register Instruction to ValueMap.
3550 if (0 == VMap[&I]) {
3551 VMap[&I] = nextID;
3552 }
3553
3554 switch (I.getOpcode()) {
3555 default: {
3556 if (Instruction::isCast(I.getOpcode())) {
3557 //
3558 // Generate SPIRV instructions for cast operators.
3559 //
3560
David Netod2de94a2017-08-28 17:27:47 -04003561 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003562 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003563 auto toI8 = Ty == Type::getInt8Ty(Context);
3564 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003565 // Handle zext, sext and uitofp with i1 type specially.
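// For example (illustrative): "%r = zext i1 %c to i32" becomes
//   %r = OpSelect %uint %c %uint_1 %uint_0
// while sext selects -1 instead of 1, and uitofp selects 1.0f and 0.0f.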
3566 if ((I.getOpcode() == Instruction::ZExt ||
3567 I.getOpcode() == Instruction::SExt ||
3568 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003569 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003570 //
3571 // Generate OpSelect.
3572 //
3573
3574 // Ops[0] = Result Type ID
3575 // Ops[1] = Condition ID
3576 // Ops[2] = True Constant ID
3577 // Ops[3] = False Constant ID
3578 SPIRVOperandList Ops;
3579
David Neto257c3892018-04-11 13:19:45 -04003580 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003581
David Neto22f144c2017-06-12 14:26:21 -04003582 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003583 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003584
3585 uint32_t TrueID = 0;
3586 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003587 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003588 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003589 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003590 } else {
3591 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3592 }
David Neto257c3892018-04-11 13:19:45 -04003593 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003594
3595 uint32_t FalseID = 0;
3596 if (I.getOpcode() == Instruction::ZExt) {
3597 FalseID = VMap[Constant::getNullValue(I.getType())];
3598 } else if (I.getOpcode() == Instruction::SExt) {
3599 FalseID = VMap[Constant::getNullValue(I.getType())];
3600 } else {
3601 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3602 }
David Neto257c3892018-04-11 13:19:45 -04003603 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003604
David Neto87846742018-04-11 17:36:22 -04003605 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003606 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003607 } else if (!clspv::Option::Int8Support() &&
3608 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003609 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3610 // 8 bits.
3611 // Before:
3612 // %result = trunc i32 %a to i8
3613 // After
3614 // %result = OpBitwiseAnd %uint %a %uint_255
3615
3616 SPIRVOperandList Ops;
3617
David Neto257c3892018-04-11 13:19:45 -04003618 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003619
3620 Type *UintTy = Type::getInt32Ty(Context);
3621 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003622 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003623
David Neto87846742018-04-11 17:36:22 -04003624 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003625 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003626 } else {
3627 // Ops[0] = Result Type ID
3628 // Ops[1] = Source Value ID
3629 SPIRVOperandList Ops;
3630
David Neto257c3892018-04-11 13:19:45 -04003631 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003632
David Neto87846742018-04-11 17:36:22 -04003633 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003634 SPIRVInstList.push_back(Inst);
3635 }
3636 } else if (isa<BinaryOperator>(I)) {
3637 //
3638 // Generate SPIRV instructions for binary operators.
3639 //
3640
3641 // Handle xor with i1 type specially.
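// For example (illustrative): "%r = xor i1 %p, true" becomes
//   %r = OpLogicalNot %bool %p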
3642 if (I.getOpcode() == Instruction::Xor &&
3643 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003644 ((isa<ConstantInt>(I.getOperand(0)) &&
3645 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3646 (isa<ConstantInt>(I.getOperand(1)) &&
3647 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003648 //
3649 // Generate OpLogicalNot.
3650 //
3651 // Ops[0] = Result Type ID
3652 // Ops[1] = Operand
3653 SPIRVOperandList Ops;
3654
David Neto257c3892018-04-11 13:19:45 -04003655 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003656
3657 Value *CondV = I.getOperand(0);
3658 if (isa<Constant>(I.getOperand(0))) {
3659 CondV = I.getOperand(1);
3660 }
David Neto257c3892018-04-11 13:19:45 -04003661 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003662
David Neto87846742018-04-11 17:36:22 -04003663 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003664 SPIRVInstList.push_back(Inst);
3665 } else {
3666 // Ops[0] = Result Type ID
3667 // Ops[1] = Operand 0
3668 // Ops[2] = Operand 1
3669 SPIRVOperandList Ops;
3670
David Neto257c3892018-04-11 13:19:45 -04003671 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3672 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003673
David Neto87846742018-04-11 17:36:22 -04003674 auto *Inst =
3675 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003676 SPIRVInstList.push_back(Inst);
3677 }
3678 } else {
3679 I.print(errs());
3680 llvm_unreachable("Unsupported instruction???");
3681 }
3682 break;
3683 }
3684 case Instruction::GetElementPtr: {
3685 auto &GlobalConstArgSet = getGlobalConstArgSet();
3686
3687 //
3688 // Generate OpAccessChain.
3689 //
3690 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3691
3696 // Ops[0] = Result Type ID
3697 // Ops[1] = Base ID
3698 // Ops[2] ... Ops[n] = Indexes ID
3699 SPIRVOperandList Ops;
3700
alan-bakerb6b09dc2018-11-08 16:59:28 -05003701 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003702 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3703 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3704 // Use pointer type with private address space for global constant.
3705 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003706 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003707 }
David Neto257c3892018-04-11 13:19:45 -04003708
3709 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003710
David Neto862b7d82018-06-14 18:48:37 -04003711 // Generate the base pointer.
3712 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003713
David Neto862b7d82018-06-14 18:48:37 -04003714 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003715
3716 //
3717 // Follows below rules for gep.
3718 //
David Neto862b7d82018-06-14 18:48:37 -04003719 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
3720 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003721 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3722 // first index.
3723 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3724 // use gep's first index.
3725 // 4. If none of cases 1, 2, or 3 apply, generate OpAccessChain and use
3726 // gep's first index.
3727 //
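// For example (illustrative):
//   getelementptr %S, %S* %base, i32 0, i32 2   -> OpAccessChain
//     (rule 1: the leading constant 0 index is dropped)
//   getelementptr float, float* %base, i32 %i   -> OpPtrAccessChain
//     (rule 3: the non-constant first index is kept)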
3728 spv::Op Opcode = spv::OpAccessChain;
3729 unsigned offset = 0;
3730 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003731 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003732 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003733 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003734 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003735 }
David Neto862b7d82018-06-14 18:48:37 -04003736 } else {
David Neto22f144c2017-06-12 14:26:21 -04003737 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003738 }
3739
3740 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003741 // Do we need to generate ArrayStride? Check against the GEP result type
3742 // rather than the pointer type of the base because when indexing into
3743 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3744 // for something else in the SPIR-V.
3745 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003746 auto address_space = ResultType->getAddressSpace();
3747 setVariablePointersCapabilities(address_space);
3748 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003749 case spv::StorageClassStorageBuffer:
3750 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003751 // Save the need to generate an ArrayStride decoration. But defer
3752 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003753 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003754 break;
3755 default:
3756 break;
David Neto1a1a0582017-07-07 12:01:44 -04003757 }
David Neto22f144c2017-06-12 14:26:21 -04003758 }
3759
3760 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003761 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003762 }
3763
David Neto87846742018-04-11 17:36:22 -04003764 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003765 SPIRVInstList.push_back(Inst);
3766 break;
3767 }
3768 case Instruction::ExtractValue: {
3769 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3770 // Ops[0] = Result Type ID
3771 // Ops[1] = Composite ID
3772 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3773 SPIRVOperandList Ops;
3774
David Neto257c3892018-04-11 13:19:45 -04003775 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003776
3777 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003778 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003779
3780 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003781 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003782 }
3783
David Neto87846742018-04-11 17:36:22 -04003784 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003785 SPIRVInstList.push_back(Inst);
3786 break;
3787 }
3788 case Instruction::InsertValue: {
3789 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3790 // Ops[0] = Result Type ID
3791 // Ops[1] = Object ID
3792 // Ops[2] = Composite ID
3793 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3794 SPIRVOperandList Ops;
3795
3796 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003797 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003798
3799 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003800 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003801
3802 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003803 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003804
3805 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003806 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003807 }
3808
David Neto87846742018-04-11 17:36:22 -04003809 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003810 SPIRVInstList.push_back(Inst);
3811 break;
3812 }
3813 case Instruction::Select: {
3814 //
3815 // Generate OpSelect.
3816 //
3817
3818 // Ops[0] = Result Type ID
3819 // Ops[1] = Condition ID
3820 // Ops[2] = True Constant ID
3821 // Ops[3] = False Constant ID
3822 SPIRVOperandList Ops;
3823
3824 // Find SPIRV instruction for parameter type.
3825 auto Ty = I.getType();
3826 if (Ty->isPointerTy()) {
3827 auto PointeeTy = Ty->getPointerElementType();
3828 if (PointeeTy->isStructTy() &&
3829 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3830 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003831 } else {
3832 // Selecting between pointers requires variable pointers.
3833 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
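 // If both candidates provably point into the same object no capability is
 // required; otherwise force the VariablePointers capability on.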
3834 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3835 setVariablePointers(true);
3836 }
David Neto22f144c2017-06-12 14:26:21 -04003837 }
3838 }
3839
David Neto257c3892018-04-11 13:19:45 -04003840 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3841 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003842
David Neto87846742018-04-11 17:36:22 -04003843 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003844 SPIRVInstList.push_back(Inst);
3845 break;
3846 }
3847 case Instruction::ExtractElement: {
3848 // Handle <4 x i8> type manually.
3849 Type *CompositeTy = I.getOperand(0)->getType();
3850 if (is4xi8vec(CompositeTy)) {
3851 //
3852 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3853 // <4 x i8>.
3854 //
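 // The <4 x i8> value is carried in a single 32-bit word, so conceptually:
 //   result = (word >> (index * 8)) & 0xFF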
3855
3856 //
3857 // Generate OpShiftRightLogical
3858 //
3859 // Ops[0] = Result Type ID
3860 // Ops[1] = Operand 0
3861 // Ops[2] = Operand 1
3862 //
3863 SPIRVOperandList Ops;
3864
David Neto257c3892018-04-11 13:19:45 -04003865 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04003866
3867 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003868 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04003869
3870 uint32_t Op1ID = 0;
3871 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3872 // Handle constant index.
3873 uint64_t Idx = CI->getZExtValue();
3874 Value *ShiftAmount =
3875 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3876 Op1ID = VMap[ShiftAmount];
3877 } else {
3878 // Handle variable index.
3879 SPIRVOperandList TmpOps;
3880
David Neto257c3892018-04-11 13:19:45 -04003881 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3882 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003883
3884 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003885 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003886
3887 Op1ID = nextID;
3888
David Neto87846742018-04-11 17:36:22 -04003889 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003890 SPIRVInstList.push_back(TmpInst);
3891 }
David Neto257c3892018-04-11 13:19:45 -04003892 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04003893
3894 uint32_t ShiftID = nextID;
3895
David Neto87846742018-04-11 17:36:22 -04003896 auto *Inst =
3897 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003898 SPIRVInstList.push_back(Inst);
3899
3900 //
3901 // Generate OpBitwiseAnd
3902 //
3903 // Ops[0] = Result Type ID
3904 // Ops[1] = Operand 0
3905 // Ops[2] = Operand 1
3906 //
3907 Ops.clear();
3908
David Neto257c3892018-04-11 13:19:45 -04003909 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04003910
3911 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04003912 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04003913
David Neto9b2d6252017-09-06 15:47:37 -04003914 // Reset mapping for this value to the result of the bitwise and.
3915 VMap[&I] = nextID;
3916
David Neto87846742018-04-11 17:36:22 -04003917 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003918 SPIRVInstList.push_back(Inst);
3919 break;
3920 }
3921
3922 // Ops[0] = Result Type ID
3923 // Ops[1] = Composite ID
3924 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3925 SPIRVOperandList Ops;
3926
David Neto257c3892018-04-11 13:19:45 -04003927 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003928
3929 spv::Op Opcode = spv::OpCompositeExtract;
3930 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04003931 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04003932 } else {
David Neto257c3892018-04-11 13:19:45 -04003933 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003934 Opcode = spv::OpVectorExtractDynamic;
3935 }
3936
David Neto87846742018-04-11 17:36:22 -04003937 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003938 SPIRVInstList.push_back(Inst);
3939 break;
3940 }
3941 case Instruction::InsertElement: {
3942 // Handle <4 x i8> type manually.
3943 Type *CompositeTy = I.getOperand(0)->getType();
3944 if (is4xi8vec(CompositeTy)) {
3945 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
3946 uint32_t CstFFID = VMap[CstFF];
3947
3948 uint32_t ShiftAmountID = 0;
3949 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
3950 // Handle constant index.
3951 uint64_t Idx = CI->getZExtValue();
3952 Value *ShiftAmount =
3953 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3954 ShiftAmountID = VMap[ShiftAmount];
3955 } else {
3956 // Handle variable index.
3957 SPIRVOperandList TmpOps;
3958
David Neto257c3892018-04-11 13:19:45 -04003959 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3960 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003961
3962 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003963 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003964
3965 ShiftAmountID = nextID;
3966
David Neto87846742018-04-11 17:36:22 -04003967 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003968 SPIRVInstList.push_back(TmpInst);
3969 }
3970
3971 //
3972 // Generate mask operations.
3973 //
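 // Conceptually, for the 32-bit word holding the <4 x i8> value:
 //   mask    = 0xFF << (index * 8)
 //   cleared = word & ~mask
 //   result  = cleared | (value << (index * 8))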
3974
3975 // ShiftLeft mask according to index of insertelement.
3976 SPIRVOperandList Ops;
3977
David Neto257c3892018-04-11 13:19:45 -04003978 const uint32_t ResTyID = lookupType(CompositeTy);
3979 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04003980
3981 uint32_t MaskID = nextID;
3982
David Neto87846742018-04-11 17:36:22 -04003983 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003984 SPIRVInstList.push_back(Inst);
3985
3986 // Inverse mask.
3987 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003988 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04003989
3990 uint32_t InvMaskID = nextID;
3991
David Neto87846742018-04-11 17:36:22 -04003992 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003993 SPIRVInstList.push_back(Inst);
3994
3995 // Apply mask.
3996 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003997 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04003998
3999 uint32_t OrgValID = nextID;
4000
David Neto87846742018-04-11 17:36:22 -04004001 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004002 SPIRVInstList.push_back(Inst);
4003
4004 // Create correct value according to index of insertelement.
4005 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004006 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4007 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004008
4009 uint32_t InsertValID = nextID;
4010
David Neto87846742018-04-11 17:36:22 -04004011 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004012 SPIRVInstList.push_back(Inst);
4013
4014 // Insert value to original value.
4015 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004016 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004017
David Netoa394f392017-08-26 20:45:29 -04004018 VMap[&I] = nextID;
4019
David Neto87846742018-04-11 17:36:22 -04004020 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004021 SPIRVInstList.push_back(Inst);
4022
4023 break;
4024 }
4025
David Neto22f144c2017-06-12 14:26:21 -04004026 SPIRVOperandList Ops;
4027
James Priced26efea2018-06-09 23:28:32 +01004028 // Ops[0] = Result Type ID
4029 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004030
4031 spv::Op Opcode = spv::OpCompositeInsert;
4032 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004033 const auto value = CI->getZExtValue();
4034 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004035 // Ops[1] = Object ID
4036 // Ops[2] = Composite ID
4037 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004038 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004039 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004040 } else {
James Priced26efea2018-06-09 23:28:32 +01004041 // Ops[1] = Composite ID
4042 // Ops[2] = Object ID
4043 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004044 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004045 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004046 Opcode = spv::OpVectorInsertDynamic;
4047 }
4048
David Neto87846742018-04-11 17:36:22 -04004049 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004050 SPIRVInstList.push_back(Inst);
4051 break;
4052 }
4053 case Instruction::ShuffleVector: {
4054 // Ops[0] = Result Type ID
4055 // Ops[1] = Vector 1 ID
4056 // Ops[2] = Vector 2 ID
4057 // Ops[3] ... Ops[n] = Components (Literal Number)
4058 SPIRVOperandList Ops;
4059
David Neto257c3892018-04-11 13:19:45 -04004060 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4061 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004062
4063 uint64_t NumElements = 0;
4064 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4065 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4066
4067 if (Cst->isNullValue()) {
4068 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004069 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004070 }
4071 } else if (const ConstantDataSequential *CDS =
4072 dyn_cast<ConstantDataSequential>(Cst)) {
4073 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4074 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004075 const auto value = CDS->getElementAsInteger(i);
4076 assert(value <= UINT32_MAX);
4077 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004078 }
4079 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4080 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4081 auto Op = CV->getOperand(i);
4082
4083 uint32_t literal = 0;
4084
4085 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4086 literal = static_cast<uint32_t>(CI->getZExtValue());
4087 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
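 // Per OpVectorShuffle, a component literal of 0xFFFFFFFF means the result
 // component has no source and is undefined.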
4088 literal = 0xFFFFFFFFu;
4089 } else {
4090 Op->print(errs());
4091 llvm_unreachable("Unsupported element in ConstantVector!");
4092 }
4093
David Neto257c3892018-04-11 13:19:45 -04004094 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004095 }
4096 } else {
4097 Cst->print(errs());
4098 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4099 }
4100 }
4101
David Neto87846742018-04-11 17:36:22 -04004102 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004103 SPIRVInstList.push_back(Inst);
4104 break;
4105 }
4106 case Instruction::ICmp:
4107 case Instruction::FCmp: {
4108 CmpInst *CmpI = cast<CmpInst>(&I);
4109
David Netod4ca2e62017-07-06 18:47:35 -04004110 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004111 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004112 if (isa<PointerType>(ArgTy)) {
4113 CmpI->print(errs());
4114 std::string name = I.getParent()->getParent()->getName();
4115 errs()
4116 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4117 << "in function " << name << "\n";
4118 llvm_unreachable("Pointer equality check is invalid");
4119 break;
4120 }
4121
David Neto257c3892018-04-11 13:19:45 -04004122 // Ops[0] = Result Type ID
4123 // Ops[1] = Operand 1 ID
4124 // Ops[2] = Operand 2 ID
4125 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004126
David Neto257c3892018-04-11 13:19:45 -04004127 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4128 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004129
4130 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004131 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004132 SPIRVInstList.push_back(Inst);
4133 break;
4134 }
4135 case Instruction::Br: {
4136 // The branch instruction is deferred because it needs the label IDs. Record
4137 // the slot's location in SPIRVInstructionList.
4138 DeferredInsts.push_back(
4139 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4140 break;
4141 }
4142 case Instruction::Switch: {
4143 I.print(errs());
4144 llvm_unreachable("Unsupported instruction???");
4145 break;
4146 }
4147 case Instruction::IndirectBr: {
4148 I.print(errs());
4149 llvm_unreachable("Unsupported instruction???");
4150 break;
4151 }
4152 case Instruction::PHI: {
4153 // The PHI instruction is deferred because it needs the incoming labels' IDs.
4154 // Record the slot's location in SPIRVInstructionList.
4155 DeferredInsts.push_back(
4156 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4157 break;
4158 }
4159 case Instruction::Alloca: {
4160 //
4161 // Generate OpVariable.
4162 //
4163 // Ops[0] : Result Type ID
4164 // Ops[1] : Storage Class
4165 SPIRVOperandList Ops;
4166
David Neto257c3892018-04-11 13:19:45 -04004167 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004168
David Neto87846742018-04-11 17:36:22 -04004169 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004170 SPIRVInstList.push_back(Inst);
4171 break;
4172 }
4173 case Instruction::Load: {
4174 LoadInst *LD = cast<LoadInst>(&I);
4175 //
4176 // Generate OpLoad.
4177 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004178
alan-baker5b86ed72019-02-15 08:26:50 -05004179 if (LD->getType()->isPointerTy()) {
4180 // Loading a pointer requires variable pointers.
4181 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4182 }
David Neto22f144c2017-06-12 14:26:21 -04004183
David Neto0a2f98d2017-09-15 19:38:40 -04004184 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004185 uint32_t PointerID = VMap[LD->getPointerOperand()];
4186
4187 // This is a hack to work around what looks like a driver bug.
4188 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004189 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4190 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004191 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004192 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004193 // Generate a bitwise-and of the original value with itself.
4194 // We should have been able to get away with just an OpCopyObject,
4195 // but we need something more complex to get past certain driver bugs.
4196 // This is ridiculous, but necessary.
4197 // TODO(dneto): Revisit this once drivers fix their bugs.
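 // The emitted pattern is effectively:
 //   %result = OpBitwiseAnd %type %WorkgroupSizeValue %WorkgroupSizeValue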
4198
4199 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004200 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4201 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004202
David Neto87846742018-04-11 17:36:22 -04004203 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004204 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004205 break;
4206 }
4207
4208 // This is the normal path. Generate a load.
4209
David Neto22f144c2017-06-12 14:26:21 -04004210 // Ops[0] = Result Type ID
4211 // Ops[1] = Pointer ID
4212 // Ops[2] ... Ops[n] = Optional Memory Access
4213 //
4214 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004215
David Neto22f144c2017-06-12 14:26:21 -04004216 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004217 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004218
David Neto87846742018-04-11 17:36:22 -04004219 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004220 SPIRVInstList.push_back(Inst);
4221 break;
4222 }
4223 case Instruction::Store: {
4224 StoreInst *ST = cast<StoreInst>(&I);
4225 //
4226 // Generate OpStore.
4227 //
4228
alan-baker5b86ed72019-02-15 08:26:50 -05004229 if (ST->getValueOperand()->getType()->isPointerTy()) {
4230 // Storing a pointer requires variable pointers.
4231 setVariablePointersCapabilities(
4232 ST->getValueOperand()->getType()->getPointerAddressSpace());
4233 }
4234
David Neto22f144c2017-06-12 14:26:21 -04004235 // Ops[0] = Pointer ID
4236 // Ops[1] = Object ID
4237 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4238 //
4239 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004240 SPIRVOperandList Ops;
4241 Ops << MkId(VMap[ST->getPointerOperand()])
4242 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004243
David Neto87846742018-04-11 17:36:22 -04004244 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004245 SPIRVInstList.push_back(Inst);
4246 break;
4247 }
4248 case Instruction::AtomicCmpXchg: {
4249 I.print(errs());
4250 llvm_unreachable("Unsupported instruction???");
4251 break;
4252 }
4253 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004254 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4255
4256 spv::Op opcode;
4257
4258 switch (AtomicRMW->getOperation()) {
4259 default:
4260 I.print(errs());
4261 llvm_unreachable("Unsupported instruction???");
4262 case llvm::AtomicRMWInst::Add:
4263 opcode = spv::OpAtomicIAdd;
4264 break;
4265 case llvm::AtomicRMWInst::Sub:
4266 opcode = spv::OpAtomicISub;
4267 break;
4268 case llvm::AtomicRMWInst::Xchg:
4269 opcode = spv::OpAtomicExchange;
4270 break;
4271 case llvm::AtomicRMWInst::Min:
4272 opcode = spv::OpAtomicSMin;
4273 break;
4274 case llvm::AtomicRMWInst::Max:
4275 opcode = spv::OpAtomicSMax;
4276 break;
4277 case llvm::AtomicRMWInst::UMin:
4278 opcode = spv::OpAtomicUMin;
4279 break;
4280 case llvm::AtomicRMWInst::UMax:
4281 opcode = spv::OpAtomicUMax;
4282 break;
4283 case llvm::AtomicRMWInst::And:
4284 opcode = spv::OpAtomicAnd;
4285 break;
4286 case llvm::AtomicRMWInst::Or:
4287 opcode = spv::OpAtomicOr;
4288 break;
4289 case llvm::AtomicRMWInst::Xor:
4290 opcode = spv::OpAtomicXor;
4291 break;
4292 }
4293
4294 //
4295 // Generate OpAtomic*.
4296 //
4297 SPIRVOperandList Ops;
4298
David Neto257c3892018-04-11 13:19:45 -04004299 Ops << MkId(lookupType(I.getType()))
4300 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004301
4302 auto IntTy = Type::getInt32Ty(I.getContext());
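 // The atomic is emitted with Device scope and sequentially consistent
 // semantics on uniform memory, both passed as constant operand IDs below.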
Neil Henning39672102017-09-29 14:33:13 +01004303 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004304 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004305
4306 const auto ConstantMemorySemantics = ConstantInt::get(
4307 IntTy, spv::MemorySemanticsUniformMemoryMask |
4308 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004309 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004310
David Neto257c3892018-04-11 13:19:45 -04004311 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004312
4313 VMap[&I] = nextID;
4314
David Neto87846742018-04-11 17:36:22 -04004315 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004316 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004317 break;
4318 }
4319 case Instruction::Fence: {
4320 I.print(errs());
4321 llvm_unreachable("Unsupported instruction???");
4322 break;
4323 }
4324 case Instruction::Call: {
4325 CallInst *Call = dyn_cast<CallInst>(&I);
4326 Function *Callee = Call->getCalledFunction();
4327
Alan Baker202c8c72018-08-13 13:47:44 -04004328 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004329 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4330 // Generate an OpLoad
4331 SPIRVOperandList Ops;
4332 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004333
David Neto862b7d82018-06-14 18:48:37 -04004334 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4335 << MkId(ResourceVarDeferredLoadCalls[Call]);
4336
4337 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4338 SPIRVInstList.push_back(Inst);
4339 VMap[Call] = load_id;
4340 break;
4341
4342 } else {
4343 // This maps to an OpVariable we've already generated.
4344 // No code is generated for the call.
4345 }
4346 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004347 } else if (Callee->getName().startswith(
4348 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004349 // Don't codegen an instruction here, but instead map this call directly
4350 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004351 int spec_id = static_cast<int>(
4352 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004353 const auto &info = LocalSpecIdInfoMap[spec_id];
4354 VMap[Call] = info.variable_id;
4355 break;
David Neto862b7d82018-06-14 18:48:37 -04004356 }
4357
4358 // Sampler initializers become a load of the corresponding sampler.
4359
Kévin Petitdf71de32019-04-09 14:09:50 +01004360 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004361 // Map this to a load from the variable.
4362 const auto index_into_sampler_map =
4363 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4364
4365 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004366 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004367 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004368
David Neto257c3892018-04-11 13:19:45 -04004369 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004370 << MkId(SamplerMapIndexToIDMap[static_cast<unsigned>(
4371 index_into_sampler_map)]);
David Neto22f144c2017-06-12 14:26:21 -04004372
David Neto862b7d82018-06-14 18:48:37 -04004373 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004374 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004375 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004376 break;
4377 }
4378
Kévin Petit349c9502019-03-28 17:24:14 +00004379 // Handle SPIR-V intrinsics
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004380 spv::Op opcode =
4381 StringSwitch<spv::Op>(Callee->getName())
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004382 .Case("spirv.atomic_inc", spv::OpAtomicIIncrement)
4383 .Case("spirv.atomic_dec", spv::OpAtomicIDecrement)
4384 .Case("spirv.atomic_compare_exchange", spv::OpAtomicCompareExchange)
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004385 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4386 .Case("__spirv_control_barrier", spv::OpControlBarrier)
4387 .Case("__spirv_memory_barrier", spv::OpMemoryBarrier)
4388 .StartsWith("spirv.store_null", spv::OpStore)
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004389 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004390
Kévin Petit617a76d2019-04-04 13:54:16 +01004391 // If the switch above didn't have an entry, maybe the intrinsic
4392 // is using the name-mangling logic.
4393 bool usesMangler = false;
4394 if (opcode == spv::OpNop) {
4395 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4396 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4397 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4398 usesMangler = true;
4399 }
4400 }
4401
Kévin Petit349c9502019-03-28 17:24:14 +00004402 if (opcode != spv::OpNop) {
4403
David Neto22f144c2017-06-12 14:26:21 -04004404 SPIRVOperandList Ops;
4405
Kévin Petit349c9502019-03-28 17:24:14 +00004406 if (!I.getType()->isVoidTy()) {
4407 Ops << MkId(lookupType(I.getType()));
4408 }
David Neto22f144c2017-06-12 14:26:21 -04004409
Kévin Petit617a76d2019-04-04 13:54:16 +01004410 unsigned firstOperand = usesMangler ? 1 : 0;
4411 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004412 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004413 }
4414
Kévin Petit349c9502019-03-28 17:24:14 +00004415 if (!I.getType()->isVoidTy()) {
4416 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004417 }
4418
Kévin Petit349c9502019-03-28 17:24:14 +00004419 SPIRVInstruction *Inst;
4420 if (!I.getType()->isVoidTy()) {
4421 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4422 } else {
4423 Inst = new SPIRVInstruction(opcode, Ops);
4424 }
Kévin Petit8a560882019-03-21 15:24:34 +00004425 SPIRVInstList.push_back(Inst);
4426 break;
4427 }
4428
David Neto22f144c2017-06-12 14:26:21 -04004429 if (Callee->getName().startswith("_Z3dot")) {
4430 // If the argument is a vector type, generate OpDot
4431 if (Call->getArgOperand(0)->getType()->isVectorTy()) {
4432 //
4433 // Generate OpDot.
4434 //
4435 SPIRVOperandList Ops;
4436
David Neto257c3892018-04-11 13:19:45 -04004437 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004438
4439 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004440 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004441 }
4442
4443 VMap[&I] = nextID;
4444
David Neto87846742018-04-11 17:36:22 -04004445 auto *Inst = new SPIRVInstruction(spv::OpDot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004446 SPIRVInstList.push_back(Inst);
4447 } else {
4448 //
4449 // Generate OpFMul.
4450 //
4451 SPIRVOperandList Ops;
4452
David Neto257c3892018-04-11 13:19:45 -04004453 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004454
4455 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004456 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004457 }
4458
4459 VMap[&I] = nextID;
4460
David Neto87846742018-04-11 17:36:22 -04004461 auto *Inst = new SPIRVInstruction(spv::OpFMul, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004462 SPIRVInstList.push_back(Inst);
4463 }
4464 break;
4465 }
4466
David Neto8505ebf2017-10-13 18:50:50 -04004467 if (Callee->getName().startswith("_Z4fmod")) {
4468 // OpenCL fmod(x,y) is x - y * trunc(x/y)
4469 // The sign for a non-zero result is taken from x.
4470 // (Try an example.)
4471 // So translate to OpFRem
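 // For example, fmod(-7.5f, 2.0f) == -1.5f (sign follows x), matching OpFRem;
 // OpFMod would instead yield +0.5f (sign follows y).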
4472
4473 SPIRVOperandList Ops;
4474
David Neto257c3892018-04-11 13:19:45 -04004475 Ops << MkId(lookupType(I.getType()));
David Neto8505ebf2017-10-13 18:50:50 -04004476
4477 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004478 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto8505ebf2017-10-13 18:50:50 -04004479 }
4480
4481 VMap[&I] = nextID;
4482
David Neto87846742018-04-11 17:36:22 -04004483 auto *Inst = new SPIRVInstruction(spv::OpFRem, nextID++, Ops);
David Neto8505ebf2017-10-13 18:50:50 -04004484 SPIRVInstList.push_back(Inst);
4485 break;
4486 }
4487
David Neto22f144c2017-06-12 14:26:21 -04004488 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4489 if (Callee->getName().startswith("spirv.copy_memory")) {
4490 //
4491 // Generate OpCopyMemory.
4492 //
4493
4494 // Ops[0] = Dst ID
4495 // Ops[1] = Src ID
4496 // Ops[2] = Memory Access
4497 // Ops[3] = Alignment
4498
4499 auto IsVolatile =
4500 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4501
4502 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4503 : spv::MemoryAccessMaskNone;
4504
4505 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4506
4507 auto Alignment =
4508 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4509
David Neto257c3892018-04-11 13:19:45 -04004510 SPIRVOperandList Ops;
4511 Ops << MkId(VMap[Call->getArgOperand(0)])
4512 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4513 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004514
David Neto87846742018-04-11 17:36:22 -04004515 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004516
4517 SPIRVInstList.push_back(Inst);
4518
4519 break;
4520 }
4521
David Neto22f144c2017-06-12 14:26:21 -04004522 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4523 // Additionally, OpTypeSampledImage is generated.
4524 if (Callee->getName().equals(
4525 "_Z11read_imagef14ocl_image2d_ro11ocl_samplerDv2_f") ||
4526 Callee->getName().equals(
4527 "_Z11read_imagef14ocl_image3d_ro11ocl_samplerDv4_f")) {
4528 //
4529 // Generate OpSampledImage.
4530 //
4531 // Ops[0] = Result Type ID
4532 // Ops[1] = Image ID
4533 // Ops[2] = Sampler ID
4534 //
4535 SPIRVOperandList Ops;
4536
4537 Value *Image = Call->getArgOperand(0);
4538 Value *Sampler = Call->getArgOperand(1);
4539 Value *Coordinate = Call->getArgOperand(2);
4540
4541 TypeMapType &OpImageTypeMap = getImageTypeMap();
4542 Type *ImageTy = Image->getType()->getPointerElementType();
4543 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004544 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004545 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004546
4547 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004548
4549 uint32_t SampledImageID = nextID;
4550
David Neto87846742018-04-11 17:36:22 -04004551 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004552 SPIRVInstList.push_back(Inst);
4553
4554 //
4555 // Generate OpImageSampleExplicitLod.
4556 //
4557 // Ops[0] = Result Type ID
4558 // Ops[1] = Sampled Image ID
4559 // Ops[2] = Coordinate ID
4560 // Ops[3] = Image Operands Type ID
4561 // Ops[4] ... Ops[n] = Operands ID
4562 //
4563 Ops.clear();
4564
David Neto257c3892018-04-11 13:19:45 -04004565 Ops << MkId(lookupType(Call->getType())) << MkId(SampledImageID)
4566 << MkId(VMap[Coordinate]) << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004567
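 // Sample with an explicit LOD of 0.0, supplied by the constant below.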
4568 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004569 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004570
4571 VMap[&I] = nextID;
4572
David Neto87846742018-04-11 17:36:22 -04004573 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004574 SPIRVInstList.push_back(Inst);
4575 break;
4576 }
4577
4578 // write_imagef is mapped to OpImageWrite.
4579 if (Callee->getName().equals(
4580 "_Z12write_imagef14ocl_image2d_woDv2_iDv4_f") ||
4581 Callee->getName().equals(
4582 "_Z12write_imagef14ocl_image3d_woDv4_iDv4_f")) {
4583 //
4584 // Generate OpImageWrite.
4585 //
4586 // Ops[0] = Image ID
4587 // Ops[1] = Coordinate ID
4588 // Ops[2] = Texel ID
4589 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4590 // Ops[4] ... Ops[n] = (Optional) Operands ID
4591 //
4592 SPIRVOperandList Ops;
4593
4594 Value *Image = Call->getArgOperand(0);
4595 Value *Coordinate = Call->getArgOperand(1);
4596 Value *Texel = Call->getArgOperand(2);
4597
4598 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004599 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004600 uint32_t TexelID = VMap[Texel];
David Neto257c3892018-04-11 13:19:45 -04004601 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004602
David Neto87846742018-04-11 17:36:22 -04004603 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004604 SPIRVInstList.push_back(Inst);
4605 break;
4606 }
4607
David Neto5c22a252018-03-15 16:07:41 -04004608 // get_image_width and get_image_height are mapped to OpImageQuerySize
4609 if (Callee->getName().equals("_Z15get_image_width14ocl_image2d_ro") ||
4610 Callee->getName().equals("_Z15get_image_width14ocl_image2d_wo") ||
4611 Callee->getName().equals("_Z16get_image_height14ocl_image2d_ro") ||
4612 Callee->getName().equals("_Z16get_image_height14ocl_image2d_wo")) {
4613 //
4614 // Generate OpImageQuerySize, then pull out the right component.
4615 // Assume 2D image for now.
4616 //
4617 // Ops[0] = Image ID
4618 //
4619 // %sizes = OpImageQuerySize %uint2 %im
4620 // %result = OpCompositeExtract %uint %sizes 0-or-1
4621 SPIRVOperandList Ops;
4622
4623 // Implement:
4624 // %sizes = OpImageQuerySize %uint2 %im
4625 uint32_t SizesTypeID =
4626 TypeMap[VectorType::get(Type::getInt32Ty(Context), 2)];
David Neto5c22a252018-03-15 16:07:41 -04004627 Value *Image = Call->getArgOperand(0);
4628 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004629 Ops << MkId(SizesTypeID) << MkId(ImageID);
David Neto5c22a252018-03-15 16:07:41 -04004630
4631 uint32_t SizesID = nextID++;
David Neto87846742018-04-11 17:36:22 -04004632 auto *QueryInst =
4633 new SPIRVInstruction(spv::OpImageQuerySize, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004634 SPIRVInstList.push_back(QueryInst);
4635
4636 // Reset value map entry since we generated an intermediate instruction.
4637 VMap[&I] = nextID;
4638
4639 // Implement:
4640 // %result = OpCompositeExtract %uint %sizes 0-or-1
4641 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004642 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004643
4644 uint32_t component = Callee->getName().contains("height") ? 1 : 0;
David Neto257c3892018-04-11 13:19:45 -04004645 Ops << MkNum(component);
David Neto5c22a252018-03-15 16:07:41 -04004646
David Neto87846742018-04-11 17:36:22 -04004647 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004648 SPIRVInstList.push_back(Inst);
4649 break;
4650 }
4651
David Neto22f144c2017-06-12 14:26:21 -04004652 // The call instruction is deferred because it needs the callee function's ID.
4653 // Record the slot's location in SPIRVInstructionList.
4654 DeferredInsts.push_back(
4655 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4656
David Neto3fbb4072017-10-16 11:28:14 -04004657 // Check whether the implementation of this call uses an extended
4658 // instruction plus one more value-producing instruction. If so, then
4659 // reserve the id for the extra value-producing slot.
4660 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4661 if (EInst != kGlslExtInstBad) {
4662 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004663 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004664 VMap[&I] = nextID;
4665 nextID++;
4666 }
4667 break;
4668 }
4669 case Instruction::Ret: {
4670 unsigned NumOps = I.getNumOperands();
4671 if (NumOps == 0) {
4672 //
4673 // Generate OpReturn.
4674 //
David Neto87846742018-04-11 17:36:22 -04004675 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004676 } else {
4677 //
4678 // Generate OpReturnValue.
4679 //
4680
4681 // Ops[0] = Return Value ID
4682 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004683
4684 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004685
David Neto87846742018-04-11 17:36:22 -04004686 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004687 SPIRVInstList.push_back(Inst);
4688 break;
4689 }
4690 break;
4691 }
4692 }
4693}
4694
4695void SPIRVProducerPass::GenerateFuncEpilogue() {
4696 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4697
4698 //
4699 // Generate OpFunctionEnd
4700 //
4701
David Neto87846742018-04-11 17:36:22 -04004702 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004703 SPIRVInstList.push_back(Inst);
4704}
4705
4706bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004707 // Don't specialize <4 x i8> if i8 is generally supported.
4708 if (clspv::Option::Int8Support())
4709 return false;
4710
David Neto22f144c2017-06-12 14:26:21 -04004711 LLVMContext &Context = Ty->getContext();
4712 if (Ty->isVectorTy()) {
4713 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4714 Ty->getVectorNumElements() == 4) {
4715 return true;
4716 }
4717 }
4718
4719 return false;
4720}
4721
David Neto257c3892018-04-11 13:19:45 -04004722uint32_t SPIRVProducerPass::GetI32Zero() {
4723 if (0 == constant_i32_zero_id_) {
4724 llvm_unreachable("Requesting a 32-bit integer constant but it is not "
4725 "defined in the SPIR-V module");
4726 }
4727 return constant_i32_zero_id_;
4728}
4729
David Neto22f144c2017-06-12 14:26:21 -04004730void SPIRVProducerPass::HandleDeferredInstruction() {
4731 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4732 ValueMapType &VMap = getValueMap();
4733 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4734
4735 for (auto DeferredInst = DeferredInsts.rbegin();
4736 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4737 Value *Inst = std::get<0>(*DeferredInst);
4738 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4739 if (InsertPoint != SPIRVInstList.end()) {
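 // OpPhi instructions must be the first instructions in a block, so advance
 // the insertion point past any phis.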
4740 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4741 ++InsertPoint;
4742 }
4743 }
4744
4745 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
4746 // Check whether the basic block containing this branch instruction is a
4747 // loop header. If it is, generate OpLoopMerge and
4748 // OpBranchConditional.
4749 Function *Func = Br->getParent()->getParent();
4750 DominatorTree &DT =
4751 getAnalysis<DominatorTreeWrapperPass>(*Func).getDomTree();
4752 const LoopInfo &LI =
4753 getAnalysis<LoopInfoWrapperPass>(*Func).getLoopInfo();
4754
4755 BasicBlock *BrBB = Br->getParent();
alan-baker49531082019-06-05 17:30:56 -04004756 Loop *L = LI.getLoopFor(BrBB);
David Neto22f144c2017-06-12 14:26:21 -04004757 if (LI.isLoopHeader(BrBB)) {
4758 Value *ContinueBB = nullptr;
4759 Value *MergeBB = nullptr;
4760
David Neto22f144c2017-06-12 14:26:21 -04004761 MergeBB = L->getExitBlock();
4762 if (!MergeBB) {
4763 // The StructurizeCFG pass converts the CFG into a triangle shape with
4764 // single-entry/single-exit regions. As a result, a loop should not
4765 // have multiple exits.
4766 llvm_unreachable("Loop has multiple exits???");
4767 }
4768
4769 if (L->isLoopLatch(BrBB)) {
4770 ContinueBB = BrBB;
4771 } else {
4772 // Per SPIR-V spec section 2.11, the Continue Target must dominate the
4773 // back-edge block.
4774 BasicBlock *Header = L->getHeader();
4775 BasicBlock *Latch = L->getLoopLatch();
4776 for (BasicBlock *BB : L->blocks()) {
4777 if (BB == Header) {
4778 continue;
4779 }
4780
4781 // Check whether block dominates block with back-edge.
4782 if (DT.dominates(BB, Latch)) {
4783 ContinueBB = BB;
4784 }
4785 }
4786
4787 if (!ContinueBB) {
4788 llvm_unreachable("Wrong continue block from loop");
4789 }
4790 }
4791
4792 //
4793 // Generate OpLoopMerge.
4794 //
4795 // Ops[0] = Merge Block ID
4796 // Ops[1] = Continue Target ID
4797 // Ops[2] = Selection Control
4798 SPIRVOperandList Ops;
4799
4800 // The StructurizeCFG pass has already restructured the CFG. Just use the
4801 // false block of the branch instruction as the merge block.
4802 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004803 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004804 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
4805 << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004806
David Neto87846742018-04-11 17:36:22 -04004807 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004808 SPIRVInstList.insert(InsertPoint, MergeInst);
4809
4810 } else if (Br->isConditional()) {
alan-baker49531082019-06-05 17:30:56 -04004811 // Generate a selection merge unless this is a back-edge block.
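 // A conditional branch on a back edge is already governed by the loop
 // construct (the OpLoopMerge at the loop header), so it must not also
 // carry an OpSelectionMerge.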
4812 bool HasBackedge = false;
4813 while (L && !HasBackedge) {
4814 if (L->isLoopLatch(BrBB)) {
4815 HasBackedge = true;
David Neto22f144c2017-06-12 14:26:21 -04004816 }
alan-baker49531082019-06-05 17:30:56 -04004817 L = L->getParentLoop();
David Neto22f144c2017-06-12 14:26:21 -04004818 }
alan-baker49531082019-06-05 17:30:56 -04004819 if (!HasBackedge) {
David Neto22f144c2017-06-12 14:26:21 -04004820 //
4821 // Generate OpSelectionMerge.
4822 //
4823 // Ops[0] = Merge Block ID
4824 // Ops[1] = Selection Control
4825 SPIRVOperandList Ops;
4826
4827 // The StructurizeCFG pass has already restructured the CFG. Just use the
4828 // false block of the branch instruction as the merge block.
4829 uint32_t MergeBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004830 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004831
David Neto87846742018-04-11 17:36:22 -04004832 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004833 SPIRVInstList.insert(InsertPoint, MergeInst);
4834 }
4835 }
4836
4837 if (Br->isConditional()) {
4838 //
4839 // Generate OpBranchConditional.
4840 //
4841 // Ops[0] = Condition ID
4842 // Ops[1] = True Label ID
4843 // Ops[2] = False Label ID
4844 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4845 SPIRVOperandList Ops;
4846
4847 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004848 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004849 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004850
4851 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004852
David Neto87846742018-04-11 17:36:22 -04004853 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004854 SPIRVInstList.insert(InsertPoint, BrInst);
4855 } else {
4856 //
4857 // Generate OpBranch.
4858 //
4859 // Ops[0] = Target Label ID
4860 SPIRVOperandList Ops;
4861
4862 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004863 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004864
David Neto87846742018-04-11 17:36:22 -04004865 SPIRVInstList.insert(InsertPoint,
4866 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004867 }
4868 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004869 if (PHI->getType()->isPointerTy()) {
4870 // OpPhi on pointers requires variable pointers.
4871 setVariablePointersCapabilities(
4872 PHI->getType()->getPointerAddressSpace());
4873 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4874 setVariablePointers(true);
4875 }
4876 }
4877
David Neto22f144c2017-06-12 14:26:21 -04004878 //
4879 // Generate OpPhi.
4880 //
4881 // Ops[0] = Result Type ID
4882 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
4883 SPIRVOperandList Ops;
4884
David Neto257c3892018-04-11 13:19:45 -04004885 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004886
David Neto22f144c2017-06-12 14:26:21 -04004887 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4888 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04004889 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04004890 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004891 }
4892
4893 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004894 InsertPoint,
4895 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004896 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4897 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004898 auto callee_name = Callee->getName();
4899 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004900
4901 if (EInst) {
4902 uint32_t &ExtInstImportID = getOpExtInstImportID();
4903
4904 //
4905 // Generate OpExtInst.
4906 //
4907
4908 // Ops[0] = Result Type ID
4909 // Ops[1] = Set ID (OpExtInstImport ID)
4910 // Ops[2] = Instruction Number (Literal Number)
4911 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
4912 SPIRVOperandList Ops;
4913
David Neto862b7d82018-06-14 18:48:37 -04004914 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
4915 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004916
David Neto22f144c2017-06-12 14:26:21 -04004917 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4918 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004919 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004920 }
4921
David Neto87846742018-04-11 17:36:22 -04004922 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
4923 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004924 SPIRVInstList.insert(InsertPoint, ExtInst);
4925
David Neto3fbb4072017-10-16 11:28:14 -04004926 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
4927 if (IndirectExtInst != kGlslExtInstBad) {
4928 // Generate one more instruction that uses the result of the extended
4929 // instruction. Its result id is one more than the id of the
4930 // extended instruction.
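 // For example, clz(x) is emitted as FindUMsb(x) followed by OpISub(31, msb),
 // and acospi/asinpi/atanpi/atan2pi multiply the GLSL result by 1/pi using
 // OpFMul.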
David Neto22f144c2017-06-12 14:26:21 -04004931 LLVMContext &Context =
4932 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04004933
David Neto3fbb4072017-10-16 11:28:14 -04004934 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
4935 &VMap, &SPIRVInstList, &InsertPoint](
4936 spv::Op opcode, Constant *constant) {
4937 //
4938 // Generate instruction like:
4939 // result = opcode constant <extinst-result>
4940 //
4941 // Ops[0] = Result Type ID
4942 // Ops[1] = Operand 0 ;; the constant, suitably splatted
4943 // Ops[2] = Operand 1 ;; the result of the extended instruction
4944 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004945
David Neto3fbb4072017-10-16 11:28:14 -04004946 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04004947 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04004948
4949 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
4950 constant = ConstantVector::getSplat(
4951 static_cast<unsigned>(vectorTy->getNumElements()), constant);
4952 }
David Neto257c3892018-04-11 13:19:45 -04004953 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04004954
4955 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004956 InsertPoint, new SPIRVInstruction(
4957 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04004958 };
4959
4960 switch (IndirectExtInst) {
4961 case glsl::ExtInstFindUMsb: // Implementing clz
4962 generate_extra_inst(
4963 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
4964 break;
4965 case glsl::ExtInstAcos: // Implementing acospi
4966 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01004967 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04004968 case glsl::ExtInstAtan2: // Implementing atan2pi
4969 generate_extra_inst(
4970 spv::OpFMul,
4971 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
4972 break;
4973
4974 default:
4975 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04004976 }
David Neto22f144c2017-06-12 14:26:21 -04004977 }
David Neto3fbb4072017-10-16 11:28:14 -04004978
alan-bakerb39c8262019-03-08 14:03:37 -05004979 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04004980 //
4981 // Generate OpBitCount
4982 //
4983 // Ops[0] = Result Type ID
4984 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04004985 SPIRVOperandList Ops;
4986 Ops << MkId(lookupType(Call->getType()))
4987 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004988
4989 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004990 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04004991 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04004992
David Neto862b7d82018-06-14 18:48:37 -04004993 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04004994
4995 // Generate an OpCompositeConstruct
4996 SPIRVOperandList Ops;
4997
4998 // The result type.
David Neto257c3892018-04-11 13:19:45 -04004999 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005000
5001 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005002 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005003 }
5004
5005 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005006 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5007 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005008
Alan Baker202c8c72018-08-13 13:47:44 -04005009 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5010
5011 // We have already mapped the call's result value to an ID.
5012 // Don't generate any code now.
5013
5014 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005015
5016 // We have already mapped the call's result value to an ID.
5017 // Don't generate any code now.
5018
David Neto22f144c2017-06-12 14:26:21 -04005019 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005020 if (Call->getType()->isPointerTy()) {
5021 // Functions returning pointers require variable pointers.
5022 setVariablePointersCapabilities(
5023 Call->getType()->getPointerAddressSpace());
5024 }
5025
David Neto22f144c2017-06-12 14:26:21 -04005026 //
5027 // Generate OpFunctionCall.
5028 //
5029
5030 // Ops[0] = Result Type ID
5031 // Ops[1] = Callee Function ID
5032 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5033 SPIRVOperandList Ops;
5034
David Neto862b7d82018-06-14 18:48:37 -04005035 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005036
5037 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005038 if (CalleeID == 0) {
5039 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005040 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005041 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5042 // causes an infinite loop. Instead, go ahead and generate
5043 // the bad function call. A validator will catch the 0-Id.
5044 // llvm_unreachable("Can't translate function call");
5045 }
David Neto22f144c2017-06-12 14:26:21 -04005046
David Neto257c3892018-04-11 13:19:45 -04005047 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005048
David Neto22f144c2017-06-12 14:26:21 -04005049 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5050 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005051 auto *operand = Call->getOperand(i);
5052 if (operand->getType()->isPointerTy()) {
5053 auto sc =
5054 GetStorageClass(operand->getType()->getPointerAddressSpace());
5055 if (sc == spv::StorageClassStorageBuffer) {
5056 // Passing SSBO by reference requires variable pointers storage
5057 // buffer.
5058 setVariablePointersStorageBuffer(true);
5059 } else if (sc == spv::StorageClassWorkgroup) {
5060 // Workgroup references require variable pointers if they are not
5061 // memory object declarations.
5062 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5063 // Workgroup accessor represents a variable reference.
5064 if (!operand_call->getCalledFunction()->getName().startswith(
5065 clspv::WorkgroupAccessorFunction()))
5066 setVariablePointers(true);
5067 } else {
5068 // Arguments are function parameters.
5069 if (!isa<Argument>(operand))
5070 setVariablePointers(true);
5071 }
5072 }
5073 }
5074 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005075 }
5076
David Neto87846742018-04-11 17:36:22 -04005077 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5078 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005079 SPIRVInstList.insert(InsertPoint, CallInst);
5080 }
5081 }
5082 }
5083}
5084
David Neto1a1a0582017-07-07 12:01:44 -04005085void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005086 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005087 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005088 }
David Neto1a1a0582017-07-07 12:01:44 -04005089
5090 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005091
5092 // Find an iterator pointing just past the last decoration.
5093 bool seen_decorations = false;
5094 auto DecoInsertPoint =
5095 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5096 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5097 const bool is_decoration =
5098 Inst->getOpcode() == spv::OpDecorate ||
5099 Inst->getOpcode() == spv::OpMemberDecorate;
5100 if (is_decoration) {
5101 seen_decorations = true;
5102 return false;
5103 } else {
5104 return seen_decorations;
5105 }
5106 });
5107
David Netoc6f3ab22018-04-06 18:02:31 -04005108 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5109 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005110 for (auto *type : getTypesNeedingArrayStride()) {
5111 Type *elemTy = nullptr;
5112 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5113 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005114 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005115 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005116 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005117 elemTy = seqTy->getSequentialElementType();
5118 } else {
5119 errs() << "Unhandled strided type " << *type << "\n";
5120 llvm_unreachable("Unhandled strided type");
5121 }
David Neto1a1a0582017-07-07 12:01:44 -04005122
5123 // Ops[0] = Target ID
5124 // Ops[1] = Decoration (ArrayStride)
5125 // Ops[2] = Stride number (Literal Number)
5126 SPIRVOperandList Ops;
5127
David Neto85082642018-03-24 06:55:20 -07005128 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005129 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005130
5131 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5132 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005133
David Neto87846742018-04-11 17:36:22 -04005134 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005135 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5136 }
David Netoc6f3ab22018-04-06 18:02:31 -04005137
5138 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005139 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5140 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005141 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005142 SPIRVOperandList Ops;
5143 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5144 << MkNum(arg_info.spec_id);
5145 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005146 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005147 }
David Neto1a1a0582017-07-07 12:01:44 -04005148}
5149
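// Maps an Itanium-mangled OpenCL builtin name (or a clspv/LLVM intrinsic
// name) directly to a GLSL.std.450 extended instruction.  In the mangled
// names, c/h encode char/uchar, s/t short/ushort, i/j int/uint, l/m
// long/ulong, f float, and Dv<N>_ a vector of N elements; these select the
// signed, unsigned, or floating-point flavour of the instruction.  Returns
// kGlslExtInstBad when there is no direct mapping.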
David Neto22f144c2017-06-12 14:26:21 -04005150glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5151 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005152 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5153 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5154 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5155 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005156 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5157 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5158 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5159 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005160 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5161 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5162 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5163 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005164 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5165 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5166 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5167 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005168 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5169 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5170 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5171 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5172 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5173 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5174 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5175 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005176 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5177 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5178 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5179 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5180 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5181 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5182 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5183 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005184 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5185 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5186 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5187 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5188 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5189 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5190 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5191 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005192 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5193 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5194 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5195 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5196 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5197 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5198 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5199 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005200 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5201 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5202 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5203 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005204 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5205 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5206 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5207 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5208 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5209 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5210 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5211 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005212 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5213 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5214 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5215 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5216 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5217 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5218 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5219 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005220 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5221 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5222 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5223 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5224 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5225 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5226 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5227 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005228 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5229 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5230 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5231 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5232 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5233 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5234 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5235 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005236 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5237 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5238 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5239 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5240 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005241 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5242 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5243 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5244 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5245 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5246 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5247 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5248 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005249 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5250 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5251 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5252 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5253 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5254 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5255 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5256 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005257 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5258 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5259 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5260 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5261 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5262 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5263 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5264 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005265 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5266 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5267 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5268 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5269 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5270 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5271 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5272 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005273 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5274 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5275 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5276 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5277 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5278 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5279 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5280 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5281 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5282 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5283 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5284 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5285 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5286 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5287 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5288 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5289 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5290 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5291 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5292 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5293 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5294 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5295 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5296 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5297 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5298 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5299 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5300 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5301 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5302 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5303 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5304 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5305 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5306 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5307 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5308 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5309 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5310 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5311 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5312 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5313 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005314 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005315 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5316 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5317 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5318 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5319 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5320 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5321 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5322 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5323 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5324 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5325 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5326 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5327 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5328 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5329 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5330 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5331 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005332 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005333 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005334 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005335 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005336 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005337 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5338 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005339 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005340 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5341 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5342 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005343 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5344 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5345 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5346 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005347 .Default(kGlslExtInstBad);
5348}
5349
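// Maps builtins that are implemented in terms of a GLSL.std.450 instruction
// but need extra code generated around it, e.g. the *pi variants also
// multiply by a 1/pi constant of the matching float type, and clz derives
// its result from FindUMsb.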
5350glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5351 // Check indirect cases.
5352 return StringSwitch<glsl::ExtInst>(Name)
5353 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5354 // Use exact match on float arg because these need a multiply
5355 // of a constant of the right floating point type.
5356 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5357 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5358 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5359 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5360 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5361 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5362 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5363 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005364 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5365 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5366 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5367 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005368 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5369 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5370 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5371 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5372 .Default(kGlslExtInstBad);
5373}
5374
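// Tries the direct mapping first and falls back to the indirect one;
// returns kGlslExtInstBad if the name matches neither.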
alan-bakerb6b09dc2018-11-08 16:59:28 -05005375glsl::ExtInst
5376SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005377 auto direct = getExtInstEnum(Name);
5378 if (direct != kGlslExtInstBad)
5379 return direct;
5380 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005381}
5382
David Neto22f144c2017-06-12 14:26:21 -04005383void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005384 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005385}
5386
5387void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5388 WriteOneWord(Inst->getResultID());
5389}
5390
5391void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5392 // High 16 bit : Word Count
5393 // Low 16 bit : Opcode
5394 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005395 const uint32_t count = Inst->getWordCount();
5396 if (count > 65535) {
5397 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5398 llvm_unreachable("Word count too high");
5399 }
David Neto22f144c2017-06-12 14:26:21 -04005400 Word |= Inst->getWordCount() << 16;
5401 WriteOneWord(Word);
5402}
5403
5404void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
5405 SPIRVOperandType OpTy = Op->getType();
5406 switch (OpTy) {
5407 default: {
5408 llvm_unreachable("Unsupported SPIRV Operand Type???");
5409 break;
5410 }
5411 case SPIRVOperandType::NUMBERID: {
5412 WriteOneWord(Op->getNumID());
5413 break;
5414 }
5415 case SPIRVOperandType::LITERAL_STRING: {
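    // A literal string is packed four bytes per word, little-endian within
    // each word, nul-terminated and padded to a word boundary.  The final
    // word is therefore always written: it holds the trailing bytes plus
    // zero padding, or is all zeros when the length is a multiple of four.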
5416 std::string Str = Op->getLiteralStr();
5417 const char *Data = Str.c_str();
5418 size_t WordSize = Str.size() / 4;
5419 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5420 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5421 }
5422
5423 uint32_t Remainder = Str.size() % 4;
5424 uint32_t LastWord = 0;
5425 if (Remainder) {
5426 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5427 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5428 }
5429 }
5430
5431 WriteOneWord(LastWord);
5432 break;
5433 }
5434 case SPIRVOperandType::LITERAL_INTEGER:
5435 case SPIRVOperandType::LITERAL_FLOAT: {
5436 auto LiteralNum = Op->getLiteralNum();
5437    // TODO: Handle LiteralNum carefully.
5438 for (auto Word : LiteralNum) {
5439 WriteOneWord(Word);
5440 }
5441 break;
5442 }
5443 }
5444}
5445
5446void SPIRVProducerPass::WriteSPIRVBinary() {
5447 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5448
5449 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005450 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005451 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5452
5453 switch (Opcode) {
5454 default: {
David Neto5c22a252018-03-15 16:07:41 -04005455 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005456 llvm_unreachable("Unsupported SPIRV instruction");
5457 break;
5458 }
5459 case spv::OpCapability:
5460 case spv::OpExtension:
5461 case spv::OpMemoryModel:
5462 case spv::OpEntryPoint:
5463 case spv::OpExecutionMode:
5464 case spv::OpSource:
5465 case spv::OpDecorate:
5466 case spv::OpMemberDecorate:
5467 case spv::OpBranch:
5468 case spv::OpBranchConditional:
5469 case spv::OpSelectionMerge:
5470 case spv::OpLoopMerge:
5471 case spv::OpStore:
5472 case spv::OpImageWrite:
5473 case spv::OpReturnValue:
5474 case spv::OpControlBarrier:
5475 case spv::OpMemoryBarrier:
5476 case spv::OpReturn:
5477 case spv::OpFunctionEnd:
5478 case spv::OpCopyMemory: {
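      // These instructions produce no result <id>: write the word count and
      // opcode, then every operand in order.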
5479 WriteWordCountAndOpcode(Inst);
5480 for (uint32_t i = 0; i < Ops.size(); i++) {
5481 WriteOperand(Ops[i]);
5482 }
5483 break;
5484 }
5485 case spv::OpTypeBool:
5486 case spv::OpTypeVoid:
5487 case spv::OpTypeSampler:
5488 case spv::OpLabel:
5489 case spv::OpExtInstImport:
5490 case spv::OpTypePointer:
5491 case spv::OpTypeRuntimeArray:
5492 case spv::OpTypeStruct:
5493 case spv::OpTypeImage:
5494 case spv::OpTypeSampledImage:
5495 case spv::OpTypeInt:
5496 case spv::OpTypeFloat:
5497 case spv::OpTypeArray:
5498 case spv::OpTypeVector:
5499 case spv::OpTypeFunction: {
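      // These instructions produce a result <id> but take no result type:
      // the result <id> immediately follows the opcode word, then the
      // operands.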
5500 WriteWordCountAndOpcode(Inst);
5501 WriteResultID(Inst);
5502 for (uint32_t i = 0; i < Ops.size(); i++) {
5503 WriteOperand(Ops[i]);
5504 }
5505 break;
5506 }
5507 case spv::OpFunction:
5508 case spv::OpFunctionParameter:
5509 case spv::OpAccessChain:
5510 case spv::OpPtrAccessChain:
5511 case spv::OpInBoundsAccessChain:
5512 case spv::OpUConvert:
5513 case spv::OpSConvert:
5514 case spv::OpConvertFToU:
5515 case spv::OpConvertFToS:
5516 case spv::OpConvertUToF:
5517 case spv::OpConvertSToF:
5518 case spv::OpFConvert:
5519 case spv::OpConvertPtrToU:
5520 case spv::OpConvertUToPtr:
5521 case spv::OpBitcast:
5522 case spv::OpIAdd:
5523 case spv::OpFAdd:
5524 case spv::OpISub:
5525 case spv::OpFSub:
5526 case spv::OpIMul:
5527 case spv::OpFMul:
5528 case spv::OpUDiv:
5529 case spv::OpSDiv:
5530 case spv::OpFDiv:
5531 case spv::OpUMod:
5532 case spv::OpSRem:
5533 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005534 case spv::OpUMulExtended:
5535 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005536 case spv::OpBitwiseOr:
5537 case spv::OpBitwiseXor:
5538 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005539 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005540 case spv::OpShiftLeftLogical:
5541 case spv::OpShiftRightLogical:
5542 case spv::OpShiftRightArithmetic:
5543 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005544 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005545 case spv::OpCompositeExtract:
5546 case spv::OpVectorExtractDynamic:
5547 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005548 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005549 case spv::OpVectorInsertDynamic:
5550 case spv::OpVectorShuffle:
5551 case spv::OpIEqual:
5552 case spv::OpINotEqual:
5553 case spv::OpUGreaterThan:
5554 case spv::OpUGreaterThanEqual:
5555 case spv::OpULessThan:
5556 case spv::OpULessThanEqual:
5557 case spv::OpSGreaterThan:
5558 case spv::OpSGreaterThanEqual:
5559 case spv::OpSLessThan:
5560 case spv::OpSLessThanEqual:
5561 case spv::OpFOrdEqual:
5562 case spv::OpFOrdGreaterThan:
5563 case spv::OpFOrdGreaterThanEqual:
5564 case spv::OpFOrdLessThan:
5565 case spv::OpFOrdLessThanEqual:
5566 case spv::OpFOrdNotEqual:
5567 case spv::OpFUnordEqual:
5568 case spv::OpFUnordGreaterThan:
5569 case spv::OpFUnordGreaterThanEqual:
5570 case spv::OpFUnordLessThan:
5571 case spv::OpFUnordLessThanEqual:
5572 case spv::OpFUnordNotEqual:
5573 case spv::OpExtInst:
5574 case spv::OpIsInf:
5575 case spv::OpIsNan:
5576 case spv::OpAny:
5577 case spv::OpAll:
5578 case spv::OpUndef:
5579 case spv::OpConstantNull:
5580 case spv::OpLogicalOr:
5581 case spv::OpLogicalAnd:
5582 case spv::OpLogicalNot:
5583 case spv::OpLogicalNotEqual:
5584 case spv::OpConstantComposite:
5585 case spv::OpSpecConstantComposite:
5586 case spv::OpConstantTrue:
5587 case spv::OpConstantFalse:
5588 case spv::OpConstant:
5589 case spv::OpSpecConstant:
5590 case spv::OpVariable:
5591 case spv::OpFunctionCall:
5592 case spv::OpSampledImage:
5593 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005594 case spv::OpImageQuerySize:
David Neto22f144c2017-06-12 14:26:21 -04005595 case spv::OpSelect:
5596 case spv::OpPhi:
5597 case spv::OpLoad:
5598 case spv::OpAtomicIAdd:
5599 case spv::OpAtomicISub:
5600 case spv::OpAtomicExchange:
5601 case spv::OpAtomicIIncrement:
5602 case spv::OpAtomicIDecrement:
5603 case spv::OpAtomicCompareExchange:
5604 case spv::OpAtomicUMin:
5605 case spv::OpAtomicSMin:
5606 case spv::OpAtomicUMax:
5607 case spv::OpAtomicSMax:
5608 case spv::OpAtomicAnd:
5609 case spv::OpAtomicOr:
5610 case spv::OpAtomicXor:
5611 case spv::OpDot: {
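      // These instructions have both a result type and a result <id>:
      // Ops[0] is the result type, the result <id> comes next, and the
      // remaining operands follow.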
5612 WriteWordCountAndOpcode(Inst);
5613 WriteOperand(Ops[0]);
5614 WriteResultID(Inst);
5615 for (uint32_t i = 1; i < Ops.size(); i++) {
5616 WriteOperand(Ops[i]);
5617 }
5618 break;
5619 }
5620 }
5621 }
5622}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005623
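// Returns true if |type| can be represented by OpConstantNull.  Scalars,
// vectors, and most pointers qualify; opaque struct types (images and
// samplers) do not; aggregates qualify only when every element does.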
alan-bakerb6b09dc2018-11-08 16:59:28 -05005624bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005625 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005626 case Type::HalfTyID:
5627 case Type::FloatTyID:
5628 case Type::DoubleTyID:
5629 case Type::IntegerTyID:
5630 case Type::VectorTyID:
5631 return true;
5632 case Type::PointerTyID: {
5633 const PointerType *pointer_type = cast<PointerType>(type);
5634 if (pointer_type->getPointerAddressSpace() !=
5635 AddressSpace::UniformConstant) {
5636 auto pointee_type = pointer_type->getPointerElementType();
5637 if (pointee_type->isStructTy() &&
5638 cast<StructType>(pointee_type)->isOpaque()) {
5639 // Images and samplers are not nullable.
5640 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005641 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005642 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005643 return true;
5644 }
5645 case Type::ArrayTyID:
5646 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5647 case Type::StructTyID: {
5648 const StructType *struct_type = cast<StructType>(type);
5649 // Images and samplers are not nullable.
5650 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005651 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005652 for (const auto element : struct_type->elements()) {
5653 if (!IsTypeNullable(element))
5654 return false;
5655 }
5656 return true;
5657 }
5658 default:
5659 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005660 }
5661}
Alan Bakerfcda9482018-10-02 17:09:59 -04005662
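// Reads the remapped-type metadata recorded earlier in the module and caches
// per-type offsets and (size in bits, store size, alloc size) triples so the
// queries below can prefer them over the DataLayout.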
5663void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5664 if (auto *offsets_md =
5665 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5666    // Metadata is stored as key-value pair operands. The first element of each
5667 // operand is the type and the second is a vector of offsets.
5668 for (const auto *operand : offsets_md->operands()) {
5669 const auto *pair = cast<MDTuple>(operand);
5670 auto *type =
5671 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5672 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5673 std::vector<uint32_t> offsets;
5674 for (const Metadata *offset_md : offset_vector->operands()) {
5675 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005676 offsets.push_back(static_cast<uint32_t>(
5677 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005678 }
5679 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5680 }
5681 }
5682
5683 if (auto *sizes_md =
5684 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5685 // Metadata is stored as key-value pair operands. The first element of each
5686 // operand is the type and the second is a triple of sizes: type size in
5687 // bits, store size and alloc size.
5688 for (const auto *operand : sizes_md->operands()) {
5689 const auto *pair = cast<MDTuple>(operand);
5690 auto *type =
5691 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5692 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5693 uint64_t type_size_in_bits =
5694 cast<ConstantInt>(
5695 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5696 ->getZExtValue();
5697 uint64_t type_store_size =
5698 cast<ConstantInt>(
5699 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5700 ->getZExtValue();
5701 uint64_t type_alloc_size =
5702 cast<ConstantInt>(
5703 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5704 ->getZExtValue();
5705 RemappedUBOTypeSizes.insert(std::make_pair(
5706 type, std::make_tuple(type_size_in_bits, type_store_size,
5707 type_alloc_size)));
5708 }
5709 }
5710}
5711
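// The three size queries below return the remapped UBO size when one was
// recorded for |type|, and otherwise fall back to the DataLayout.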
5712uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5713 const DataLayout &DL) {
5714 auto iter = RemappedUBOTypeSizes.find(type);
5715 if (iter != RemappedUBOTypeSizes.end()) {
5716 return std::get<0>(iter->second);
5717 }
5718
5719 return DL.getTypeSizeInBits(type);
5720}
5721
5722uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5723 auto iter = RemappedUBOTypeSizes.find(type);
5724 if (iter != RemappedUBOTypeSizes.end()) {
5725 return std::get<1>(iter->second);
5726 }
5727
5728 return DL.getTypeStoreSize(type);
5729}
5730
5731uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5732 auto iter = RemappedUBOTypeSizes.find(type);
5733 if (iter != RemappedUBOTypeSizes.end()) {
5734 return std::get<2>(iter->second);
5735 }
5736
5737 return DL.getTypeAllocSize(type);
5738}
alan-baker5b86ed72019-02-15 08:26:50 -05005739
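// Records the variable-pointers capability implied by a pointer in
// |address_space|: StorageBuffer pointers only need
// VariablePointersStorageBuffer; anything else needs full VariablePointers.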
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005740void SPIRVProducerPass::setVariablePointersCapabilities(
5741 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005742 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5743 setVariablePointersStorageBuffer(true);
5744 } else {
5745 setVariablePointers(true);
5746 }
5747}
5748
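// Follows GetElementPtr chains back to the underlying pointer value; any
// other value is returned unchanged.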
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005749Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005750 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5751 return GetBasePointer(gep->getPointerOperand());
5752 }
5753
5754 // Conservatively return |v|.
5755 return v;
5756}
5757
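// Two accessor calls name the same resource when they are both resource
// accessors with matching descriptor set and binding operands, or both
// workgroup accessors with matching spec ids; anything else is treated
// conservatively as a different resource.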
5758bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5759 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5760 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5761 if (lhs_call->getCalledFunction()->getName().startswith(
5762 clspv::ResourceAccessorFunction()) &&
5763 rhs_call->getCalledFunction()->getName().startswith(
5764 clspv::ResourceAccessorFunction())) {
5765 // For resource accessors, match descriptor set and binding.
5766 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5767 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5768 return true;
5769 } else if (lhs_call->getCalledFunction()->getName().startswith(
5770 clspv::WorkgroupAccessorFunction()) &&
5771 rhs_call->getCalledFunction()->getName().startswith(
5772 clspv::WorkgroupAccessorFunction())) {
5773 // For workgroup resources, match spec id.
5774 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5775 return true;
5776 }
5777 }
5778 }
5779
5780 return false;
5781}
5782
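// Returns true when a select or phi over storage-buffer pointers only
// chooses between pointers derived from the same object: a common base
// pointer, the same resource variable, or a null operand (or undef when the
// undef hack is enabled).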
5783bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5784 assert(inst->getType()->isPointerTy());
5785 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5786 spv::StorageClassStorageBuffer);
5787 const bool hack_undef = clspv::Option::HackUndef();
5788 if (auto *select = dyn_cast<SelectInst>(inst)) {
5789 auto *true_base = GetBasePointer(select->getTrueValue());
5790 auto *false_base = GetBasePointer(select->getFalseValue());
5791
5792 if (true_base == false_base)
5793 return true;
5794
5795 // If either the true or false operand is a null, then we satisfy the same
5796 // object constraint.
5797 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5798 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5799 return true;
5800 }
5801
5802 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5803 if (false_cst->isNullValue() ||
5804 (hack_undef && isa<UndefValue>(false_base)))
5805 return true;
5806 }
5807
5808 if (sameResource(true_base, false_base))
5809 return true;
5810 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5811 Value *value = nullptr;
5812 bool ok = true;
5813 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5814 auto *base = GetBasePointer(phi->getIncomingValue(i));
5815      // Null values satisfy the constraint of selecting from the same
5816      // object.
5817 if (!value) {
5818 if (auto *cst = dyn_cast<Constant>(base)) {
5819 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5820 value = base;
5821 } else {
5822 value = base;
5823 }
5824 } else if (base != value) {
5825 if (auto *base_cst = dyn_cast<Constant>(base)) {
5826 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5827 continue;
5828 }
5829
5830 if (sameResource(value, base))
5831 continue;
5832
5833 // Values don't represent the same base.
5834 ok = false;
5835 }
5836 }
5837
5838 return ok;
5839 }
5840
5841 // Conservatively return false.
5842 return false;
5843}
alan-bakere9308012019-03-15 10:25:13 -04005844
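// Determines whether the global-address-space pointer argument |Arg| can be
// bound to a coherent storage buffer: walks the callers of its function,
// tracing pointer operands back to clspv resource accessor calls, and
// returns true if any such accessor has its coherent operand set.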
5845bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5846 if (!Arg.getType()->isPointerTy() ||
5847 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5848 // Only SSBOs need to be annotated as coherent.
5849 return false;
5850 }
5851
5852 DenseSet<Value *> visited;
5853 std::vector<Value *> stack;
5854 for (auto *U : Arg.getParent()->users()) {
5855 if (auto *call = dyn_cast<CallInst>(U)) {
5856 stack.push_back(call->getOperand(Arg.getArgNo()));
5857 }
5858 }
5859
5860 while (!stack.empty()) {
5861 Value *v = stack.back();
5862 stack.pop_back();
5863
5864 if (!visited.insert(v).second)
5865 continue;
5866
5867 auto *resource_call = dyn_cast<CallInst>(v);
5868 if (resource_call &&
5869 resource_call->getCalledFunction()->getName().startswith(
5870 clspv::ResourceAccessorFunction())) {
5871 // If this is a resource accessor function, check if the coherent operand
5872 // is set.
5873 const auto coherent =
5874 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5875 ->getZExtValue());
5876 if (coherent == 1)
5877 return true;
5878 } else if (auto *arg = dyn_cast<Argument>(v)) {
5879 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005880 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005881 if (auto *call = dyn_cast<CallInst>(U)) {
5882 stack.push_back(call->getOperand(arg->getArgNo()));
5883 }
5884 }
5885 } else if (auto *user = dyn_cast<User>(v)) {
5886 // If this is a user, traverse all operands that could lead to resource
5887 // variables.
5888 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5889 Value *operand = user->getOperand(i);
5890 if (operand->getType()->isPointerTy() &&
5891 operand->getType()->getPointerAddressSpace() ==
5892 clspv::AddressSpace::Global) {
5893 stack.push_back(operand);
5894 }
5895 }
5896 }
5897 }
5898
5899 // No coherent resource variables encountered.
5900 return false;
5901}