// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() { return Type; };
  uint32_t getNumID() { return LiteralNum[0]; };
  std::string getLiteralStr() { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};
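
// An informal note on literal string sizing (this mirrors GetNumWords()
// above and the SPIR-V binary encoding): a string occupies its length plus
// one null byte, rounded up to whole 32-bit words, which is exactly what
// (LiteralStr.size() + 4) / 4 computes. For example, "GLSL.std.450"
// (12 characters) takes (12 + 4) / 4 = 4 words.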

class SPIRVOperandList {
public:
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
      : contents_(init.begin(), init.end()) {}
  operator ArrayRef<SPIRVOperand *>() { return contents_; }
  void push_back(SPIRVOperand *op) { contents_.push_back(op); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  SPIRVOperand *&operator[](size_t i) { return contents_[i]; }

  const SmallVector<SPIRVOperand *, 8> &getOperands() const {
    return contents_;
  }

private:
  SmallVector<SPIRVOperand *, 8> contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
  list.push_back(elem);
  return list;
}

SPIRVOperand *MkNum(uint32_t num) {
  return new SPIRVOperand(LITERAL_INTEGER, num);
}
SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_INTEGER, num_vec);
}
SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_FLOAT, num_vec);
}
SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
SPIRVOperand *MkString(StringRef str) {
  return new SPIRVOperand(LITERAL_STRING, str);
}
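
// Illustrative usage of the helpers above (a sketch only; the opcode, IDs,
// and values are placeholders, not taken from real generated code):
//
//   SPIRVOperandList Ops;
//   Ops << MkId(some_type_id) << MkNum(2) << MkString("foo");
//   SPIRVInsts.push_back(new SPIRVInstruction(some_opcode, result_id, Ops));
//
// The helpers return newly allocated SPIRVOperand objects, which the rest of
// this pass stores and passes around as raw pointers.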

struct SPIRVInstruction {
  // Create an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
        Operands(Ops.begin(), Ops.end()) {
    for (auto *operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
  }
  // Create an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                            ArrayRef<SPIRVOperand *> Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
        Operands(Ops.begin(), Ops.end()) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    for (auto *operand : Ops) {
      WordCount += operand->GetNumWords();
    }
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<SPIRVOperand *, 4> Operands;
};
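
// Note on the binary encoding produced from SPIRVInstruction: in SPIR-V the
// first word of every instruction packs the word count into the high 16 bits
// and the opcode into the low 16 bits, i.e. first_word = (WordCount << 16) |
// Opcode. WordCount is held here as a uint32_t and is checked against the
// 16-bit limit when the words are actually written (see
// WriteWordCountAndOpcode).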

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
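  // (For example, as an informal note: clz maps indirectly to FindUMsb,
  // since clz(x) is computed as 31 - FindUMsb(x) for 32-bit x, and the *pi
  // trig wrappers such as acospi, asinpi, and atan2pi map to their radian
  // counterparts combined with a multiply by the 1/pi constant registered in
  // FindExtInst().)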
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
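  // (Illustrative note: under std140-style UBO layout rules an array element
  // is padded out to a 16-byte stride, so a remapped [4 x float] would report
  // a 64-byte alloc size here even though the default DataLayout would say
  // 16 bytes.)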

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, uint32_t> SamplerMapIndexToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

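// Illustrative usage sketch (not part of this file): a driver would hand the
// factory above to a legacy pass manager, roughly as follows, where |out|,
// |sampler_map|, and |module| are supplied by the caller:
//
//   llvm::legacy::PassManager pm;
//   std::vector<clspv::version0::DescriptorMapEntry> entries;
//   pm.add(clspv::createSPIRVProducerPass(out, &entries, sampler_map,
//                                         /* outputCInitList = */ false));
//   pm.run(module);
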
bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // If we have a sampler map, we might have literal samplers to generate.
  if (0 < getSamplerMap().size()) {
    GenerateSamplers(module);
  }

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
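    // Reassemble each group of four bytes from the temporary binary stream
    // back into a 32-bit word (byte 0 is taken as the least significant, so
    // the words are assumed to have been serialized little-endian) and emit
    // each word as a decimal literal in the initializer list.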
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

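// The SPIR-V module header is five 32-bit words: magic number, version,
// generator magic (tool vendor in the high 16 bits), the ID bound, and a
// reserved schema word of 0. The bound is not known when the header is
// emitted, so outputHeader() writes a placeholder and patchHeader() later
// overwrites it with the final value of nextID.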
void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR such as global variables for arguments,
  // constants, and pointer types for argument access. This information is
  // artificial because we need Vulkan SPIR-V output. This function is
  // executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext, sext, or uitofp with an i1 operand, it will be
          // changed to OpSelect. The OpSelect needs constants 0 and 1, so the
          // constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsGetImageHeight(callee_name) ||
              clspv::IsGetImageWidth(callee_name)) {
            FindType(VectorType::get(Type::getInt32Ty(Context), 2));
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
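  // (Informal example: if two kernels both declare a storage buffer at
  // argument index 0, the default mode reuses a single OpVariable for both,
  // while the DistinctKernelDescriptorSets() mode gives each kernel its own
  // (set,binding) pair and therefore its own resource variable.)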
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

bool SPIRVProducerPass::FindExtInst(Module &M) {
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
          // Check whether this call is for extended instructions.
          auto callee_name = Callee->getName();
          const glsl::ExtInst EInst = getExtInstEnum(callee_name);
          const glsl::ExtInst IndirectEInst =
              getIndirectExtInstEnum(callee_name);

          HasExtInst |=
              (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);

          if (IndirectEInst) {
            // Register extra constants if needed.

            // Registers a type and constant for computing the result of the
            // given instruction. If the result of the instruction is a vector,
            // then make a splat vector constant with the same number of
            // elements.
            auto register_constant = [this, &I](Constant *constant) {
              FindType(constant->getType());
              FindConstant(constant);
              if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
                // Register the splat vector of the value with the same
                // width as the result of the instruction.
                auto *vec_constant = ConstantVector::getSplat(
                    static_cast<unsigned>(vectorTy->getNumElements()),
                    constant);
                FindConstant(vec_constant);
                FindType(vec_constant->getType());
              }
            };
            switch (IndirectEInst) {
            case glsl::ExtInstFindUMsb:
              // clz needs OpExtInst and OpISub with constant 31, or splat
              // vector of 31. Add it to the constant list here.
              register_constant(
                  ConstantInt::get(Type::getInt32Ty(Context), 31));
              break;
            case glsl::ExtInstAcos:
            case glsl::ExtInstAsin:
            case glsl::ExtInstAtan:
            case glsl::ExtInstAtan2:
              // We need 1/pi for acospi, asinpi, atanpi, atan2pi.
              register_constant(
                  ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
              break;
            default:
              assert(false && "internally inconsistent");
            }
          }
        }
      }
    }
  }

  return HasExtInst;
}

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

void SPIRVProducerPass::FindTypePerFunc(Function &F) {
  // Investigate function's type.
  FunctionType *FTy = F.getFunctionType();

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    // Handle a regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
      SmallVector<Type *, 4> NewFuncParamTys;
      for (unsigned i = 0; i < FTy->getNumParams(); i++) {
        Type *ParamTy = FTy->getParamType(i);
        if (i == GVCstArgIdx) {
          Type *EleTy = ParamTy->getPointerElementType();
          ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
        }

        NewFuncParamTys.push_back(ParamTy);
      }

      FunctionType *NewFTy =
          FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
      GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
      FTy = NewFTy;
    }

    FindType(FTy);
  } else {
    // Kernel functions do not take parameters in the generated SPIR-V, so
    // create a new parameterless function type and add it to the type map.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FindType(NewFTy);
  }

1127 // Investigate instructions' type in function body.
1128 for (BasicBlock &BB : F) {
1129 for (Instruction &I : BB) {
1130 if (isa<ShuffleVectorInst>(I)) {
1131 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1132 // Ignore type for mask of shuffle vector instruction.
1133 if (i == 2) {
1134 continue;
1135 }
1136
1137 Value *Op = I.getOperand(i);
1138 if (!isa<MetadataAsValue>(Op)) {
1139 FindType(Op->getType());
1140 }
1141 }
1142
1143 FindType(I.getType());
1144 continue;
1145 }
1146
David Neto862b7d82018-06-14 18:48:37 -04001147 CallInst *Call = dyn_cast<CallInst>(&I);
1148
1149 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001150 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001151 // This is a fake call representing access to a resource variable.
1152 // We handle that elsewhere.
1153 continue;
1154 }
1155
Alan Baker202c8c72018-08-13 13:47:44 -04001156 if (Call && Call->getCalledFunction()->getName().startswith(
1157 clspv::WorkgroupAccessorFunction())) {
1158 // This is a fake call representing access to a workgroup variable.
1159 // We handle that elsewhere.
1160 continue;
1161 }
1162
David Neto22f144c2017-06-12 14:26:21 -04001163 // Work through the operands of the instruction.
1164 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1165 Value *const Op = I.getOperand(i);
1166 // If any of the operands is a constant, find the type!
1167 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1168 FindType(Op->getType());
1169 }
1170 }
1171
1172 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001173 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001174          // Don't check a call instruction's operand types here.
1175 break;
1176 }
Alan Baker202c8c72018-08-13 13:47:44 -04001177 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1178 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1179 clspv::WorkgroupAccessorFunction())) {
1180 // This is a fake call representing access to a workgroup variable.
1181 // We handle that elsewhere.
1182 continue;
1183 }
1184 }
David Neto22f144c2017-06-12 14:26:21 -04001185 if (!isa<MetadataAsValue>(&Op)) {
1186 FindType(Op->getType());
1187 continue;
1188 }
1189 }
1190
David Neto22f144c2017-06-12 14:26:21 -04001191 // We don't want to track the type of this call as we are going to replace
1192 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001193 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001194 Call->getCalledFunction()->getName())) {
1195 continue;
1196 }
1197
1198 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1199        // If the GEP's base operand is in the ModuleScopePrivate address space,
1200        // the GEP result is also in the ModuleScopePrivate address space.
1201 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1202 // Add pointer type with private address space for global constant to
1203 // type list.
1204 Type *EleTy = I.getType()->getPointerElementType();
1205 Type *NewPTy =
1206 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1207
1208 FindType(NewPTy);
1209 continue;
1210 }
1211 }
1212
1213 FindType(I.getType());
1214 }
1215 }
1216}
1217
David Neto862b7d82018-06-14 18:48:37 -04001218void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1219 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001220 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001221 0 < getSamplerMap().size()) {
1222 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1223 if (!SamplerStructTy) {
1224 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1225 }
1226
1227 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1228
1229 FindType(SamplerTy);
1230 }
1231}
1232
1233void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1234 // Record types so they are generated.
1235 TypesNeedingLayout.reset();
1236 StructTypesNeedingBlock.reset();
1237
1238 // To match older clspv codegen, generate the float type first if required
1239 // for images.
1240 for (const auto *info : ModuleOrderedResourceVars) {
1241 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1242 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001243 if (IsIntImageType(info->var_fn->getReturnType())) {
1244 // Nothing for now...
1245 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1246 FindType(Type::getInt32Ty(M.getContext()));
1247 }
1248
1249 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001250 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001251 }
1252 }
1253
1254 for (const auto *info : ModuleOrderedResourceVars) {
1255 Type *type = info->var_fn->getReturnType();
1256
1257 switch (info->arg_kind) {
1258 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001259 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001260 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1261 StructTypesNeedingBlock.insert(sty);
1262 } else {
1263 errs() << *type << "\n";
1264 llvm_unreachable("Buffer arguments must map to structures!");
1265 }
1266 break;
1267 case clspv::ArgKind::Pod:
1268 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1269 StructTypesNeedingBlock.insert(sty);
1270 } else {
1271 errs() << *type << "\n";
1272 llvm_unreachable("POD arguments must map to structures!");
1273 }
1274 break;
1275 case clspv::ArgKind::ReadOnlyImage:
1276 case clspv::ArgKind::WriteOnlyImage:
1277 case clspv::ArgKind::Sampler:
1278 // Sampler and image types map to the pointee type but
1279 // in the uniform constant address space.
1280 type = PointerType::get(type->getPointerElementType(),
1281 clspv::AddressSpace::UniformConstant);
1282 break;
1283 default:
1284 break;
1285 }
1286
1287 // The converted type is the type of the OpVariable we will generate.
1288 // If the pointee type is an array of size zero, FindType will convert it
1289 // to a runtime array.
1290 FindType(type);
1291 }
1292
alan-bakerdcd97412019-09-16 15:32:30 -04001293 // If module constants are clustered in a storage buffer then that struct
1294 // needs layout decorations.
1295 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1296 for (GlobalVariable &GV : M.globals()) {
1297 PointerType *PTy = cast<PointerType>(GV.getType());
1298 const auto AS = PTy->getAddressSpace();
1299 const bool module_scope_constant_external_init =
1300 (AS == AddressSpace::Constant) && GV.hasInitializer();
1301 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1302 if (module_scope_constant_external_init &&
1303 spv::BuiltInMax == BuiltinType) {
1304 StructTypesNeedingBlock.insert(
1305 cast<StructType>(PTy->getPointerElementType()));
1306 }
1307 }
1308 }
1309
David Neto862b7d82018-06-14 18:48:37 -04001310 // Traverse the arrays and structures underneath each Block, and
1311 // mark them as needing layout.
1312 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1313 StructTypesNeedingBlock.end());
1314 while (!work_list.empty()) {
1315 Type *type = work_list.back();
1316 work_list.pop_back();
1317 TypesNeedingLayout.insert(type);
1318 switch (type->getTypeID()) {
1319 case Type::ArrayTyID:
1320 work_list.push_back(type->getArrayElementType());
1321 if (!Hack_generate_runtime_array_stride_early) {
1322 // Remember this array type for deferred decoration.
1323 TypesNeedingArrayStride.insert(type);
1324 }
1325 break;
1326 case Type::StructTyID:
1327 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1328 work_list.push_back(elem_ty);
1329 }
1330 default:
1331 // This type and its contained types don't get layout.
1332 break;
1333 }
1334 }
1335}
1336
Alan Baker202c8c72018-08-13 13:47:44 -04001337void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1338 // The SpecId assignment for pointer-to-local arguments is recorded in
1339 // module-level metadata. Translate that information into local argument
1340 // information.
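  // Each metadata operand is a tuple of (kernel function, argument index,
  // SpecId). For illustration only, an entry looks roughly like:
  //   !{void (float addrspace(3)*)* @foo, i32 1, i32 3}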
1341 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001342 if (!nmd)
1343 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001344 for (auto operand : nmd->operands()) {
1345 MDTuple *tuple = cast<MDTuple>(operand);
1346 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1347 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001348 ConstantAsMetadata *arg_index_md =
1349 cast<ConstantAsMetadata>(tuple->getOperand(1));
1350 int arg_index = static_cast<int>(
1351 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1352 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001353
1354 ConstantAsMetadata *spec_id_md =
1355 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001356 int spec_id = static_cast<int>(
1357 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001358
1359 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1360 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001361 if (LocalSpecIdInfoMap.count(spec_id))
1362 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001363
1364 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
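    // Reserve four consecutive IDs for this SpecId: the workgroup variable,
    // its array-size spec constant, the array type, and the pointer-to-array
    // type. The spec constant and types are emitted in GenerateSPIRVTypes.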
1365 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1366 nextID + 1, nextID + 2,
1367 nextID + 3, spec_id};
1368 LocalSpecIdInfoMap[spec_id] = info;
1369 nextID += 4;
1370
1371 // Ensure the types necessary for this argument get generated.
1372 Type *IdxTy = Type::getInt32Ty(M.getContext());
1373 FindConstant(ConstantInt::get(IdxTy, 0));
1374 FindType(IdxTy);
1375 FindType(arg->getType());
1376 }
1377}
1378
David Neto22f144c2017-06-12 14:26:21 -04001379void SPIRVProducerPass::FindType(Type *Ty) {
1380 TypeList &TyList = getTypeList();
1381
1382 if (0 != TyList.idFor(Ty)) {
1383 return;
1384 }
1385
1386 if (Ty->isPointerTy()) {
1387 auto AddrSpace = Ty->getPointerAddressSpace();
1388 if ((AddressSpace::Constant == AddrSpace) ||
1389 (AddressSpace::Global == AddrSpace)) {
1390 auto PointeeTy = Ty->getPointerElementType();
1391
1392 if (PointeeTy->isStructTy() &&
1393 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1394 FindType(PointeeTy);
1395 auto ActualPointerTy =
1396 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1397 FindType(ActualPointerTy);
1398 return;
1399 }
1400 }
1401 }
1402
David Neto862b7d82018-06-14 18:48:37 -04001403  // By convention, an LLVM array type with 0 elements maps to
1404  // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which has a
1405  // constant number of elements. We also need the type of the i32 constant
1406  // used for that length.
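  // For example, [0 x float] maps to an OpTypeRuntimeArray of float, while
  // [4 x float] maps to an OpTypeArray whose length operand is an i32
  // constant 4.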
1407 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1408 if (arrayTy->getNumElements() > 0) {
1409 LLVMContext &Context = Ty->getContext();
1410 FindType(Type::getInt32Ty(Context));
1411 }
David Neto22f144c2017-06-12 14:26:21 -04001412 }
1413
1414 for (Type *SubTy : Ty->subtypes()) {
1415 FindType(SubTy);
1416 }
1417
1418 TyList.insert(Ty);
1419}
1420
1421void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1422 // If the global variable has a (non undef) initializer.
1423 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001424    // Generate the constant unless it's the initializer of a module-scope
1425    // constant that we expect to live in a storage buffer.
1426 const bool module_scope_constant_external_init =
1427 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1428 clspv::Option::ModuleConstantsInStorageBuffer();
1429 if (!module_scope_constant_external_init) {
1430 FindConstant(GV.getInitializer());
1431 }
David Neto22f144c2017-06-12 14:26:21 -04001432 }
1433}
1434
1435void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1436 // Investigate constants in function body.
1437 for (BasicBlock &BB : F) {
1438 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001439 if (auto *call = dyn_cast<CallInst>(&I)) {
1440 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001441 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001442          // These constants are handled elsewhere, so skip them.
1443 continue;
1444 }
Alan Baker202c8c72018-08-13 13:47:44 -04001445 if (name.startswith(clspv::ResourceAccessorFunction())) {
1446 continue;
1447 }
1448 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001449 continue;
1450 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001451 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1452          // Skip the first operand, which holds the SPIR-V opcode.
1453 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1454 if (isa<Constant>(I.getOperand(i)) &&
1455 !isa<GlobalValue>(I.getOperand(i))) {
1456 FindConstant(I.getOperand(i));
1457 }
1458 }
1459 continue;
1460 }
David Neto22f144c2017-06-12 14:26:21 -04001461 }
1462
1463 if (isa<AllocaInst>(I)) {
1464        // An alloca instruction has a constant for its element count. Ignore it.
1465 continue;
1466 } else if (isa<ShuffleVectorInst>(I)) {
1467 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1468 // Ignore constant for mask of shuffle vector instruction.
1469 if (i == 2) {
1470 continue;
1471 }
1472
1473 if (isa<Constant>(I.getOperand(i)) &&
1474 !isa<GlobalValue>(I.getOperand(i))) {
1475 FindConstant(I.getOperand(i));
1476 }
1477 }
1478
1479 continue;
1480 } else if (isa<InsertElementInst>(I)) {
1481 // Handle InsertElement with <4 x i8> specially.
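        // When i8 is not natively supported, a <4 x i8> value is packed into a
        // single i32, so inserting a byte becomes a mask-and-shift. Register the
        // 0xFF mask and the Idx*8 shift amount used for that.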
1482 Type *CompositeTy = I.getOperand(0)->getType();
1483 if (is4xi8vec(CompositeTy)) {
1484 LLVMContext &Context = CompositeTy->getContext();
1485 if (isa<Constant>(I.getOperand(0))) {
1486 FindConstant(I.getOperand(0));
1487 }
1488
1489 if (isa<Constant>(I.getOperand(1))) {
1490 FindConstant(I.getOperand(1));
1491 }
1492
1493 // Add mask constant 0xFF.
1494 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1495 FindConstant(CstFF);
1496
1497 // Add shift amount constant.
1498 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1499 uint64_t Idx = CI->getZExtValue();
1500 Constant *CstShiftAmount =
1501 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1502 FindConstant(CstShiftAmount);
1503 }
1504
1505 continue;
1506 }
1507
1508 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1509 // Ignore constant for index of InsertElement instruction.
1510 if (i == 2) {
1511 continue;
1512 }
1513
1514 if (isa<Constant>(I.getOperand(i)) &&
1515 !isa<GlobalValue>(I.getOperand(i))) {
1516 FindConstant(I.getOperand(i));
1517 }
1518 }
1519
1520 continue;
1521 } else if (isa<ExtractElementInst>(I)) {
1522 // Handle ExtractElement with <4 x i8> specially.
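        // Extracting a byte from the packed i32 is a shift right by Idx*8 and a
        // mask with 0xFF (or, for a dynamic index, a shift scaled by 8), so
        // register those constants here.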
1523 Type *CompositeTy = I.getOperand(0)->getType();
1524 if (is4xi8vec(CompositeTy)) {
1525 LLVMContext &Context = CompositeTy->getContext();
1526 if (isa<Constant>(I.getOperand(0))) {
1527 FindConstant(I.getOperand(0));
1528 }
1529
1530 // Add mask constant 0xFF.
1531 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1532 FindConstant(CstFF);
1533
1534 // Add shift amount constant.
1535 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1536 uint64_t Idx = CI->getZExtValue();
1537 Constant *CstShiftAmount =
1538 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1539 FindConstant(CstShiftAmount);
1540 } else {
1541 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1542 FindConstant(Cst8);
1543 }
1544
1545 continue;
1546 }
1547
1548 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1549 // Ignore constant for index of ExtractElement instruction.
1550 if (i == 1) {
1551 continue;
1552 }
1553
1554 if (isa<Constant>(I.getOperand(i)) &&
1555 !isa<GlobalValue>(I.getOperand(i))) {
1556 FindConstant(I.getOperand(i));
1557 }
1558 }
1559
1560 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001561 } else if ((Instruction::Xor == I.getOpcode()) &&
1562 I.getType()->isIntegerTy(1)) {
1563        // We special-case Xor where the type is i1 and one of the arguments is
1564        // a constant 1 (true): this becomes an OpLogicalNot in SPIR-V, so we
1565        // don't need that constant.
David Neto22f144c2017-06-12 14:26:21 -04001566 bool foundConstantTrue = false;
1567 for (Use &Op : I.operands()) {
1568 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1569 auto CI = cast<ConstantInt>(Op);
1570
1571 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001572 // If we already found the true constant, we might (probably only
1573 // on -O0) have an OpLogicalNot which is taking a constant
1574 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001575 FindConstant(Op);
1576 } else {
1577 foundConstantTrue = true;
1578 }
1579 }
1580 }
1581
1582 continue;
David Netod2de94a2017-08-28 17:27:47 -04001583 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001584 // Special case if i8 is not generally handled.
1585 if (!clspv::Option::Int8Support()) {
1586 // For truncation to i8 we mask against 255.
1587 Type *ToTy = I.getType();
1588 if (8u == ToTy->getPrimitiveSizeInBits()) {
1589 LLVMContext &Context = ToTy->getContext();
1590 Constant *Cst255 =
1591 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1592 FindConstant(Cst255);
1593 }
David Netod2de94a2017-08-28 17:27:47 -04001594 }
Neil Henning39672102017-09-29 14:33:13 +01001595 } else if (isa<AtomicRMWInst>(I)) {
1596 LLVMContext &Context = I.getContext();
1597
1598 FindConstant(
1599 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1600 FindConstant(ConstantInt::get(
1601 Type::getInt32Ty(Context),
1602 spv::MemorySemanticsUniformMemoryMask |
1603 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001604 }
1605
1606 for (Use &Op : I.operands()) {
1607 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1608 FindConstant(Op);
1609 }
1610 }
1611 }
1612 }
1613}
1614
1615void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001616 ValueList &CstList = getConstantList();
1617
David Netofb9a7972017-08-25 17:08:24 -04001618 // If V is already tracked, ignore it.
1619 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001620 return;
1621 }
1622
David Neto862b7d82018-06-14 18:48:37 -04001623 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1624 return;
1625 }
1626
David Neto22f144c2017-06-12 14:26:21 -04001627 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001628 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001629
1630 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001631 if (is4xi8vec(CstTy)) {
1632 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001633 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001634 }
1635 }
1636
1637 if (Cst->getNumOperands()) {
1638 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1639 ++I) {
1640 FindConstant(*I);
1641 }
1642
David Netofb9a7972017-08-25 17:08:24 -04001643 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001644 return;
1645 } else if (const ConstantDataSequential *CDS =
1646 dyn_cast<ConstantDataSequential>(Cst)) {
1647 // Add constants for each element to constant list.
1648 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1649 Constant *EleCst = CDS->getElementAsConstant(i);
1650 FindConstant(EleCst);
1651 }
1652 }
1653
1654 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001655 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001656 }
1657}
1658
1659spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1660 switch (AddrSpace) {
1661 default:
1662 llvm_unreachable("Unsupported OpenCL address space");
1663 case AddressSpace::Private:
1664 return spv::StorageClassFunction;
1665 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001666 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001667 case AddressSpace::Constant:
1668 return clspv::Option::ConstantArgsInUniformBuffer()
1669 ? spv::StorageClassUniform
1670 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001671 case AddressSpace::Input:
1672 return spv::StorageClassInput;
1673 case AddressSpace::Local:
1674 return spv::StorageClassWorkgroup;
1675 case AddressSpace::UniformConstant:
1676 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001677 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001678 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001679 case AddressSpace::ModuleScopePrivate:
1680 return spv::StorageClassPrivate;
1681 }
1682}
1683
David Neto862b7d82018-06-14 18:48:37 -04001684spv::StorageClass
1685SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1686 switch (arg_kind) {
1687 case clspv::ArgKind::Buffer:
1688 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001689 case clspv::ArgKind::BufferUBO:
1690 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001691 case clspv::ArgKind::Pod:
1692 return clspv::Option::PodArgsInUniformBuffer()
1693 ? spv::StorageClassUniform
1694 : spv::StorageClassStorageBuffer;
1695 case clspv::ArgKind::Local:
1696 return spv::StorageClassWorkgroup;
1697 case clspv::ArgKind::ReadOnlyImage:
1698 case clspv::ArgKind::WriteOnlyImage:
1699 case clspv::ArgKind::Sampler:
1700 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001701 default:
1702 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001703 }
1704}
1705
David Neto22f144c2017-06-12 14:26:21 -04001706spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1707 return StringSwitch<spv::BuiltIn>(Name)
1708 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1709 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1710 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1711 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1712 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1713 .Default(spv::BuiltInMax);
1714}
1715
1716void SPIRVProducerPass::GenerateExtInstImport() {
1717 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1718 uint32_t &ExtInstImportID = getOpExtInstImportID();
1719
1720 //
1721 // Generate OpExtInstImport.
1722 //
1723 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001724 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001725 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1726 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001727}
1728
alan-bakerb6b09dc2018-11-08 16:59:28 -05001729void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1730 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001731 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1732 ValueMapType &VMap = getValueMap();
1733 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001734 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001735
1736  // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
1737  // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1738 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1739
1740 for (Type *Ty : getTypeList()) {
1741 // Update TypeMap with nextID for reference later.
1742 TypeMap[Ty] = nextID;
1743
1744 switch (Ty->getTypeID()) {
1745 default: {
1746 Ty->print(errs());
1747 llvm_unreachable("Unsupported type???");
1748 break;
1749 }
1750 case Type::MetadataTyID:
1751 case Type::LabelTyID: {
1752 // Ignore these types.
1753 break;
1754 }
1755 case Type::PointerTyID: {
1756 PointerType *PTy = cast<PointerType>(Ty);
1757 unsigned AddrSpace = PTy->getAddressSpace();
1758
1759 // For the purposes of our Vulkan SPIR-V type system, constant and global
1760 // are conflated.
1761 bool UseExistingOpTypePointer = false;
1762 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001763 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1764 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001765        // Check whether we already created this type (for instance, if we had
1766        // both a constant <type>* and a global <type>*, the pointer type is
1767        // created for whichever is seen first and shared by both).
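        // For example, when constant args are not placed in a uniform buffer, a
        // constant float* and a global float* share one OpTypePointer in the
        // StorageBuffer storage class.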
Alan Bakerfcda9482018-10-02 17:09:59 -04001768 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1769 if (0 < TypeMap.count(GlobalTy)) {
1770 TypeMap[PTy] = TypeMap[GlobalTy];
1771 UseExistingOpTypePointer = true;
1772 break;
1773 }
David Neto22f144c2017-06-12 14:26:21 -04001774 }
1775 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001776 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1777 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001778
alan-bakerb6b09dc2018-11-08 16:59:28 -05001779        // Check whether we already created this type (for instance, if we had
1780        // both a constant <type>* and a global <type>*, the pointer type is
1781        // created for whichever is seen first and shared by both).
1782 auto ConstantTy =
1783 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001784 if (0 < TypeMap.count(ConstantTy)) {
1785 TypeMap[PTy] = TypeMap[ConstantTy];
1786 UseExistingOpTypePointer = true;
1787 }
David Neto22f144c2017-06-12 14:26:21 -04001788 }
1789 }
1790
David Neto862b7d82018-06-14 18:48:37 -04001791 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001792
David Neto862b7d82018-06-14 18:48:37 -04001793 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001794 //
1795 // Generate OpTypePointer.
1796 //
1797
1798 // OpTypePointer
1799 // Ops[0] = Storage Class
1800 // Ops[1] = Element Type ID
1801 SPIRVOperandList Ops;
1802
David Neto257c3892018-04-11 13:19:45 -04001803 Ops << MkNum(GetStorageClass(AddrSpace))
1804 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001805
David Neto87846742018-04-11 17:36:22 -04001806 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001807 SPIRVInstList.push_back(Inst);
1808 }
David Neto22f144c2017-06-12 14:26:21 -04001809 break;
1810 }
1811 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001812 StructType *STy = cast<StructType>(Ty);
1813
1814 // Handle sampler type.
1815 if (STy->isOpaque()) {
1816 if (STy->getName().equals("opencl.sampler_t")) {
1817 //
1818 // Generate OpTypeSampler
1819 //
1820 // Empty Ops.
1821 SPIRVOperandList Ops;
1822
David Neto87846742018-04-11 17:36:22 -04001823 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001824 SPIRVInstList.push_back(Inst);
1825 break;
alan-bakerf67468c2019-11-25 15:51:49 -05001826 } else if (STy->getName().startswith("opencl.image2d_ro_t") ||
1827 STy->getName().startswith("opencl.image2d_wo_t") ||
1828 STy->getName().startswith("opencl.image3d_ro_t") ||
1829 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001830 //
1831 // Generate OpTypeImage
1832 //
1833 // Ops[0] = Sampled Type ID
1834 // Ops[1] = Dim ID
1835 // Ops[2] = Depth (Literal Number)
1836 // Ops[3] = Arrayed (Literal Number)
1837 // Ops[4] = MS (Literal Number)
1838 // Ops[5] = Sampled (Literal Number)
1839 // Ops[6] = Image Format ID
1840 //
1841 SPIRVOperandList Ops;
1842
alan-bakerf67468c2019-11-25 15:51:49 -05001843 uint32_t ImageTyID = nextID++;
1844 uint32_t SampledTyID = 0;
1845 if (STy->getName().contains(".float")) {
1846 SampledTyID = lookupType(Type::getFloatTy(Context));
1847 } else if (STy->getName().contains(".uint")) {
1848 SampledTyID = lookupType(Type::getInt32Ty(Context));
1849 } else if (STy->getName().contains(".int")) {
1850 // Generate a signed 32-bit integer if necessary.
1851 if (int32ID == 0) {
1852 int32ID = nextID++;
1853 SPIRVOperandList intOps;
1854 intOps << MkNum(32);
1855 intOps << MkNum(1);
1856 auto signed_int =
1857 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1858 SPIRVInstList.push_back(signed_int);
1859 }
1860 SampledTyID = int32ID;
1861
1862 // Generate a vec4 of the signed int if necessary.
1863 if (v4int32ID == 0) {
1864 v4int32ID = nextID++;
1865 SPIRVOperandList vecOps;
1866 vecOps << MkId(int32ID);
1867 vecOps << MkNum(4);
1868 auto int_vec =
1869 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
1870 SPIRVInstList.push_back(int_vec);
1871 }
1872 } else {
1873 // This was likely an UndefValue.
1874 SampledTyID = lookupType(Type::getFloatTy(Context));
1875 }
David Neto257c3892018-04-11 13:19:45 -04001876 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001877
1878 spv::Dim DimID = spv::Dim2D;
alan-bakerf67468c2019-11-25 15:51:49 -05001879 if (STy->getName().startswith("opencl.image3d_ro_t") ||
1880 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001881 DimID = spv::Dim3D;
1882 }
David Neto257c3892018-04-11 13:19:45 -04001883 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001884
1885 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001886 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001887
1888 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001889 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001890
1891 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001892 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001893
1894 // TODO: Set up Sampled.
1895 //
1896 // From Spec
1897 //
1898 // 0 indicates this is only known at run time, not at compile time
1899 // 1 indicates will be used with sampler
1900 // 2 indicates will be used without a sampler (a storage image)
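          // clspv decides this from the type name: names containing ".sampled"
          // are used with a sampler; all other image types are treated as
          // storage images (see the check below).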
1901 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05001902 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04001903 Sampled = 2;
1904 }
David Neto257c3892018-04-11 13:19:45 -04001905 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04001906
1907 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04001908 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04001909
alan-bakerf67468c2019-11-25 15:51:49 -05001910 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001911 SPIRVInstList.push_back(Inst);
1912 break;
1913 }
1914 }
1915
1916 //
1917 // Generate OpTypeStruct
1918 //
1919 // Ops[0] ... Ops[n] = Member IDs
1920 SPIRVOperandList Ops;
1921
1922 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04001923 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04001924 }
1925
David Neto22f144c2017-06-12 14:26:21 -04001926 uint32_t STyID = nextID;
1927
alan-bakerb6b09dc2018-11-08 16:59:28 -05001928 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001929 SPIRVInstList.push_back(Inst);
1930
1931 // Generate OpMemberDecorate.
1932 auto DecoInsertPoint =
1933 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1934 [](SPIRVInstruction *Inst) -> bool {
1935 return Inst->getOpcode() != spv::OpDecorate &&
1936 Inst->getOpcode() != spv::OpMemberDecorate &&
1937 Inst->getOpcode() != spv::OpExtInstImport;
1938 });
1939
David Netoc463b372017-08-10 15:32:21 -04001940 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04001941 // Search for the correct offsets if this type was remapped.
1942 std::vector<uint32_t> *offsets = nullptr;
1943 auto iter = RemappedUBOTypeOffsets.find(STy);
1944 if (iter != RemappedUBOTypeOffsets.end()) {
1945 offsets = &iter->second;
1946 }
David Netoc463b372017-08-10 15:32:21 -04001947
David Neto862b7d82018-06-14 18:48:37 -04001948 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04001949 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1950 MemberIdx++) {
1951 // Ops[0] = Structure Type ID
1952 // Ops[1] = Member Index(Literal Number)
1953 // Ops[2] = Decoration (Offset)
1954 // Ops[3] = Byte Offset (Literal Number)
1955 Ops.clear();
1956
David Neto257c3892018-04-11 13:19:45 -04001957 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04001958
alan-bakerb6b09dc2018-11-08 16:59:28 -05001959 auto ByteOffset =
1960 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04001961 if (offsets) {
1962 ByteOffset = (*offsets)[MemberIdx];
1963 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05001964 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04001965 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04001966 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04001967
David Neto87846742018-04-11 17:36:22 -04001968 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001969 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001970 }
1971
1972 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04001973 if (StructTypesNeedingBlock.idFor(STy)) {
1974 Ops.clear();
1975 // Use Block decorations with StorageBuffer storage class.
1976 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04001977
David Neto862b7d82018-06-14 18:48:37 -04001978 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
1979 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001980 }
1981 break;
1982 }
1983 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05001984 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04001985
1986 if (BitWidth == 1) {
David Neto87846742018-04-11 17:36:22 -04001987 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04001988 SPIRVInstList.push_back(Inst);
1989 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05001990 if (!clspv::Option::Int8Support()) {
1991 // i8 is added to TypeMap as i32.
1992 // No matter what LLVM type is requested first, always alias the
1993 // second one's SPIR-V type to be the same as the one we generated
1994 // first.
1995 unsigned aliasToWidth = 0;
1996 if (BitWidth == 8) {
1997 aliasToWidth = 32;
1998 BitWidth = 32;
1999 } else if (BitWidth == 32) {
2000 aliasToWidth = 8;
2001 }
2002 if (aliasToWidth) {
2003 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2004 auto where = TypeMap.find(otherType);
2005 if (where == TypeMap.end()) {
2006 // Go ahead and make it, but also map the other type to it.
2007 TypeMap[otherType] = nextID;
2008 } else {
2009            // Alias this SPIR-V type to the existing type.
2010 TypeMap[Ty] = where->second;
2011 break;
2012 }
David Neto391aeb12017-08-26 15:51:58 -04002013 }
David Neto22f144c2017-06-12 14:26:21 -04002014 }
2015
David Neto257c3892018-04-11 13:19:45 -04002016 SPIRVOperandList Ops;
2017 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002018
2019 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002020 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002021 }
2022 break;
2023 }
2024 case Type::HalfTyID:
2025 case Type::FloatTyID:
2026 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002027 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2028 SPIRVOperand *WidthOp =
2029 new SPIRVOperand(SPIRVOperandType::LITERAL_INTEGER, BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002030
2031 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002032 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04002033 break;
2034 }
2035 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002036 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002037 const uint64_t Length = ArrTy->getArrayNumElements();
2038 if (Length == 0) {
2039 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002040
David Neto862b7d82018-06-14 18:48:37 -04002041 // Only generate the type once.
2042 // TODO(dneto): Can it ever be generated more than once?
2043 // Doesn't LLVM type uniqueness guarantee we'll only see this
2044 // once?
2045 Type *EleTy = ArrTy->getArrayElementType();
2046 if (OpRuntimeTyMap.count(EleTy) == 0) {
2047 uint32_t OpTypeRuntimeArrayID = nextID;
2048 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002049
David Neto862b7d82018-06-14 18:48:37 -04002050 //
2051 // Generate OpTypeRuntimeArray.
2052 //
David Neto22f144c2017-06-12 14:26:21 -04002053
David Neto862b7d82018-06-14 18:48:37 -04002054 // OpTypeRuntimeArray
2055 // Ops[0] = Element Type ID
2056 SPIRVOperandList Ops;
2057 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002058
David Neto862b7d82018-06-14 18:48:37 -04002059 SPIRVInstList.push_back(
2060 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002061
David Neto862b7d82018-06-14 18:48:37 -04002062 if (Hack_generate_runtime_array_stride_early) {
2063 // Generate OpDecorate.
2064 auto DecoInsertPoint = std::find_if(
2065 SPIRVInstList.begin(), SPIRVInstList.end(),
2066 [](SPIRVInstruction *Inst) -> bool {
2067 return Inst->getOpcode() != spv::OpDecorate &&
2068 Inst->getOpcode() != spv::OpMemberDecorate &&
2069 Inst->getOpcode() != spv::OpExtInstImport;
2070 });
David Neto22f144c2017-06-12 14:26:21 -04002071
David Neto862b7d82018-06-14 18:48:37 -04002072 // Ops[0] = Target ID
2073 // Ops[1] = Decoration (ArrayStride)
2074 // Ops[2] = Stride Number(Literal Number)
2075 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002076
David Neto862b7d82018-06-14 18:48:37 -04002077 Ops << MkId(OpTypeRuntimeArrayID)
2078 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002079 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002080
David Neto862b7d82018-06-14 18:48:37 -04002081 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2082 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2083 }
2084 }
David Neto22f144c2017-06-12 14:26:21 -04002085
David Neto862b7d82018-06-14 18:48:37 -04002086 } else {
David Neto22f144c2017-06-12 14:26:21 -04002087
David Neto862b7d82018-06-14 18:48:37 -04002088 //
2089 // Generate OpConstant and OpTypeArray.
2090 //
2091
2092 //
2093 // Generate OpConstant for array length.
2094 //
2095 // Ops[0] = Result Type ID
2096 // Ops[1] .. Ops[n] = Values LiteralNumber
2097 SPIRVOperandList Ops;
2098
2099 Type *LengthTy = Type::getInt32Ty(Context);
2100 uint32_t ResTyID = lookupType(LengthTy);
2101 Ops << MkId(ResTyID);
2102
2103 assert(Length < UINT32_MAX);
2104 Ops << MkNum(static_cast<uint32_t>(Length));
2105
2106 // Add constant for length to constant list.
2107 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2108 AllocatedVMap[CstLength] = nextID;
2109 VMap[CstLength] = nextID;
2110 uint32_t LengthID = nextID;
2111
2112 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2113 SPIRVInstList.push_back(CstInst);
2114
2115 // Remember to generate ArrayStride later
2116 getTypesNeedingArrayStride().insert(Ty);
2117
2118 //
2119 // Generate OpTypeArray.
2120 //
2121 // Ops[0] = Element Type ID
2122 // Ops[1] = Array Length Constant ID
2123 Ops.clear();
2124
2125 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2126 Ops << MkId(EleTyID) << MkId(LengthID);
2127
2128 // Update TypeMap with nextID.
2129 TypeMap[Ty] = nextID;
2130
2131 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2132 SPIRVInstList.push_back(ArrayInst);
2133 }
David Neto22f144c2017-06-12 14:26:21 -04002134 break;
2135 }
2136 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002137 // <4 x i8> is changed to i32 if i8 is not generally supported.
2138 if (!clspv::Option::Int8Support() &&
2139 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002140 if (Ty->getVectorNumElements() == 4) {
2141 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2142 break;
2143 } else {
2144 Ty->print(errs());
2145 llvm_unreachable("Support above i8 vector type");
2146 }
2147 }
2148
2149 // Ops[0] = Component Type ID
2150 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002151 SPIRVOperandList Ops;
2152 Ops << MkId(lookupType(Ty->getVectorElementType()))
2153 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002154
alan-bakerb6b09dc2018-11-08 16:59:28 -05002155 SPIRVInstruction *inst =
2156 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002157 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002158 break;
2159 }
2160 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002161 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002162 SPIRVInstList.push_back(Inst);
2163 break;
2164 }
2165 case Type::FunctionTyID: {
2166 // Generate SPIRV instruction for function type.
2167 FunctionType *FTy = cast<FunctionType>(Ty);
2168
2169 // Ops[0] = Return Type ID
2170 // Ops[1] ... Ops[n] = Parameter Type IDs
2171 SPIRVOperandList Ops;
2172
2173 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002174 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002175
2176 // Find SPIRV instructions for parameter types
2177 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2178 // Find SPIRV instruction for parameter type.
2179 auto ParamTy = FTy->getParamType(k);
2180 if (ParamTy->isPointerTy()) {
2181 auto PointeeTy = ParamTy->getPointerElementType();
2182 if (PointeeTy->isStructTy() &&
2183 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2184 ParamTy = PointeeTy;
2185 }
2186 }
2187
David Netoc6f3ab22018-04-06 18:02:31 -04002188 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002189 }
2190
David Neto87846742018-04-11 17:36:22 -04002191 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002192 SPIRVInstList.push_back(Inst);
2193 break;
2194 }
2195 }
2196 }
2197
2198 // Generate OpTypeSampledImage.
2199 TypeMapType &OpImageTypeMap = getImageTypeMap();
2200 for (auto &ImageType : OpImageTypeMap) {
2201 //
2202 // Generate OpTypeSampledImage.
2203 //
2204 // Ops[0] = Image Type ID
2205 //
2206 SPIRVOperandList Ops;
2207
2208 Type *ImgTy = ImageType.first;
David Netoc6f3ab22018-04-06 18:02:31 -04002209 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002210
2211 // Update OpImageTypeMap.
2212 ImageType.second = nextID;
2213
David Neto87846742018-04-11 17:36:22 -04002214 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002215 SPIRVInstList.push_back(Inst);
2216 }
David Netoc6f3ab22018-04-06 18:02:31 -04002217
2218 // Generate types for pointer-to-local arguments.
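  // Each pointer-to-local kernel argument becomes a Workgroup-storage pointer
  // to an array whose length is the OpSpecConstant generated below (one set of
  // IDs per SpecId).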
Alan Baker202c8c72018-08-13 13:47:44 -04002219 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2220 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002221 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002222
2223 // Generate the spec constant.
2224 SPIRVOperandList Ops;
2225 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002226 SPIRVInstList.push_back(
2227 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002228
2229 // Generate the array type.
2230 Ops.clear();
2231 // The element type must have been created.
2232 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2233 assert(elem_ty_id);
2234 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2235
2236 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002237 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002238
2239 Ops.clear();
2240 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002241 SPIRVInstList.push_back(new SPIRVInstruction(
2242 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002243 }
David Neto22f144c2017-06-12 14:26:21 -04002244}
2245
2246void SPIRVProducerPass::GenerateSPIRVConstants() {
2247 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2248 ValueMapType &VMap = getValueMap();
2249 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2250 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002251 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002252
2253 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002254 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002255 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002256
2257 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002258 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002259 continue;
2260 }
2261
David Netofb9a7972017-08-25 17:08:24 -04002262 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002263 VMap[Cst] = nextID;
2264
2265 //
2266 // Generate OpConstant.
2267 //
2268
2269 // Ops[0] = Result Type ID
2270 // Ops[1] .. Ops[n] = Values LiteralNumber
2271 SPIRVOperandList Ops;
2272
David Neto257c3892018-04-11 13:19:45 -04002273 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002274
2275 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002276 spv::Op Opcode = spv::OpNop;
2277
2278 if (isa<UndefValue>(Cst)) {
2279 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002280 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002281 if (hack_undef && IsTypeNullable(Cst->getType())) {
2282 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002283 }
David Neto22f144c2017-06-12 14:26:21 -04002284 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2285 unsigned BitWidth = CI->getBitWidth();
2286 if (BitWidth == 1) {
2287 // If the bitwidth of constant is 1, generate OpConstantTrue or
2288 // OpConstantFalse.
2289 if (CI->getZExtValue()) {
2290 // Ops[0] = Result Type ID
2291 Opcode = spv::OpConstantTrue;
2292 } else {
2293 // Ops[0] = Result Type ID
2294 Opcode = spv::OpConstantFalse;
2295 }
David Neto22f144c2017-06-12 14:26:21 -04002296 } else {
2297 auto V = CI->getZExtValue();
2298 LiteralNum.push_back(V & 0xFFFFFFFF);
2299
2300 if (BitWidth > 32) {
2301 LiteralNum.push_back(V >> 32);
2302 }
2303
2304 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002305
David Neto257c3892018-04-11 13:19:45 -04002306 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002307 }
2308 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2309 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2310 Type *CFPTy = CFP->getType();
2311 if (CFPTy->isFloatTy()) {
2312 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002313 } else if (CFPTy->isDoubleTy()) {
2314 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2315 LiteralNum.push_back(FPVal >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002316 } else {
2317 CFPTy->print(errs());
2318 llvm_unreachable("Implement this ConstantFP Type");
2319 }
2320
2321 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002322
David Neto257c3892018-04-11 13:19:45 -04002323 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002324 } else if (isa<ConstantDataSequential>(Cst) &&
2325 cast<ConstantDataSequential>(Cst)->isString()) {
2326 Cst->print(errs());
2327 llvm_unreachable("Implement this Constant");
2328
2329 } else if (const ConstantDataSequential *CDS =
2330 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002331 // Let's convert <4 x i8> constant to int constant specially.
2332 // This case occurs when all the values are specified as constant
2333 // ints.
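      // For example, a constant <4 x i8> vector is emitted as one packed i32
      // OpConstant rather than an OpConstantComposite of four values.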
2334 Type *CstTy = Cst->getType();
2335 if (is4xi8vec(CstTy)) {
2336 LLVMContext &Context = CstTy->getContext();
2337
2338 //
2339 // Generate OpConstant with OpTypeInt 32 0.
2340 //
Neil Henning39672102017-09-29 14:33:13 +01002341 uint32_t IntValue = 0;
2342 for (unsigned k = 0; k < 4; k++) {
2343 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002344 IntValue = (IntValue << 8) | (Val & 0xffu);
2345 }
2346
2347 Type *i32 = Type::getInt32Ty(Context);
2348 Constant *CstInt = ConstantInt::get(i32, IntValue);
2349 // If this constant is already registered on VMap, use it.
2350 if (VMap.count(CstInt)) {
2351 uint32_t CstID = VMap[CstInt];
2352 VMap[Cst] = CstID;
2353 continue;
2354 }
2355
David Neto257c3892018-04-11 13:19:45 -04002356 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002357
David Neto87846742018-04-11 17:36:22 -04002358 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002359 SPIRVInstList.push_back(CstInst);
2360
2361 continue;
2362 }
2363
2364 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002365 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2366 Constant *EleCst = CDS->getElementAsConstant(k);
2367 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002368 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002369 }
2370
2371 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002372 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2373 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002374 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002375 Type *CstTy = Cst->getType();
2376 if (is4xi8vec(CstTy)) {
2377 LLVMContext &Context = CstTy->getContext();
2378
2379 //
2380 // Generate OpConstant with OpTypeInt 32 0.
2381 //
Neil Henning39672102017-09-29 14:33:13 +01002382 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002383 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2384 I != E; ++I) {
2385 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002386 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002387 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2388 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002389 }
David Neto49351ac2017-08-26 17:32:20 -04002390 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002391 }
2392
David Neto49351ac2017-08-26 17:32:20 -04002393 Type *i32 = Type::getInt32Ty(Context);
2394 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002395 // If this constant is already registered on VMap, use it.
2396 if (VMap.count(CstInt)) {
2397 uint32_t CstID = VMap[CstInt];
2398 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002399 continue;
David Neto22f144c2017-06-12 14:26:21 -04002400 }
2401
David Neto257c3892018-04-11 13:19:45 -04002402 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002403
David Neto87846742018-04-11 17:36:22 -04002404 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002405 SPIRVInstList.push_back(CstInst);
2406
David Neto19a1bad2017-08-25 15:01:41 -04002407 continue;
David Neto22f144c2017-06-12 14:26:21 -04002408 }
2409
2410 // We use a constant composite in SPIR-V for our constant aggregate in
2411 // LLVM.
2412 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002413
2414 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2415        // Look up the ID of this aggregate's element (for which we will have
2416        // previously created a constant).
2417 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2418
2419 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002420 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002421 }
2422 } else if (Cst->isNullValue()) {
2423 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002424 } else {
2425 Cst->print(errs());
2426 llvm_unreachable("Unsupported Constant???");
2427 }
2428
alan-baker5b86ed72019-02-15 08:26:50 -05002429 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2430 // Null pointer requires variable pointers.
2431 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2432 }
2433
David Neto87846742018-04-11 17:36:22 -04002434 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002435 SPIRVInstList.push_back(CstInst);
2436 }
2437}
2438
2439void SPIRVProducerPass::GenerateSamplers(Module &M) {
2440 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002441
alan-bakerb6b09dc2018-11-08 16:59:28 -05002442 auto &sampler_map = getSamplerMap();
David Neto862b7d82018-06-14 18:48:37 -04002443 SamplerMapIndexToIDMap.clear();
David Neto22f144c2017-06-12 14:26:21 -04002444 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Neto862b7d82018-06-14 18:48:37 -04002445 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2446 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002447
David Neto862b7d82018-06-14 18:48:37 -04002448 // We might have samplers in the sampler map that are not used
2449  // in the translation unit. We still need to allocate variables and
2450  // bindings for them.
2451 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002452
Kévin Petitdf71de32019-04-09 14:09:50 +01002453 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-bakerb6b09dc2018-11-08 16:59:28 -05002454 if (!var_fn)
2455 return;
David Neto862b7d82018-06-14 18:48:37 -04002456 for (auto user : var_fn->users()) {
2457 // Populate SamplerLiteralToDescriptorSetMap and
2458 // SamplerLiteralToBindingMap.
2459 //
2460 // Look for calls like
2461 // call %opencl.sampler_t addrspace(2)*
2462 // @clspv.sampler.var.literal(
2463 // i32 descriptor,
2464 // i32 binding,
2465 // i32 index-into-sampler-map)
alan-bakerb6b09dc2018-11-08 16:59:28 -05002466 if (auto *call = dyn_cast<CallInst>(user)) {
2467 const size_t index_into_sampler_map = static_cast<size_t>(
2468 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002469 if (index_into_sampler_map >= sampler_map.size()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002470 errs() << "Out of bounds index to sampler map: "
2471 << index_into_sampler_map;
David Neto862b7d82018-06-14 18:48:37 -04002472 llvm_unreachable("bad sampler init: out of bounds");
2473 }
2474
2475 auto sampler_value = sampler_map[index_into_sampler_map].first;
2476 const auto descriptor_set = static_cast<unsigned>(
2477 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2478 const auto binding = static_cast<unsigned>(
2479 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2480
2481 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2482 SamplerLiteralToBindingMap[sampler_value] = binding;
2483 used_bindings.insert(binding);
2484 }
2485 }
2486
2487 unsigned index = 0;
2488 for (auto SamplerLiteral : sampler_map) {
David Neto22f144c2017-06-12 14:26:21 -04002489 // Generate OpVariable.
2490 //
2491 // GIDOps[0] : Result Type ID
2492 // GIDOps[1] : Storage Class
2493 SPIRVOperandList Ops;
2494
David Neto257c3892018-04-11 13:19:45 -04002495 Ops << MkId(lookupType(SamplerTy))
2496 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002497
David Neto862b7d82018-06-14 18:48:37 -04002498 auto sampler_var_id = nextID++;
2499 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002500 SPIRVInstList.push_back(Inst);
2501
David Neto862b7d82018-06-14 18:48:37 -04002502 SamplerMapIndexToIDMap[index] = sampler_var_id;
2503 SamplerLiteralToIDMap[SamplerLiteral.first] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002504
2505 // Find Insert Point for OpDecorate.
2506 auto DecoInsertPoint =
2507 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2508 [](SPIRVInstruction *Inst) -> bool {
2509 return Inst->getOpcode() != spv::OpDecorate &&
2510 Inst->getOpcode() != spv::OpMemberDecorate &&
2511 Inst->getOpcode() != spv::OpExtInstImport;
2512 });
2513
2514 // Ops[0] = Target ID
2515 // Ops[1] = Decoration (DescriptorSet)
2516 // Ops[2] = LiteralNumber according to Decoration
2517 Ops.clear();
2518
David Neto862b7d82018-06-14 18:48:37 -04002519 unsigned descriptor_set;
2520 unsigned binding;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002521 if (SamplerLiteralToBindingMap.find(SamplerLiteral.first) ==
2522 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002523 // This sampler is not actually used. Find the next one.
2524 for (binding = 0; used_bindings.count(binding); binding++)
2525 ;
2526 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2527 used_bindings.insert(binding);
2528 } else {
2529 descriptor_set = SamplerLiteralToDescriptorSetMap[SamplerLiteral.first];
2530 binding = SamplerLiteralToBindingMap[SamplerLiteral.first];
alan-bakercff80152019-06-15 00:38:00 -04002531
2532 version0::DescriptorMapEntry::SamplerData sampler_data = {
2533 SamplerLiteral.first};
2534 descriptorMapEntries->emplace_back(std::move(sampler_data),
2535 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002536 }
2537
2538 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2539 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002540
David Neto87846742018-04-11 17:36:22 -04002541 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002542 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2543
2544 // Ops[0] = Target ID
2545 // Ops[1] = Decoration (Binding)
2546 // Ops[2] = LiteralNumber according to Decoration
2547 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002548 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2549 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002550
David Neto87846742018-04-11 17:36:22 -04002551 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002552 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002553
2554 index++;
David Neto22f144c2017-06-12 14:26:21 -04002555 }
David Neto862b7d82018-06-14 18:48:37 -04002556}
David Neto22f144c2017-06-12 14:26:21 -04002557
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002558void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002559 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2560 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002561
David Neto862b7d82018-06-14 18:48:37 -04002562  // Generate variables. Make one for each resource var info object.
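  // Schematically (result IDs are illustrative), each one becomes:
  //   %var = OpVariable %ptr_type <storage class>
  //   OpDecorate %var DescriptorSet <info->descriptor_set>
  //   OpDecorate %var Binding <info->binding>
  // with the decorations emitted further below in this function.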
2563 for (auto *info : ModuleOrderedResourceVars) {
2564 Type *type = info->var_fn->getReturnType();
2565 // Remap the address space for opaque types.
2566 switch (info->arg_kind) {
2567 case clspv::ArgKind::Sampler:
2568 case clspv::ArgKind::ReadOnlyImage:
2569 case clspv::ArgKind::WriteOnlyImage:
2570 type = PointerType::get(type->getPointerElementType(),
2571 clspv::AddressSpace::UniformConstant);
2572 break;
2573 default:
2574 break;
2575 }
David Neto22f144c2017-06-12 14:26:21 -04002576
David Neto862b7d82018-06-14 18:48:37 -04002577 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002578
David Neto862b7d82018-06-14 18:48:37 -04002579 const auto type_id = lookupType(type);
2580 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2581 SPIRVOperandList Ops;
2582 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002583
David Neto862b7d82018-06-14 18:48:37 -04002584 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2585 SPIRVInstList.push_back(Inst);
2586
2587 // Map calls to the variable-builtin-function.
2588 for (auto &U : info->var_fn->uses()) {
2589 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2590 const auto set = unsigned(
2591 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2592 const auto binding = unsigned(
2593 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2594 if (set == info->descriptor_set && binding == info->binding) {
2595 switch (info->arg_kind) {
2596 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002597 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002598 case clspv::ArgKind::Pod:
2599 // The call maps to the variable directly.
2600 VMap[call] = info->var_id;
2601 break;
2602 case clspv::ArgKind::Sampler:
2603 case clspv::ArgKind::ReadOnlyImage:
2604 case clspv::ArgKind::WriteOnlyImage:
2605 // The call maps to a load we generate later.
2606 ResourceVarDeferredLoadCalls[call] = info->var_id;
2607 break;
2608 default:
2609 llvm_unreachable("Unhandled arg kind");
2610 }
2611 }
David Neto22f144c2017-06-12 14:26:21 -04002612 }
David Neto862b7d82018-06-14 18:48:37 -04002613 }
2614 }
David Neto22f144c2017-06-12 14:26:21 -04002615
David Neto862b7d82018-06-14 18:48:37 -04002616 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002617
David Neto862b7d82018-06-14 18:48:37 -04002618 // Find Insert Point for OpDecorate.
2619 auto DecoInsertPoint =
2620 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2621 [](SPIRVInstruction *Inst) -> bool {
2622 return Inst->getOpcode() != spv::OpDecorate &&
2623 Inst->getOpcode() != spv::OpMemberDecorate &&
2624 Inst->getOpcode() != spv::OpExtInstImport;
2625 });
2626
2627 SPIRVOperandList Ops;
2628 for (auto *info : ModuleOrderedResourceVars) {
2629 // Decorate with DescriptorSet and Binding.
2630 Ops.clear();
2631 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2632 << MkNum(info->descriptor_set);
2633 SPIRVInstList.insert(DecoInsertPoint,
2634 new SPIRVInstruction(spv::OpDecorate, Ops));
2635
2636 Ops.clear();
2637 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2638 << MkNum(info->binding);
2639 SPIRVInstList.insert(DecoInsertPoint,
2640 new SPIRVInstruction(spv::OpDecorate, Ops));
2641
alan-bakere9308012019-03-15 10:25:13 -04002642 if (info->coherent) {
2643 // Decorate with Coherent if required for the variable.
2644 Ops.clear();
2645 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2646 SPIRVInstList.insert(DecoInsertPoint,
2647 new SPIRVInstruction(spv::OpDecorate, Ops));
2648 }
2649
David Neto862b7d82018-06-14 18:48:37 -04002650 // Generate NonWritable and NonReadable
2651 switch (info->arg_kind) {
2652 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002653 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002654 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2655 clspv::AddressSpace::Constant) {
2656 Ops.clear();
2657 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2658 SPIRVInstList.insert(DecoInsertPoint,
2659 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002660 }
David Neto862b7d82018-06-14 18:48:37 -04002661 break;
David Neto862b7d82018-06-14 18:48:37 -04002662 case clspv::ArgKind::WriteOnlyImage:
2663 Ops.clear();
2664 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2665 SPIRVInstList.insert(DecoInsertPoint,
2666 new SPIRVInstruction(spv::OpDecorate, Ops));
2667 break;
2668 default:
2669 break;
David Neto22f144c2017-06-12 14:26:21 -04002670 }
2671 }
2672}
2673
2674void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002675 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002676 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2677 ValueMapType &VMap = getValueMap();
2678 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002679 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002680
2681 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2682 Type *Ty = GV.getType();
2683 PointerType *PTy = cast<PointerType>(Ty);
2684
2685 uint32_t InitializerID = 0;
2686
2687 // Workgroup size is handled differently (it goes into a constant)
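  // As a sketch (IDs and sizes are illustrative), a module whose kernels all
  // declare reqd_work_group_size(8, 4, 1) ends up with something like:
  //   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_1
  //   OpDecorate %wgsize BuiltIn WorkgroupSize
  // Without that metadata, OpSpecConstant / OpSpecConstantComposite are used
  // instead so the host can specialize the sizes (handled below).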
2688 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2689 std::vector<bool> HasMDVec;
2690 uint32_t PrevXDimCst = 0xFFFFFFFF;
2691 uint32_t PrevYDimCst = 0xFFFFFFFF;
2692 uint32_t PrevZDimCst = 0xFFFFFFFF;
2693 for (Function &Func : *GV.getParent()) {
2694 if (Func.isDeclaration()) {
2695 continue;
2696 }
2697
2698 // We only need to check kernels.
2699 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2700 continue;
2701 }
2702
2703 if (const MDNode *MD =
2704 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2705 uint32_t CurXDimCst = static_cast<uint32_t>(
2706 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2707 uint32_t CurYDimCst = static_cast<uint32_t>(
2708 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2709 uint32_t CurZDimCst = static_cast<uint32_t>(
2710 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2711
2712 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2713 PrevZDimCst == 0xFFFFFFFF) {
2714 PrevXDimCst = CurXDimCst;
2715 PrevYDimCst = CurYDimCst;
2716 PrevZDimCst = CurZDimCst;
2717 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2718 CurZDimCst != PrevZDimCst) {
2719 llvm_unreachable(
2720 "reqd_work_group_size must be the same across all kernels");
2721 } else {
2722 continue;
2723 }
2724
2725 //
2726 // Generate OpConstantComposite.
2727 //
2728 // Ops[0] : Result Type ID
2729 // Ops[1] : Constant size for x dimension.
2730 // Ops[2] : Constant size for y dimension.
2731 // Ops[3] : Constant size for z dimension.
2732 SPIRVOperandList Ops;
2733
2734 uint32_t XDimCstID =
2735 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2736 uint32_t YDimCstID =
2737 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2738 uint32_t ZDimCstID =
2739 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2740
2741 InitializerID = nextID;
2742
David Neto257c3892018-04-11 13:19:45 -04002743 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2744 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002745
David Neto87846742018-04-11 17:36:22 -04002746 auto *Inst =
2747 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002748 SPIRVInstList.push_back(Inst);
2749
2750 HasMDVec.push_back(true);
2751 } else {
2752 HasMDVec.push_back(false);
2753 }
2754 }
2755
2756 // Check all kernels have same definitions for work_group_size.
2757 bool HasMD = false;
2758 if (!HasMDVec.empty()) {
2759 HasMD = HasMDVec[0];
2760 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2761 if (HasMD != HasMDVec[i]) {
2762 llvm_unreachable(
2763 "Kernels should have consistent work group size definition");
2764 }
2765 }
2766 }
2767
2768 // If all kernels do not have metadata for reqd_work_group_size, generate
2769 // OpSpecConstants for x/y/z dimension.
2770 if (!HasMD) {
2771 //
2772 // Generate OpSpecConstants for x/y/z dimension.
2773 //
2774 // Ops[0] : Result Type ID
2775 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2776 uint32_t XDimCstID = 0;
2777 uint32_t YDimCstID = 0;
2778 uint32_t ZDimCstID = 0;
2779
David Neto22f144c2017-06-12 14:26:21 -04002780 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002781 uint32_t result_type_id =
2782 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002783
David Neto257c3892018-04-11 13:19:45 -04002784 // X Dimension
2785 Ops << MkId(result_type_id) << MkNum(1);
2786 XDimCstID = nextID++;
2787 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002788 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002789
2790 // Y Dimension
2791 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002792 Ops << MkId(result_type_id) << MkNum(1);
2793 YDimCstID = nextID++;
2794 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002795 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002796
2797 // Z Dimension
2798 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002799 Ops << MkId(result_type_id) << MkNum(1);
2800 ZDimCstID = nextID++;
2801 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002802 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002803
David Neto257c3892018-04-11 13:19:45 -04002804 BuiltinDimVec.push_back(XDimCstID);
2805 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002806 BuiltinDimVec.push_back(ZDimCstID);
2807
David Neto22f144c2017-06-12 14:26:21 -04002808 //
2809 // Generate OpSpecConstantComposite.
2810 //
2811 // Ops[0] : Result Type ID
2812 // Ops[1] : Constant size for x dimension.
2813 // Ops[2] : Constant size for y dimension.
2814 // Ops[3] : Constant size for z dimension.
2815 InitializerID = nextID;
2816
2817 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002818 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2819 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002820
David Neto87846742018-04-11 17:36:22 -04002821 auto *Inst =
2822 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002823 SPIRVInstList.push_back(Inst);
2824 }
2825 }
2826
David Neto22f144c2017-06-12 14:26:21 -04002827 VMap[&GV] = nextID;
2828
2829 //
2830 // Generate OpVariable.
2831 //
2832 // GIDOps[0] : Result Type ID
2833 // GIDOps[1] : Storage Class
2834 SPIRVOperandList Ops;
2835
David Neto85082642018-03-24 06:55:20 -07002836 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002837 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002838
David Neto85082642018-03-24 06:55:20 -07002839 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002840 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002841 clspv::Option::ModuleConstantsInStorageBuffer();
2842
Kévin Petit23d5f182019-08-13 16:21:29 +01002843 if (GV.hasInitializer()) {
2844 auto GVInit = GV.getInitializer();
2845 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2846 assert(VMap.count(GVInit) == 1);
2847 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002848 }
2849 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002850
2851 if (0 != InitializerID) {
2852    // Emit the ID of the initializer as part of the variable definition.
2853 Ops << MkId(InitializerID);
2854 }
David Neto85082642018-03-24 06:55:20 -07002855 const uint32_t var_id = nextID++;
2856
David Neto87846742018-04-11 17:36:22 -04002857 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002858 SPIRVInstList.push_back(Inst);
2859
2860 // If we have a builtin.
2861 if (spv::BuiltInMax != BuiltinType) {
2862 // Find Insert Point for OpDecorate.
2863 auto DecoInsertPoint =
2864 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2865 [](SPIRVInstruction *Inst) -> bool {
2866 return Inst->getOpcode() != spv::OpDecorate &&
2867 Inst->getOpcode() != spv::OpMemberDecorate &&
2868 Inst->getOpcode() != spv::OpExtInstImport;
2869 });
2870 //
2871 // Generate OpDecorate.
2872 //
2873 // DOps[0] = Target ID
2874 // DOps[1] = Decoration (Builtin)
2875 // DOps[2] = BuiltIn ID
2876 uint32_t ResultID;
2877
2878    // WorkgroupSize is different: we decorate the constant composite that
2879    // holds its value, rather than the variable we use to access the value.
2880 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2881 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002882 // Save both the value and variable IDs for later.
2883 WorkgroupSizeValueID = InitializerID;
2884 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002885 } else {
2886 ResultID = VMap[&GV];
2887 }
2888
2889 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002890 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2891 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002892
David Neto87846742018-04-11 17:36:22 -04002893 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002894 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002895 } else if (module_scope_constant_external_init) {
2896 // This module scope constant is initialized from a storage buffer with data
2897 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002898 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002899
David Neto862b7d82018-06-14 18:48:37 -04002900    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002901 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2902 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002903 std::string hexbytes;
2904 llvm::raw_string_ostream str(hexbytes);
2905 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002906 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2907 str.str()};
2908 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2909 0);
David Neto85082642018-03-24 06:55:20 -07002910
2911 // Find Insert Point for OpDecorate.
2912 auto DecoInsertPoint =
2913 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2914 [](SPIRVInstruction *Inst) -> bool {
2915 return Inst->getOpcode() != spv::OpDecorate &&
2916 Inst->getOpcode() != spv::OpMemberDecorate &&
2917 Inst->getOpcode() != spv::OpExtInstImport;
2918 });
2919
David Neto257c3892018-04-11 13:19:45 -04002920 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002921 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002922 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2923 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002924 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002925
2926 // OpDecorate %var DescriptorSet <descriptor_set>
2927 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002928 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2929 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002930 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04002931 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04002932 }
2933}
2934
David Netoc6f3ab22018-04-06 18:02:31 -04002935void SPIRVProducerPass::GenerateWorkgroupVars() {
2936 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04002937 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2938 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002939 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002940
2941 // Generate OpVariable.
2942 //
2943 // GIDOps[0] : Result Type ID
2944 // GIDOps[1] : Storage Class
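    // Schematically (IDs illustrative), each pointer-to-local argument gets:
    //   %var = OpVariable %ptr_Workgroup_array Workgroup
    // where the array length is an OpSpecConstant tied to the argument's
    // SpecId, letting the host size the local buffer at pipeline creation.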
2945 SPIRVOperandList Ops;
2946 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
2947
2948 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002949 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002950 }
2951}
2952
David Neto862b7d82018-06-14 18:48:37 -04002953void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
2954 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04002955 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
2956 return;
2957 }
David Neto862b7d82018-06-14 18:48:37 -04002958 // Gather the list of resources that are used by this function's arguments.
2959 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
2960
alan-bakerf5e5f692018-11-27 08:33:24 -05002961 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
2962 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04002963 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05002964 std::string kind =
2965 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
2966 ? "pod_ubo"
2967 : argKind;
2968 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04002969 };
2970
2971 auto *fty = F.getType()->getPointerElementType();
2972 auto *func_ty = dyn_cast<FunctionType>(fty);
2973
alan-baker038e9242019-04-19 22:14:41 -04002974 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04002975 // If an argument maps to a resource variable, then get descriptor set and
2976  // binding from the resource variable. Other info comes from the metadata.
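  // For illustration, one operand of !kernel_arg_map might look roughly like
  // this (all values here are hypothetical):
  //   !{!"dst", i32 0, i32 0, i32 0, i32 4, !"buffer", i32 -1}
  //     name    old   new   offset size   kind      spec_id
  // and is decoded positionally in the loop below.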
2977 const auto *arg_map = F.getMetadata("kernel_arg_map");
2978 if (arg_map) {
2979 for (const auto &arg : arg_map->operands()) {
2980 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00002981 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04002982 const auto name =
2983 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
2984 const auto old_index =
2985 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
2986 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05002987 const size_t new_index = static_cast<size_t>(
2988 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002989 const auto offset =
2990 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00002991 const auto arg_size =
2992 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04002993 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00002994 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04002995 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00002996 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05002997
2998 uint32_t descriptor_set = 0;
2999 uint32_t binding = 0;
3000 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003001 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003002 static_cast<uint32_t>(spec_id),
3003 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003004 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003005 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003006 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3007 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3008 DL));
David Neto862b7d82018-06-14 18:48:37 -04003009 } else {
3010 auto *info = resource_var_at_index[new_index];
3011 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003012 descriptor_set = info->descriptor_set;
3013 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003014 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003015 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3016 binding);
David Neto862b7d82018-06-14 18:48:37 -04003017 }
3018 } else {
3019 // There is no argument map.
3020 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003021 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003022
3023 SmallVector<Argument *, 4> arguments;
3024 for (auto &arg : F.args()) {
3025 arguments.push_back(&arg);
3026 }
3027
3028 unsigned arg_index = 0;
3029 for (auto *info : resource_var_at_index) {
3030 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003031 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003032 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003033 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003034 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003035 }
3036
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003037 // Local pointer arguments are unused in this case. Offset is always
3038 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003039 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3040 F.getName(), arg->getName(),
3041 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3042 0, 0,
3043 0, arg_size};
3044 descriptorMapEntries->emplace_back(std::move(kernel_data),
3045 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003046 }
3047 arg_index++;
3048 }
3049 // Generate mappings for pointer-to-local arguments.
3050 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3051 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003052 auto where = LocalArgSpecIds.find(arg);
3053 if (where != LocalArgSpecIds.end()) {
3054 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003055        // Pod argument members are unused in this case.
3056 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3057 F.getName(),
3058 arg->getName(),
3059 arg_index,
3060 ArgKind::Local,
3061 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003062 static_cast<uint32_t>(
3063 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003064 0,
3065 0};
3066 // Pointer-to-local arguments do not utilize descriptor set and binding.
3067 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003068 }
3069 }
3070 }
3071}
3072
David Neto22f144c2017-06-12 14:26:21 -04003073void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3074 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3075 ValueMapType &VMap = getValueMap();
3076 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003077 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3078 auto &GlobalConstArgSet = getGlobalConstArgSet();
3079
3080 FunctionType *FTy = F.getFunctionType();
3081
3082 //
David Neto22f144c2017-06-12 14:26:21 -04003083  // Generate OpFunction.
3084 //
3085
3086 // FOps[0] : Result Type ID
3087 // FOps[1] : Function Control
3088 // FOps[2] : Function Type ID
3089 SPIRVOperandList FOps;
3090
3091 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003092 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003093
3094 // Check function attributes for SPIRV Function Control.
3095 uint32_t FuncControl = spv::FunctionControlMaskNone;
3096 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3097 FuncControl |= spv::FunctionControlInlineMask;
3098 }
3099 if (F.hasFnAttribute(Attribute::NoInline)) {
3100 FuncControl |= spv::FunctionControlDontInlineMask;
3101 }
3102 // TODO: Check llvm attribute for Function Control Pure.
3103 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3104 FuncControl |= spv::FunctionControlPureMask;
3105 }
3106 // TODO: Check llvm attribute for Function Control Const.
3107 if (F.hasFnAttribute(Attribute::ReadNone)) {
3108 FuncControl |= spv::FunctionControlConstMask;
3109 }
3110
David Neto257c3892018-04-11 13:19:45 -04003111 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003112
3113 uint32_t FTyID;
3114 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3115 SmallVector<Type *, 4> NewFuncParamTys;
3116 FunctionType *NewFTy =
3117 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3118 FTyID = lookupType(NewFTy);
3119 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003120 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003121 if (GlobalConstFuncTyMap.count(FTy)) {
3122 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3123 } else {
3124 FTyID = lookupType(FTy);
3125 }
3126 }
3127
David Neto257c3892018-04-11 13:19:45 -04003128 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003129
3130 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3131 EntryPoints.push_back(std::make_pair(&F, nextID));
3132 }
3133
3134 VMap[&F] = nextID;
3135
David Neto482550a2018-03-24 05:21:07 -07003136 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003137 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3138 }
David Neto22f144c2017-06-12 14:26:21 -04003139 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003140 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003141 SPIRVInstList.push_back(FuncInst);
3142
3143 //
3144 // Generate OpFunctionParameter for Normal function.
3145 //
3146
3147 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003148
3149 // Find Insert Point for OpDecorate.
3150 auto DecoInsertPoint =
3151 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3152 [](SPIRVInstruction *Inst) -> bool {
3153 return Inst->getOpcode() != spv::OpDecorate &&
3154 Inst->getOpcode() != spv::OpMemberDecorate &&
3155 Inst->getOpcode() != spv::OpExtInstImport;
3156 });
3157
David Neto22f144c2017-06-12 14:26:21 -04003158 // Iterate Argument for name instead of param type from function type.
3159 unsigned ArgIdx = 0;
3160 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003161 uint32_t param_id = nextID++;
3162 VMap[&Arg] = param_id;
3163
3164 if (CalledWithCoherentResource(Arg)) {
3165 // If the arg is passed a coherent resource ever, then decorate this
3166 // parameter with Coherent too.
3167 SPIRVOperandList decoration_ops;
3168 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003169 SPIRVInstList.insert(
3170 DecoInsertPoint,
3171 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003172 }
David Neto22f144c2017-06-12 14:26:21 -04003173
3174 // ParamOps[0] : Result Type ID
3175 SPIRVOperandList ParamOps;
3176
3177 // Find SPIRV instruction for parameter type.
3178 uint32_t ParamTyID = lookupType(Arg.getType());
3179 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3180 if (GlobalConstFuncTyMap.count(FTy)) {
3181 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3182 Type *EleTy = PTy->getPointerElementType();
3183 Type *ArgTy =
3184 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3185 ParamTyID = lookupType(ArgTy);
3186 GlobalConstArgSet.insert(&Arg);
3187 }
3188 }
3189 }
David Neto257c3892018-04-11 13:19:45 -04003190 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003191
3192 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003193 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003194 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003195 SPIRVInstList.push_back(ParamInst);
3196
3197 ArgIdx++;
3198 }
3199 }
3200}
3201
alan-bakerb6b09dc2018-11-08 16:59:28 -05003202void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003203 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3204 EntryPointVecType &EntryPoints = getEntryPointVec();
3205 ValueMapType &VMap = getValueMap();
3206 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3207 uint32_t &ExtInstImportID = getOpExtInstImportID();
3208 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3209
3210 // Set up insert point.
3211 auto InsertPoint = SPIRVInstList.begin();
3212
3213 //
3214 // Generate OpCapability
3215 //
3216  // TODO: Which llvm information is mapped to SPIRV Capability?
3217
3218 // Ops[0] = Capability
3219 SPIRVOperandList Ops;
3220
David Neto87846742018-04-11 17:36:22 -04003221 auto *CapInst =
3222 new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
David Neto22f144c2017-06-12 14:26:21 -04003223 SPIRVInstList.insert(InsertPoint, CapInst);
3224
3225 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003226 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3227 // Generate OpCapability for i8 type.
3228 SPIRVInstList.insert(InsertPoint,
3229 new SPIRVInstruction(spv::OpCapability,
3230 {MkNum(spv::CapabilityInt8)}));
3231 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003232 // Generate OpCapability for i16 type.
David Neto87846742018-04-11 17:36:22 -04003233 SPIRVInstList.insert(InsertPoint,
3234 new SPIRVInstruction(spv::OpCapability,
3235 {MkNum(spv::CapabilityInt16)}));
David Neto22f144c2017-06-12 14:26:21 -04003236 } else if (Ty->isIntegerTy(64)) {
3237 // Generate OpCapability for i64 type.
David Neto87846742018-04-11 17:36:22 -04003238 SPIRVInstList.insert(InsertPoint,
3239 new SPIRVInstruction(spv::OpCapability,
3240 {MkNum(spv::CapabilityInt64)}));
David Neto22f144c2017-06-12 14:26:21 -04003241 } else if (Ty->isHalfTy()) {
3242 // Generate OpCapability for half type.
3243 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003244 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3245 {MkNum(spv::CapabilityFloat16)}));
David Neto22f144c2017-06-12 14:26:21 -04003246 } else if (Ty->isDoubleTy()) {
3247 // Generate OpCapability for double type.
3248 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003249 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3250 {MkNum(spv::CapabilityFloat64)}));
David Neto22f144c2017-06-12 14:26:21 -04003251 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3252 if (STy->isOpaque()) {
alan-bakerf67468c2019-11-25 15:51:49 -05003253 if (STy->getName().startswith("opencl.image2d_wo_t") ||
3254 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04003255 // Generate OpCapability for write only image type.
3256 SPIRVInstList.insert(
3257 InsertPoint,
3258 new SPIRVInstruction(
David Neto87846742018-04-11 17:36:22 -04003259 spv::OpCapability,
3260 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
David Neto22f144c2017-06-12 14:26:21 -04003261 }
3262 }
3263 }
3264 }
3265
David Neto5c22a252018-03-15 16:07:41 -04003266 { // OpCapability ImageQuery
3267 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003268 for (const auto &SymVal : module.getValueSymbolTable()) {
3269 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
3270 if (clspv::IsGetImageHeight(F) || clspv::IsGetImageWidth(F)) {
3271 hasImageQuery = true;
3272 break;
3273 }
David Neto5c22a252018-03-15 16:07:41 -04003274 }
3275 }
alan-bakerf67468c2019-11-25 15:51:49 -05003276
David Neto5c22a252018-03-15 16:07:41 -04003277 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003278 auto *ImageQueryCapInst = new SPIRVInstruction(
3279 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003280 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3281 }
3282 }
3283
David Neto22f144c2017-06-12 14:26:21 -04003284 if (hasVariablePointers()) {
3285 //
David Neto22f144c2017-06-12 14:26:21 -04003286 // Generate OpCapability.
3287 //
3288 // Ops[0] = Capability
3289 //
3290 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003291 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003292
David Neto87846742018-04-11 17:36:22 -04003293 SPIRVInstList.insert(InsertPoint,
3294 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003295 } else if (hasVariablePointersStorageBuffer()) {
3296 //
3297 // Generate OpCapability.
3298 //
3299 // Ops[0] = Capability
3300 //
3301 Ops.clear();
3302 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003303
alan-baker5b86ed72019-02-15 08:26:50 -05003304 SPIRVInstList.insert(InsertPoint,
3305 new SPIRVInstruction(spv::OpCapability, Ops));
3306 }
3307
3308 // Always add the storage buffer extension
3309 {
David Neto22f144c2017-06-12 14:26:21 -04003310 //
3311 // Generate OpExtension.
3312 //
3313 // Ops[0] = Name (Literal String)
3314 //
alan-baker5b86ed72019-02-15 08:26:50 -05003315 auto *ExtensionInst = new SPIRVInstruction(
3316 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3317 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3318 }
David Neto22f144c2017-06-12 14:26:21 -04003319
alan-baker5b86ed72019-02-15 08:26:50 -05003320 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3321 //
3322 // Generate OpExtension.
3323 //
3324 // Ops[0] = Name (Literal String)
3325 //
3326 auto *ExtensionInst = new SPIRVInstruction(
3327 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3328 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003329 }
3330
3331 if (ExtInstImportID) {
3332 ++InsertPoint;
3333 }
3334
3335 //
3336 // Generate OpMemoryModel
3337 //
3338 // Memory model for Vulkan will always be GLSL450.
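  // In SPIR-V assembly this is simply:
  //   OpMemoryModel Logical GLSL450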
3339
3340 // Ops[0] = Addressing Model
3341 // Ops[1] = Memory Model
3342 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003343 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003344
David Neto87846742018-04-11 17:36:22 -04003345 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003346 SPIRVInstList.insert(InsertPoint, MemModelInst);
3347
3348 //
3349 // Generate OpEntryPoint
3350 //
3351 for (auto EntryPoint : EntryPoints) {
3352 // Ops[0] = Execution Model
3353 // Ops[1] = EntryPoint ID
3354 // Ops[2] = Name (Literal String)
3355 // ...
3356 //
3357 // TODO: Do we need to consider Interface ID for forward references???
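    // For example (IDs and names illustrative):
    //   OpEntryPoint GLCompute %foo_fn "foo" %interface_var_1 %interface_var_2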
3358 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003359 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003360 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3361 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003362
David Neto22f144c2017-06-12 14:26:21 -04003363 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003364 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003365 }
3366
David Neto87846742018-04-11 17:36:22 -04003367 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003368 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3369 }
3370
3371 for (auto EntryPoint : EntryPoints) {
3372 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3373 ->getMetadata("reqd_work_group_size")) {
3374
3375 if (!BuiltinDimVec.empty()) {
3376 llvm_unreachable(
3377 "Kernels should have consistent work group size definition");
3378 }
3379
3380 //
3381 // Generate OpExecutionMode
3382 //
3383
3384 // Ops[0] = Entry Point ID
3385 // Ops[1] = Execution Mode
3386 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
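      // For example, reqd_work_group_size(8, 4, 1) (values illustrative)
      // yields:
      //   OpExecutionMode %kernel_id LocalSize 8 4 1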
3387 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003388 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003389
3390 uint32_t XDim = static_cast<uint32_t>(
3391 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3392 uint32_t YDim = static_cast<uint32_t>(
3393 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3394 uint32_t ZDim = static_cast<uint32_t>(
3395 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3396
David Neto257c3892018-04-11 13:19:45 -04003397 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003398
David Neto87846742018-04-11 17:36:22 -04003399 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003400 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3401 }
3402 }
3403
3404 //
3405 // Generate OpSource.
3406 //
3407 // Ops[0] = SourceLanguage ID
3408 // Ops[1] = Version (LiteralNum)
3409 //
3410 Ops.clear();
Kévin Petit0fc88042019-04-09 23:25:02 +01003411 if (clspv::Option::CPlusPlus()) {
3412 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3413 } else {
3414 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
3415 }
David Neto22f144c2017-06-12 14:26:21 -04003416
David Neto87846742018-04-11 17:36:22 -04003417 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003418 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3419
3420 if (!BuiltinDimVec.empty()) {
3421 //
3422 // Generate OpDecorates for x/y/z dimension.
3423 //
3424 // Ops[0] = Target ID
3425 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003426 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003427
3428 // X Dimension
3429 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003430 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003431 SPIRVInstList.insert(InsertPoint,
3432 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003433
3434 // Y Dimension
3435 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003436 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003437 SPIRVInstList.insert(InsertPoint,
3438 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003439
3440 // Z Dimension
3441 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003442 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003443 SPIRVInstList.insert(InsertPoint,
3444 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003445 }
3446}
3447
David Netob6e2e062018-04-25 10:32:06 -04003448void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3449 // Work around a driver bug. Initializers on Private variables might not
3450 // work. So the start of the kernel should store the initializer value to the
3451 // variables. Yes, *every* entry point pays this cost if *any* entry point
3452 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3453 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003454 // TODO(dneto): Remove this at some point once fixed drivers are widely
3455 // available.
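  // The generated prologue is just a store of the constant into the variable,
  // schematically:
  //   OpStore %WorkgroupSizeVar %WorkgroupSizeValue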
David Netob6e2e062018-04-25 10:32:06 -04003456 if (WorkgroupSizeVarID) {
3457 assert(WorkgroupSizeValueID);
3458
3459 SPIRVOperandList Ops;
3460 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3461
3462 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3463 getSPIRVInstList().push_back(Inst);
3464 }
3465}
3466
David Neto22f144c2017-06-12 14:26:21 -04003467void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3468 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3469 ValueMapType &VMap = getValueMap();
3470
David Netob6e2e062018-04-25 10:32:06 -04003471 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003472
3473 for (BasicBlock &BB : F) {
3474 // Register BasicBlock to ValueMap.
3475 VMap[&BB] = nextID;
3476
3477 //
3478 // Generate OpLabel for Basic Block.
3479 //
3480 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003481 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003482 SPIRVInstList.push_back(Inst);
3483
David Neto6dcd4712017-06-23 11:06:47 -04003484 // OpVariable instructions must come first.
3485 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003486 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3487 // Allocating a pointer requires variable pointers.
3488 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003489 setVariablePointersCapabilities(
3490 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003491 }
David Neto6dcd4712017-06-23 11:06:47 -04003492 GenerateInstruction(I);
3493 }
3494 }
3495
David Neto22f144c2017-06-12 14:26:21 -04003496 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003497 if (clspv::Option::HackInitializers()) {
3498 GenerateEntryPointInitialStores();
3499 }
David Neto22f144c2017-06-12 14:26:21 -04003500 }
3501
3502 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003503 if (!isa<AllocaInst>(I)) {
3504 GenerateInstruction(I);
3505 }
David Neto22f144c2017-06-12 14:26:21 -04003506 }
3507 }
3508}
3509
3510spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3511 const std::map<CmpInst::Predicate, spv::Op> Map = {
3512 {CmpInst::ICMP_EQ, spv::OpIEqual},
3513 {CmpInst::ICMP_NE, spv::OpINotEqual},
3514 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3515 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3516 {CmpInst::ICMP_ULT, spv::OpULessThan},
3517 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3518 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3519 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3520 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3521 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3522 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3523 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3524 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3525 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3526 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3527 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3528 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3529 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3530 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3531 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3532 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3533 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3534
3535 assert(0 != Map.count(I->getPredicate()));
3536
3537 return Map.at(I->getPredicate());
3538}
3539
3540spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3541 const std::map<unsigned, spv::Op> Map{
3542 {Instruction::Trunc, spv::OpUConvert},
3543 {Instruction::ZExt, spv::OpUConvert},
3544 {Instruction::SExt, spv::OpSConvert},
3545 {Instruction::FPToUI, spv::OpConvertFToU},
3546 {Instruction::FPToSI, spv::OpConvertFToS},
3547 {Instruction::UIToFP, spv::OpConvertUToF},
3548 {Instruction::SIToFP, spv::OpConvertSToF},
3549 {Instruction::FPTrunc, spv::OpFConvert},
3550 {Instruction::FPExt, spv::OpFConvert},
3551 {Instruction::BitCast, spv::OpBitcast}};
3552
3553 assert(0 != Map.count(I.getOpcode()));
3554
3555 return Map.at(I.getOpcode());
3556}
3557
3558spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003559 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003560 switch (I.getOpcode()) {
3561 default:
3562 break;
3563 case Instruction::Or:
3564 return spv::OpLogicalOr;
3565 case Instruction::And:
3566 return spv::OpLogicalAnd;
3567 case Instruction::Xor:
3568 return spv::OpLogicalNotEqual;
3569 }
3570 }
3571
alan-bakerb6b09dc2018-11-08 16:59:28 -05003572 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003573 {Instruction::Add, spv::OpIAdd},
3574 {Instruction::FAdd, spv::OpFAdd},
3575 {Instruction::Sub, spv::OpISub},
3576 {Instruction::FSub, spv::OpFSub},
3577 {Instruction::Mul, spv::OpIMul},
3578 {Instruction::FMul, spv::OpFMul},
3579 {Instruction::UDiv, spv::OpUDiv},
3580 {Instruction::SDiv, spv::OpSDiv},
3581 {Instruction::FDiv, spv::OpFDiv},
3582 {Instruction::URem, spv::OpUMod},
3583 {Instruction::SRem, spv::OpSRem},
3584 {Instruction::FRem, spv::OpFRem},
3585 {Instruction::Or, spv::OpBitwiseOr},
3586 {Instruction::Xor, spv::OpBitwiseXor},
3587 {Instruction::And, spv::OpBitwiseAnd},
3588 {Instruction::Shl, spv::OpShiftLeftLogical},
3589 {Instruction::LShr, spv::OpShiftRightLogical},
3590 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3591
3592 assert(0 != Map.count(I.getOpcode()));
3593
3594 return Map.at(I.getOpcode());
3595}
3596
3597void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3598 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3599 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003600 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3601 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3602
3603 // Register Instruction to ValueMap.
3604 if (0 == VMap[&I]) {
3605 VMap[&I] = nextID;
3606 }
3607
3608 switch (I.getOpcode()) {
3609 default: {
3610 if (Instruction::isCast(I.getOpcode())) {
3611 //
3612 // Generate SPIRV instructions for cast operators.
3613 //
3614
David Netod2de94a2017-08-28 17:27:47 -04003615 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003616 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003617 auto toI8 = Ty == Type::getInt8Ty(Context);
3618 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003619 // Handle zext, sext and uitofp with i1 type specially.
3620 if ((I.getOpcode() == Instruction::ZExt ||
3621 I.getOpcode() == Instruction::SExt ||
3622 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003623 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003624 //
3625 // Generate OpSelect.
3626 //
3627
3628 // Ops[0] = Result Type ID
3629 // Ops[1] = Condition ID
3630 // Ops[2] = True Constant ID
3631 // Ops[3] = False Constant ID
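        // e.g. (illustrative) "%r = zext i1 %c to i32" becomes
        //   %r = OpSelect %uint %c %uint_1 %uint_0
        // while sext selects -1 and uitofp selects 1.0 for the true case.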
3632 SPIRVOperandList Ops;
3633
David Neto257c3892018-04-11 13:19:45 -04003634 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003635
David Neto22f144c2017-06-12 14:26:21 -04003636 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003637 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003638
3639 uint32_t TrueID = 0;
3640 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003641 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003642 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003643 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003644 } else {
3645 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3646 }
David Neto257c3892018-04-11 13:19:45 -04003647 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003648
3649 uint32_t FalseID = 0;
3650 if (I.getOpcode() == Instruction::ZExt) {
3651 FalseID = VMap[Constant::getNullValue(I.getType())];
3652 } else if (I.getOpcode() == Instruction::SExt) {
3653 FalseID = VMap[Constant::getNullValue(I.getType())];
3654 } else {
3655 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3656 }
David Neto257c3892018-04-11 13:19:45 -04003657 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003658
David Neto87846742018-04-11 17:36:22 -04003659 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003660 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003661 } else if (!clspv::Option::Int8Support() &&
3662 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003663 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3664 // 8 bits.
3665 // Before:
3666 // %result = trunc i32 %a to i8
3667 // After
3668 // %result = OpBitwiseAnd %uint %a %uint_255
3669
3670 SPIRVOperandList Ops;
3671
David Neto257c3892018-04-11 13:19:45 -04003672 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003673
3674 Type *UintTy = Type::getInt32Ty(Context);
3675 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003676 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003677
David Neto87846742018-04-11 17:36:22 -04003678 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003679 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003680 } else {
3681 // Ops[0] = Result Type ID
3682 // Ops[1] = Source Value ID
3683 SPIRVOperandList Ops;
3684
David Neto257c3892018-04-11 13:19:45 -04003685 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003686
David Neto87846742018-04-11 17:36:22 -04003687 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003688 SPIRVInstList.push_back(Inst);
3689 }
3690 } else if (isa<BinaryOperator>(I)) {
3691 //
3692 // Generate SPIRV instructions for binary operators.
3693 //
3694
3695 // Handle xor with i1 type specially.
3696 if (I.getOpcode() == Instruction::Xor &&
3697 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003698 ((isa<ConstantInt>(I.getOperand(0)) &&
3699 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3700 (isa<ConstantInt>(I.getOperand(1)) &&
3701 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003702 //
3703 // Generate OpLogicalNot.
3704 //
3705 // Ops[0] = Result Type ID
3706 // Ops[1] = Operand
3707 SPIRVOperandList Ops;
3708
David Neto257c3892018-04-11 13:19:45 -04003709 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003710
3711 Value *CondV = I.getOperand(0);
3712 if (isa<Constant>(I.getOperand(0))) {
3713 CondV = I.getOperand(1);
3714 }
David Neto257c3892018-04-11 13:19:45 -04003715 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003716
David Neto87846742018-04-11 17:36:22 -04003717 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003718 SPIRVInstList.push_back(Inst);
3719 } else {
3720 // Ops[0] = Result Type ID
3721 // Ops[1] = Operand 0
3722 // Ops[2] = Operand 1
3723 SPIRVOperandList Ops;
3724
David Neto257c3892018-04-11 13:19:45 -04003725 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3726 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003727
David Neto87846742018-04-11 17:36:22 -04003728 auto *Inst =
3729 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003730 SPIRVInstList.push_back(Inst);
3731 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003732 } else if (I.getOpcode() == Instruction::FNeg) {
3733 // The only unary operator.
3734 //
3735 // Ops[0] = Result Type ID
3736 // Ops[1] = Operand 0
3737 SPIRVOperandList ops;
3738
3739 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3740 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3741 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003742 } else {
3743 I.print(errs());
3744 llvm_unreachable("Unsupported instruction???");
3745 }
3746 break;
3747 }
3748 case Instruction::GetElementPtr: {
3749 auto &GlobalConstArgSet = getGlobalConstArgSet();
3750
3751 //
3752 // Generate OpAccessChain.
3753 //
3754 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3755
3759
3760 // Ops[0] = Result Type ID
3761 // Ops[1] = Base ID
3762 // Ops[2] ... Ops[n] = Indexes ID
3763 SPIRVOperandList Ops;
3764
alan-bakerb6b09dc2018-11-08 16:59:28 -05003765 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003766 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3767 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3768 // Use pointer type with private address space for global constant.
3769 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003770 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003771 }
David Neto257c3892018-04-11 13:19:45 -04003772
3773 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003774
David Neto862b7d82018-06-14 18:48:37 -04003775 // Generate the base pointer.
3776 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003777
David Neto862b7d82018-06-14 18:48:37 -04003778 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003779
3780 //
3781 // Apply the following rules to the GEP:
3782 //
David Neto862b7d82018-06-14 18:48:37 -04003783 // 1. If the GEP's first index is constant 0, generate OpAccessChain and
3784 //    ignore that first index.
David Neto22f144c2017-06-12 14:26:21 -04003785 // 2. If the GEP's first index is a non-zero constant, generate
3786 //    OpPtrAccessChain and use the first index.
3787 // 3. If the GEP's first index is not constant, generate OpPtrAccessChain
3788 //    and use the first index.
3789 // 4. Otherwise (none of cases 1-3 apply), generate OpAccessChain and use
3790 //    the first index.
3791 //
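    // Illustrative sketch only (the %names are hypothetical, not actual IDs):
    //   getelementptr %T, %T* %base, i32 0, i32 2
    //     -> OpAccessChain %ptr %base %uint_2        ; leading 0 dropped (rule 1)
    //   getelementptr %T, %T* %base, i32 %n, i32 2
    //     -> OpPtrAccessChain %ptr %base %n %uint_2  ; first index kept (rule 3)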
3792 spv::Op Opcode = spv::OpAccessChain;
3793 unsigned offset = 0;
3794 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003795 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003796 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003797 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003798 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003799 }
David Neto862b7d82018-06-14 18:48:37 -04003800 } else {
David Neto22f144c2017-06-12 14:26:21 -04003801 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003802 }
3803
3804 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003805 // Do we need to generate ArrayStride? Check against the GEP result type
3806 // rather than the pointer type of the base because when indexing into
3807 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3808 // for something else in the SPIR-V.
3809 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003810 auto address_space = ResultType->getAddressSpace();
3811 setVariablePointersCapabilities(address_space);
3812 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003813 case spv::StorageClassStorageBuffer:
3814 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003815 // Save the need to generate an ArrayStride decoration. But defer
3816 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003817 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003818 break;
3819 default:
3820 break;
David Neto1a1a0582017-07-07 12:01:44 -04003821 }
David Neto22f144c2017-06-12 14:26:21 -04003822 }
3823
3824 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003825 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003826 }
3827
David Neto87846742018-04-11 17:36:22 -04003828 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003829 SPIRVInstList.push_back(Inst);
3830 break;
3831 }
3832 case Instruction::ExtractValue: {
3833 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3834 // Ops[0] = Result Type ID
3835 // Ops[1] = Composite ID
3836 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3837 SPIRVOperandList Ops;
3838
David Neto257c3892018-04-11 13:19:45 -04003839 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003840
3841 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003842 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003843
3844 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003845 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003846 }
3847
David Neto87846742018-04-11 17:36:22 -04003848 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003849 SPIRVInstList.push_back(Inst);
3850 break;
3851 }
3852 case Instruction::InsertValue: {
3853 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3854 // Ops[0] = Result Type ID
3855 // Ops[1] = Object ID
3856 // Ops[2] = Composite ID
3857 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3858 SPIRVOperandList Ops;
3859
3860 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003861 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003862
3863 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003864 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003865
3866 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003867 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003868
3869 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003870 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003871 }
3872
David Neto87846742018-04-11 17:36:22 -04003873 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003874 SPIRVInstList.push_back(Inst);
3875 break;
3876 }
3877 case Instruction::Select: {
3878 //
3879 // Generate OpSelect.
3880 //
3881
3882 // Ops[0] = Result Type ID
3883 // Ops[1] = Condition ID
3884 // Ops[2] = True Constant ID
3885 // Ops[3] = False Constant ID
3886 SPIRVOperandList Ops;
3887
3888 // Find SPIRV instruction for parameter type.
3889 auto Ty = I.getType();
3890 if (Ty->isPointerTy()) {
3891 auto PointeeTy = Ty->getPointerElementType();
3892 if (PointeeTy->isStructTy() &&
3893 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3894 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003895 } else {
3896 // Selecting between pointers requires variable pointers.
3897 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3898 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3899 setVariablePointers(true);
3900 }
David Neto22f144c2017-06-12 14:26:21 -04003901 }
3902 }
3903
David Neto257c3892018-04-11 13:19:45 -04003904 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3905 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003906
David Neto87846742018-04-11 17:36:22 -04003907 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003908 SPIRVInstList.push_back(Inst);
3909 break;
3910 }
3911 case Instruction::ExtractElement: {
3912 // Handle <4 x i8> type manually.
3913 Type *CompositeTy = I.getOperand(0)->getType();
3914 if (is4xi8vec(CompositeTy)) {
3915 //
3916 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3917 // <4 x i8>.
3918 //
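      // Roughly, for element index Idx this lowers to (illustrative only):
      //   result = (vec_as_uint >> (Idx * 8)) & 0xFF
      // i.e. an OpShiftRightLogical followed by an OpBitwiseAnd.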
3919
3920 //
3921 // Generate OpShiftRightLogical
3922 //
3923 // Ops[0] = Result Type ID
3924 // Ops[1] = Operand 0
3925 // Ops[2] = Operand 1
3926 //
3927 SPIRVOperandList Ops;
3928
David Neto257c3892018-04-11 13:19:45 -04003929 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04003930
3931 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003932 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04003933
3934 uint32_t Op1ID = 0;
3935 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3936 // Handle constant index.
3937 uint64_t Idx = CI->getZExtValue();
3938 Value *ShiftAmount =
3939 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3940 Op1ID = VMap[ShiftAmount];
3941 } else {
3942 // Handle variable index.
3943 SPIRVOperandList TmpOps;
3944
David Neto257c3892018-04-11 13:19:45 -04003945 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3946 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003947
3948 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003949 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003950
3951 Op1ID = nextID;
3952
David Neto87846742018-04-11 17:36:22 -04003953 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003954 SPIRVInstList.push_back(TmpInst);
3955 }
David Neto257c3892018-04-11 13:19:45 -04003956 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04003957
3958 uint32_t ShiftID = nextID;
3959
David Neto87846742018-04-11 17:36:22 -04003960 auto *Inst =
3961 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003962 SPIRVInstList.push_back(Inst);
3963
3964 //
3965 // Generate OpBitwiseAnd
3966 //
3967 // Ops[0] = Result Type ID
3968 // Ops[1] = Operand 0
3969 // Ops[2] = Operand 1
3970 //
3971 Ops.clear();
3972
David Neto257c3892018-04-11 13:19:45 -04003973 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04003974
3975 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04003976 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04003977
David Neto9b2d6252017-09-06 15:47:37 -04003978 // Reset mapping for this value to the result of the bitwise and.
3979 VMap[&I] = nextID;
3980
David Neto87846742018-04-11 17:36:22 -04003981 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003982 SPIRVInstList.push_back(Inst);
3983 break;
3984 }
3985
3986 // Ops[0] = Result Type ID
3987 // Ops[1] = Composite ID
3988 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3989 SPIRVOperandList Ops;
3990
David Neto257c3892018-04-11 13:19:45 -04003991 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003992
3993 spv::Op Opcode = spv::OpCompositeExtract;
3994 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04003995 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04003996 } else {
David Neto257c3892018-04-11 13:19:45 -04003997 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003998 Opcode = spv::OpVectorExtractDynamic;
3999 }
4000
David Neto87846742018-04-11 17:36:22 -04004001 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004002 SPIRVInstList.push_back(Inst);
4003 break;
4004 }
4005 case Instruction::InsertElement: {
4006 // Handle <4 x i8> type manually.
4007 Type *CompositeTy = I.getOperand(0)->getType();
4008 if (is4xi8vec(CompositeTy)) {
4009 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4010 uint32_t CstFFID = VMap[CstFF];
4011
4012 uint32_t ShiftAmountID = 0;
4013 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4014 // Handle constant index.
4015 uint64_t Idx = CI->getZExtValue();
4016 Value *ShiftAmount =
4017 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4018 ShiftAmountID = VMap[ShiftAmount];
4019 } else {
4020 // Handle variable index.
4021 SPIRVOperandList TmpOps;
4022
David Neto257c3892018-04-11 13:19:45 -04004023 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4024 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004025
4026 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004027 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004028
4029 ShiftAmountID = nextID;
4030
David Neto87846742018-04-11 17:36:22 -04004031 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004032 SPIRVInstList.push_back(TmpInst);
4033 }
4034
4035 //
4036 // Generate mask operations.
4037 //
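      // Sketch of the lowering below (names are illustrative only):
      //   mask    = 0xFF << (Idx * 8)       ; OpShiftLeftLogical
      //   invmask = ~mask                   ; OpNot
      //   cleared = original & invmask      ; OpBitwiseAnd
      //   shifted = new_byte << (Idx * 8)   ; OpShiftLeftLogical
      //   result  = cleared | shifted       ; OpBitwiseOr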
4038
4039 // ShiftLeft mask according to index of insertelement.
4040 SPIRVOperandList Ops;
4041
David Neto257c3892018-04-11 13:19:45 -04004042 const uint32_t ResTyID = lookupType(CompositeTy);
4043 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004044
4045 uint32_t MaskID = nextID;
4046
David Neto87846742018-04-11 17:36:22 -04004047 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004048 SPIRVInstList.push_back(Inst);
4049
4050 // Inverse mask.
4051 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004052 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004053
4054 uint32_t InvMaskID = nextID;
4055
David Neto87846742018-04-11 17:36:22 -04004056 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004057 SPIRVInstList.push_back(Inst);
4058
4059 // Apply mask.
4060 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004061 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004062
4063 uint32_t OrgValID = nextID;
4064
David Neto87846742018-04-11 17:36:22 -04004065 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004066 SPIRVInstList.push_back(Inst);
4067
4068 // Create correct value according to index of insertelement.
4069 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004070 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4071 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004072
4073 uint32_t InsertValID = nextID;
4074
David Neto87846742018-04-11 17:36:22 -04004075 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004076 SPIRVInstList.push_back(Inst);
4077
4078 // Insert value to original value.
4079 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004080 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004081
David Netoa394f392017-08-26 20:45:29 -04004082 VMap[&I] = nextID;
4083
David Neto87846742018-04-11 17:36:22 -04004084 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004085 SPIRVInstList.push_back(Inst);
4086
4087 break;
4088 }
4089
David Neto22f144c2017-06-12 14:26:21 -04004090 SPIRVOperandList Ops;
4091
James Priced26efea2018-06-09 23:28:32 +01004092 // Ops[0] = Result Type ID
4093 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004094
4095 spv::Op Opcode = spv::OpCompositeInsert;
4096 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004097 const auto value = CI->getZExtValue();
4098 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004099 // Ops[1] = Object ID
4100 // Ops[2] = Composite ID
4101 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004102 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004103 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004104 } else {
James Priced26efea2018-06-09 23:28:32 +01004105 // Ops[1] = Composite ID
4106 // Ops[2] = Object ID
4107 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004108 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004109 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004110 Opcode = spv::OpVectorInsertDynamic;
4111 }
4112
David Neto87846742018-04-11 17:36:22 -04004113 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004114 SPIRVInstList.push_back(Inst);
4115 break;
4116 }
4117 case Instruction::ShuffleVector: {
4118 // Ops[0] = Result Type ID
4119 // Ops[1] = Vector 1 ID
4120 // Ops[2] = Vector 2 ID
4121 // Ops[3] ... Ops[n] = Components (Literal Number)
4122 SPIRVOperandList Ops;
4123
David Neto257c3892018-04-11 13:19:45 -04004124 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4125 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004126
4127 uint64_t NumElements = 0;
4128 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4129 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4130
4131 if (Cst->isNullValue()) {
4132 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004133 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004134 }
4135 } else if (const ConstantDataSequential *CDS =
4136 dyn_cast<ConstantDataSequential>(Cst)) {
4137 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4138 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004139 const auto value = CDS->getElementAsInteger(i);
4140 assert(value <= UINT32_MAX);
4141 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004142 }
4143 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4144 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4145 auto Op = CV->getOperand(i);
4146
4147 uint32_t literal = 0;
4148
4149 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4150 literal = static_cast<uint32_t>(CI->getZExtValue());
4151 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4152 literal = 0xFFFFFFFFu;
4153 } else {
4154 Op->print(errs());
4155 llvm_unreachable("Unsupported element in ConstantVector!");
4156 }
4157
David Neto257c3892018-04-11 13:19:45 -04004158 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004159 }
4160 } else {
4161 Cst->print(errs());
4162 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4163 }
4164 }
4165
David Neto87846742018-04-11 17:36:22 -04004166 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004167 SPIRVInstList.push_back(Inst);
4168 break;
4169 }
4170 case Instruction::ICmp:
4171 case Instruction::FCmp: {
4172 CmpInst *CmpI = cast<CmpInst>(&I);
4173
David Netod4ca2e62017-07-06 18:47:35 -04004174 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004175 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004176 if (isa<PointerType>(ArgTy)) {
4177 CmpI->print(errs());
4178 std::string name = I.getParent()->getParent()->getName();
4179 errs()
4180 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4181 << "in function " << name << "\n";
4182 llvm_unreachable("Pointer equality check is invalid");
4183 break;
4184 }
4185
David Neto257c3892018-04-11 13:19:45 -04004186 // Ops[0] = Result Type ID
4187 // Ops[1] = Operand 1 ID
4188 // Ops[2] = Operand 2 ID
4189 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004190
David Neto257c3892018-04-11 13:19:45 -04004191 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4192 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004193
4194 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004195 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004196 SPIRVInstList.push_back(Inst);
4197 break;
4198 }
4199 case Instruction::Br: {
4200 // The branch instruction is deferred because it needs the label's ID. Record
4201 // the slot's location in the SPIRVInstructionList.
4202 DeferredInsts.push_back(
4203 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4204 break;
4205 }
4206 case Instruction::Switch: {
4207 I.print(errs());
4208 llvm_unreachable("Unsupported instruction???");
4209 break;
4210 }
4211 case Instruction::IndirectBr: {
4212 I.print(errs());
4213 llvm_unreachable("Unsupported instruction???");
4214 break;
4215 }
4216 case Instruction::PHI: {
4217 // The PHI instruction is deferred because it needs its incoming labels' IDs.
4218 // Record the slot's location in the SPIRVInstructionList.
4219 DeferredInsts.push_back(
4220 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4221 break;
4222 }
4223 case Instruction::Alloca: {
4224 //
4225 // Generate OpVariable.
4226 //
4227 // Ops[0] : Result Type ID
4228 // Ops[1] : Storage Class
4229 SPIRVOperandList Ops;
4230
David Neto257c3892018-04-11 13:19:45 -04004231 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004232
David Neto87846742018-04-11 17:36:22 -04004233 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004234 SPIRVInstList.push_back(Inst);
4235 break;
4236 }
4237 case Instruction::Load: {
4238 LoadInst *LD = cast<LoadInst>(&I);
4239 //
4240 // Generate OpLoad.
4241 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004242
alan-baker5b86ed72019-02-15 08:26:50 -05004243 if (LD->getType()->isPointerTy()) {
4244 // Loading a pointer requires variable pointers.
4245 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4246 }
David Neto22f144c2017-06-12 14:26:21 -04004247
David Neto0a2f98d2017-09-15 19:38:40 -04004248 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004249 uint32_t PointerID = VMap[LD->getPointerOperand()];
4250
4251 // This is a hack to work around what looks like a driver bug.
4252 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004253 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4254 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004255 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004256 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004257 // Generate a bitwise-and of the original value with itself.
4258 // We should have been able to get away with just an OpCopyObject,
4259 // but we need something more complex to get past certain driver bugs.
4260 // This is ridiculous, but necessary.
4261 // TODO(dneto): Revisit this once drivers fix their bugs.
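      // Roughly, the emitted instruction is (IDs are illustrative):
      //   %val = OpBitwiseAnd %type %workgroup_size_value %workgroup_size_value
      // which is semantically just a copy of the WorkgroupSize value.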
4262
4263 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004264 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4265 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004266
David Neto87846742018-04-11 17:36:22 -04004267 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004268 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004269 break;
4270 }
4271
4272 // This is the normal path. Generate a load.
4273
David Neto22f144c2017-06-12 14:26:21 -04004274 // Ops[0] = Result Type ID
4275 // Ops[1] = Pointer ID
4276 // Ops[2] ... Ops[n] = Optional Memory Access
4277 //
4278 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004279
David Neto22f144c2017-06-12 14:26:21 -04004280 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004281 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004282
David Neto87846742018-04-11 17:36:22 -04004283 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004284 SPIRVInstList.push_back(Inst);
4285 break;
4286 }
4287 case Instruction::Store: {
4288 StoreInst *ST = cast<StoreInst>(&I);
4289 //
4290 // Generate OpStore.
4291 //
4292
alan-baker5b86ed72019-02-15 08:26:50 -05004293 if (ST->getValueOperand()->getType()->isPointerTy()) {
4294 // Storing a pointer requires variable pointers.
4295 setVariablePointersCapabilities(
4296 ST->getValueOperand()->getType()->getPointerAddressSpace());
4297 }
4298
David Neto22f144c2017-06-12 14:26:21 -04004299 // Ops[0] = Pointer ID
4300 // Ops[1] = Object ID
4301 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4302 //
4303 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004304 SPIRVOperandList Ops;
4305 Ops << MkId(VMap[ST->getPointerOperand()])
4306 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004307
David Neto87846742018-04-11 17:36:22 -04004308 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004309 SPIRVInstList.push_back(Inst);
4310 break;
4311 }
4312 case Instruction::AtomicCmpXchg: {
4313 I.print(errs());
4314 llvm_unreachable("Unsupported instruction???");
4315 break;
4316 }
4317 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004318 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4319
4320 spv::Op opcode;
4321
4322 switch (AtomicRMW->getOperation()) {
4323 default:
4324 I.print(errs());
4325 llvm_unreachable("Unsupported instruction???");
4326 case llvm::AtomicRMWInst::Add:
4327 opcode = spv::OpAtomicIAdd;
4328 break;
4329 case llvm::AtomicRMWInst::Sub:
4330 opcode = spv::OpAtomicISub;
4331 break;
4332 case llvm::AtomicRMWInst::Xchg:
4333 opcode = spv::OpAtomicExchange;
4334 break;
4335 case llvm::AtomicRMWInst::Min:
4336 opcode = spv::OpAtomicSMin;
4337 break;
4338 case llvm::AtomicRMWInst::Max:
4339 opcode = spv::OpAtomicSMax;
4340 break;
4341 case llvm::AtomicRMWInst::UMin:
4342 opcode = spv::OpAtomicUMin;
4343 break;
4344 case llvm::AtomicRMWInst::UMax:
4345 opcode = spv::OpAtomicUMax;
4346 break;
4347 case llvm::AtomicRMWInst::And:
4348 opcode = spv::OpAtomicAnd;
4349 break;
4350 case llvm::AtomicRMWInst::Or:
4351 opcode = spv::OpAtomicOr;
4352 break;
4353 case llvm::AtomicRMWInst::Xor:
4354 opcode = spv::OpAtomicXor;
4355 break;
4356 }
4357
4358 //
4359 // Generate OpAtomic*.
4360 //
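    // For example, an atomic add lowers roughly to (IDs are illustrative):
    //   %result = OpAtomicIAdd %type %pointer %scope_device %semantics %value
    // where %scope_device and %semantics are the constants set up below.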
4361 SPIRVOperandList Ops;
4362
David Neto257c3892018-04-11 13:19:45 -04004363 Ops << MkId(lookupType(I.getType()))
4364 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004365
4366 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004367 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004368 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004369
4370 const auto ConstantMemorySemantics = ConstantInt::get(
4371 IntTy, spv::MemorySemanticsUniformMemoryMask |
4372 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004373 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004374
David Neto257c3892018-04-11 13:19:45 -04004375 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004376
4377 VMap[&I] = nextID;
4378
David Neto87846742018-04-11 17:36:22 -04004379 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004380 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004381 break;
4382 }
4383 case Instruction::Fence: {
4384 I.print(errs());
4385 llvm_unreachable("Unsupported instruction???");
4386 break;
4387 }
4388 case Instruction::Call: {
4389 CallInst *Call = dyn_cast<CallInst>(&I);
4390 Function *Callee = Call->getCalledFunction();
4391
Alan Baker202c8c72018-08-13 13:47:44 -04004392 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004393 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4394 // Generate an OpLoad
4395 SPIRVOperandList Ops;
4396 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004397
David Neto862b7d82018-06-14 18:48:37 -04004398 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4399 << MkId(ResourceVarDeferredLoadCalls[Call]);
4400
4401 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4402 SPIRVInstList.push_back(Inst);
4403 VMap[Call] = load_id;
4404 break;
4405
4406 } else {
4407 // This maps to an OpVariable we've already generated.
4408 // No code is generated for the call.
4409 }
4410 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004411 } else if (Callee->getName().startswith(
4412 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004413 // Don't codegen an instruction here, but instead map this call directly
4414 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004415 int spec_id = static_cast<int>(
4416 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004417 const auto &info = LocalSpecIdInfoMap[spec_id];
4418 VMap[Call] = info.variable_id;
4419 break;
David Neto862b7d82018-06-14 18:48:37 -04004420 }
4421
4422 // Sampler initializers become a load of the corresponding sampler.
4423
Kévin Petitdf71de32019-04-09 14:09:50 +01004424 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004425 // Map this to a load from the variable.
4426 const auto index_into_sampler_map =
4427 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4428
4429 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004430 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004431 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004432
David Neto257c3892018-04-11 13:19:45 -04004433 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004434 << MkId(SamplerMapIndexToIDMap[static_cast<unsigned>(
4435 index_into_sampler_map)]);
David Neto22f144c2017-06-12 14:26:21 -04004436
David Neto862b7d82018-06-14 18:48:37 -04004437 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004438 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004439 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004440 break;
4441 }
4442
Kévin Petit349c9502019-03-28 17:24:14 +00004443 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004444 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4445 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4446 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004447
Kévin Petit617a76d2019-04-04 13:54:16 +01004448 // If the switch above didn't have an entry, the intrinsic may be using the
4449 // name mangling logic.
4450 bool usesMangler = false;
4451 if (opcode == spv::OpNop) {
4452 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4453 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4454 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4455 usesMangler = true;
4456 }
4457 }
4458
Kévin Petit349c9502019-03-28 17:24:14 +00004459 if (opcode != spv::OpNop) {
4460
David Neto22f144c2017-06-12 14:26:21 -04004461 SPIRVOperandList Ops;
4462
Kévin Petit349c9502019-03-28 17:24:14 +00004463 if (!I.getType()->isVoidTy()) {
4464 Ops << MkId(lookupType(I.getType()));
4465 }
David Neto22f144c2017-06-12 14:26:21 -04004466
Kévin Petit617a76d2019-04-04 13:54:16 +01004467 unsigned firstOperand = usesMangler ? 1 : 0;
4468 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004469 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004470 }
4471
Kévin Petit349c9502019-03-28 17:24:14 +00004472 if (!I.getType()->isVoidTy()) {
4473 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004474 }
4475
Kévin Petit349c9502019-03-28 17:24:14 +00004476 SPIRVInstruction *Inst;
4477 if (!I.getType()->isVoidTy()) {
4478 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4479 } else {
4480 Inst = new SPIRVInstruction(opcode, Ops);
4481 }
Kévin Petit8a560882019-03-21 15:24:34 +00004482 SPIRVInstList.push_back(Inst);
4483 break;
4484 }
4485
David Neto22f144c2017-06-12 14:26:21 -04004486 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4487 if (Callee->getName().startswith("spirv.copy_memory")) {
4488 //
4489 // Generate OpCopyMemory.
4490 //
4491
4492 // Ops[0] = Dst ID
4493 // Ops[1] = Src ID
4494 // Ops[2] = Memory Access
4495 // Ops[3] = Alignment
4496
4497 auto IsVolatile =
4498 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4499
4500 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4501 : spv::MemoryAccessMaskNone;
4502
4503 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4504
4505 auto Alignment =
4506 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4507
David Neto257c3892018-04-11 13:19:45 -04004508 SPIRVOperandList Ops;
4509 Ops << MkId(VMap[Call->getArgOperand(0)])
4510 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4511 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004512
David Neto87846742018-04-11 17:36:22 -04004513 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004514
4515 SPIRVInstList.push_back(Inst);
4516
4517 break;
4518 }
4519
David Neto22f144c2017-06-12 14:26:21 -04004520 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4521 // Additionally, OpTypeSampledImage is generated.
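    // Illustrative emitted sequence (IDs are hypothetical):
    //   %si    = OpSampledImage %sampled_image_type %image %sampler
    //   %texel = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
    // For integer images the sample is produced as a v4int and then OpBitcast
    // to the call's result type (handled below).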
alan-bakerf67468c2019-11-25 15:51:49 -05004522 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004523 //
4524 // Generate OpSampledImage.
4525 //
4526 // Ops[0] = Result Type ID
4527 // Ops[1] = Image ID
4528 // Ops[2] = Sampler ID
4529 //
4530 SPIRVOperandList Ops;
4531
4532 Value *Image = Call->getArgOperand(0);
4533 Value *Sampler = Call->getArgOperand(1);
4534 Value *Coordinate = Call->getArgOperand(2);
4535
4536 TypeMapType &OpImageTypeMap = getImageTypeMap();
4537 Type *ImageTy = Image->getType()->getPointerElementType();
4538 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004539 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004540 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004541
4542 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004543
4544 uint32_t SampledImageID = nextID;
4545
David Neto87846742018-04-11 17:36:22 -04004546 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004547 SPIRVInstList.push_back(Inst);
4548
4549 //
4550 // Generate OpImageSampleExplicitLod.
4551 //
4552 // Ops[0] = Result Type ID
4553 // Ops[1] = Sampled Image ID
4554 // Ops[2] = Coordinate ID
4555 // Ops[3] = Image Operands Type ID
4556 // Ops[4] ... Ops[n] = Operands ID
4557 //
4558 Ops.clear();
4559
alan-bakerf67468c2019-11-25 15:51:49 -05004560 const bool is_int_image = IsIntImageType(Image->getType());
4561 uint32_t result_type = 0;
4562 if (is_int_image) {
4563 result_type = v4int32ID;
4564 } else {
4565 result_type = lookupType(Call->getType());
4566 }
4567
4568 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4569 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004570
4571 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004572 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004573
alan-bakerf67468c2019-11-25 15:51:49 -05004574 uint32_t final_id = nextID++;
4575 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004576
alan-bakerf67468c2019-11-25 15:51:49 -05004577 uint32_t image_id = final_id;
4578 if (is_int_image) {
4579 // Int image requires a bitcast from v4int to v4uint.
4580 image_id = nextID++;
4581 }
4582
4583 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004584 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004585
4586 if (is_int_image) {
4587 // Generate the bitcast.
4588 Ops.clear();
4589 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4590 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4591 SPIRVInstList.push_back(Inst);
4592 }
David Neto22f144c2017-06-12 14:26:21 -04004593 break;
4594 }
4595
alan-bakerf67468c2019-11-25 15:51:49 -05004596 // write_image is mapped to OpImageWrite.
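    // Illustrative emitted form (IDs are hypothetical):
    //   OpImageWrite %image %coord %texel
    // For integer images the texel is first OpBitcast to a v4int (handled below).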
4597 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004598 //
4599 // Generate OpImageWrite.
4600 //
4601 // Ops[0] = Image ID
4602 // Ops[1] = Coordinate ID
4603 // Ops[2] = Texel ID
4604 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4605 // Ops[4] ... Ops[n] = (Optional) Operands ID
4606 //
4607 SPIRVOperandList Ops;
4608
4609 Value *Image = Call->getArgOperand(0);
4610 Value *Coordinate = Call->getArgOperand(1);
4611 Value *Texel = Call->getArgOperand(2);
4612
4613 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004614 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004615 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004616
4617 const bool is_int_image = IsIntImageType(Image->getType());
4618 if (is_int_image) {
4619 // Generate a bitcast to v4int and use it as the texel value.
4620 uint32_t castID = nextID++;
4621 Ops << MkId(v4int32ID) << MkId(TexelID);
4622 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4623 SPIRVInstList.push_back(cast);
4624 Ops.clear();
4625 TexelID = castID;
4626 }
David Neto257c3892018-04-11 13:19:45 -04004627 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004628
David Neto87846742018-04-11 17:36:22 -04004629 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004630 SPIRVInstList.push_back(Inst);
4631 break;
4632 }
4633
alan-bakerf67468c2019-11-25 15:51:49 -05004634 // get_image_* is mapped to OpImageQuerySize
4635 if (clspv::IsGetImageHeight(Callee) || clspv::IsGetImageWidth(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004636 //
4637 // Generate OpImageQuerySize, then pull out the right component.
4638 // Assume 2D image for now.
4639 //
4640 // Ops[0] = Image ID
4641 //
4642 // %sizes = OpImageQuerySize %uint2 %im
4643 // %result = OpCompositeExtract %uint %sizes 0-or-1
4644 SPIRVOperandList Ops;
4645
4646 // Implement:
4647 // %sizes = OpImageQuerySize %uint2 %im
4648 uint32_t SizesTypeID =
4649 TypeMap[VectorType::get(Type::getInt32Ty(Context), 2)];
David Neto5c22a252018-03-15 16:07:41 -04004650 Value *Image = Call->getArgOperand(0);
4651 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004652 Ops << MkId(SizesTypeID) << MkId(ImageID);
David Neto5c22a252018-03-15 16:07:41 -04004653
4654 uint32_t SizesID = nextID++;
David Neto87846742018-04-11 17:36:22 -04004655 auto *QueryInst =
4656 new SPIRVInstruction(spv::OpImageQuerySize, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004657 SPIRVInstList.push_back(QueryInst);
4658
4659 // Reset value map entry since we generated an intermediate instruction.
4660 VMap[&I] = nextID;
4661
4662 // Implement:
4663 // %result = OpCompositeExtract %uint %sizes 0-or-1
4664 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004665 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004666
4667 uint32_t component = Callee->getName().contains("height") ? 1 : 0;
David Neto257c3892018-04-11 13:19:45 -04004668 Ops << MkNum(component);
David Neto5c22a252018-03-15 16:07:41 -04004669
David Neto87846742018-04-11 17:36:22 -04004670 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004671 SPIRVInstList.push_back(Inst);
4672 break;
4673 }
4674
David Neto22f144c2017-06-12 14:26:21 -04004675 // The call instruction is deferred because it needs the function's ID. Record
4676 // the slot's location in the SPIRVInstructionList.
4677 DeferredInsts.push_back(
4678 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4679
David Neto3fbb4072017-10-16 11:28:14 -04004680 // Check whether the implementation of this call uses an extended
4681 // instruction plus one more value-producing instruction. If so, then
4682 // reserve the id for the extra value-producing slot.
4683 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4684 if (EInst != kGlslExtInstBad) {
4685 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004686 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004687 VMap[&I] = nextID;
4688 nextID++;
4689 }
4690 break;
4691 }
4692 case Instruction::Ret: {
4693 unsigned NumOps = I.getNumOperands();
4694 if (NumOps == 0) {
4695 //
4696 // Generate OpReturn.
4697 //
David Neto87846742018-04-11 17:36:22 -04004698 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004699 } else {
4700 //
4701 // Generate OpReturnValue.
4702 //
4703
4704 // Ops[0] = Return Value ID
4705 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004706
4707 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004708
David Neto87846742018-04-11 17:36:22 -04004709 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004710 SPIRVInstList.push_back(Inst);
4711 break;
4712 }
4713 break;
4714 }
4715 }
4716}
4717
4718void SPIRVProducerPass::GenerateFuncEpilogue() {
4719 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4720
4721 //
4722 // Generate OpFunctionEnd
4723 //
4724
David Neto87846742018-04-11 17:36:22 -04004725 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004726 SPIRVInstList.push_back(Inst);
4727}
4728
4729bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004730 // Don't specialize <4 x i8> if i8 is generally supported.
4731 if (clspv::Option::Int8Support())
4732 return false;
4733
David Neto22f144c2017-06-12 14:26:21 -04004734 LLVMContext &Context = Ty->getContext();
4735 if (Ty->isVectorTy()) {
4736 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4737 Ty->getVectorNumElements() == 4) {
4738 return true;
4739 }
4740 }
4741
4742 return false;
4743}
4744
4745void SPIRVProducerPass::HandleDeferredInstruction() {
4746 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4747 ValueMapType &VMap = getValueMap();
4748 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4749
4750 for (auto DeferredInst = DeferredInsts.rbegin();
4751 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4752 Value *Inst = std::get<0>(*DeferredInst);
4753 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4754 if (InsertPoint != SPIRVInstList.end()) {
4755 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4756 ++InsertPoint;
4757 }
4758 }
4759
4760 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
4761 // Check whether the basic block containing this branch instruction is a
4762 // loop header. If it is a loop header, generate OpLoopMerge and
4763 // OpBranchConditional.
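      // For a loop header the block ends up roughly as (IDs are illustrative):
      //   OpLoopMerge %merge_block %continue_target None
      //   OpBranchConditional %cond %true_label %false_label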
4764 Function *Func = Br->getParent()->getParent();
4765 DominatorTree &DT =
4766 getAnalysis<DominatorTreeWrapperPass>(*Func).getDomTree();
4767 const LoopInfo &LI =
4768 getAnalysis<LoopInfoWrapperPass>(*Func).getLoopInfo();
4769
4770 BasicBlock *BrBB = Br->getParent();
alan-baker49531082019-06-05 17:30:56 -04004771 Loop *L = LI.getLoopFor(BrBB);
David Neto22f144c2017-06-12 14:26:21 -04004772 if (LI.isLoopHeader(BrBB)) {
4773 Value *ContinueBB = nullptr;
4774 Value *MergeBB = nullptr;
4775
David Neto22f144c2017-06-12 14:26:21 -04004776 MergeBB = L->getExitBlock();
4777 if (!MergeBB) {
4778 // The StructurizeCFG pass converts the CFG into a triangle shape whose
4779 // regions have a single entry and a single exit. As a result, the loop
4780 // should not have multiple exits.
4781 llvm_unreachable("Loop has multiple exits???");
4782 }
4783
4784 if (L->isLoopLatch(BrBB)) {
4785 ContinueBB = BrBB;
4786 } else {
4787 // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
4788 // block.
4789 BasicBlock *Header = L->getHeader();
4790 BasicBlock *Latch = L->getLoopLatch();
4791 for (BasicBlock *BB : L->blocks()) {
4792 if (BB == Header) {
4793 continue;
4794 }
4795
4796 // Check whether block dominates block with back-edge.
4797 if (DT.dominates(BB, Latch)) {
4798 ContinueBB = BB;
4799 }
4800 }
4801
4802 if (!ContinueBB) {
4803 llvm_unreachable("Wrong continue block from loop");
4804 }
4805 }
4806
4807 //
4808 // Generate OpLoopMerge.
4809 //
4810 // Ops[0] = Merge Block ID
4811 // Ops[1] = Continue Target ID
4812 // Ops[2] = Selection Control
4813 SPIRVOperandList Ops;
4814
4815 // The StructurizeCFG pass has already restructured the CFG. Use the
4816 // loop's exit block as the merge block.
4817 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004818 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004819 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
4820 << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004821
David Neto87846742018-04-11 17:36:22 -04004822 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004823 SPIRVInstList.insert(InsertPoint, MergeInst);
4824
4825 } else if (Br->isConditional()) {
alan-baker49531082019-06-05 17:30:56 -04004826 // Generate a selection merge unless this is a back-edge block.
4827 bool HasBackedge = false;
4828 while (L && !HasBackedge) {
4829 if (L->isLoopLatch(BrBB)) {
4830 HasBackedge = true;
David Neto22f144c2017-06-12 14:26:21 -04004831 }
alan-baker49531082019-06-05 17:30:56 -04004832 L = L->getParentLoop();
David Neto22f144c2017-06-12 14:26:21 -04004833 }
alan-baker49531082019-06-05 17:30:56 -04004834 if (!HasBackedge) {
David Neto22f144c2017-06-12 14:26:21 -04004835 //
4836 // Generate OpSelectionMerge.
4837 //
4838 // Ops[0] = Merge Block ID
4839 // Ops[1] = Selection Control
4840 SPIRVOperandList Ops;
4841
4842 // The StructurizeCFG pass has already restructured the CFG. Just use
4843 // the branch instruction's false block as the merge block.
4844 uint32_t MergeBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004845 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004846
David Neto87846742018-04-11 17:36:22 -04004847 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004848 SPIRVInstList.insert(InsertPoint, MergeInst);
4849 }
4850 }
4851
4852 if (Br->isConditional()) {
4853 //
4854 // Generate OpBranchConditional.
4855 //
4856 // Ops[0] = Condition ID
4857 // Ops[1] = True Label ID
4858 // Ops[2] = False Label ID
4859 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4860 SPIRVOperandList Ops;
4861
4862 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004863 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004864 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004865
4866 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004867
David Neto87846742018-04-11 17:36:22 -04004868 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004869 SPIRVInstList.insert(InsertPoint, BrInst);
4870 } else {
4871 //
4872 // Generate OpBranch.
4873 //
4874 // Ops[0] = Target Label ID
4875 SPIRVOperandList Ops;
4876
4877 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004878 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004879
David Neto87846742018-04-11 17:36:22 -04004880 SPIRVInstList.insert(InsertPoint,
4881 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004882 }
4883 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004884 if (PHI->getType()->isPointerTy()) {
4885 // OpPhi on pointers requires variable pointers.
4886 setVariablePointersCapabilities(
4887 PHI->getType()->getPointerAddressSpace());
4888 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4889 setVariablePointers(true);
4890 }
4891 }
4892
David Neto22f144c2017-06-12 14:26:21 -04004893 //
4894 // Generate OpPhi.
4895 //
4896 // Ops[0] = Result Type ID
4897 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
4898 SPIRVOperandList Ops;
4899
David Neto257c3892018-04-11 13:19:45 -04004900 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004901
David Neto22f144c2017-06-12 14:26:21 -04004902 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4903 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04004904 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04004905 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004906 }
4907
4908 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004909 InsertPoint,
4910 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004911 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4912 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004913 auto callee_name = Callee->getName();
4914 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004915
4916 if (EInst) {
4917 uint32_t &ExtInstImportID = getOpExtInstImportID();
4918
4919 //
4920 // Generate OpExtInst.
4921 //
4922
4923 // Ops[0] = Result Type ID
4924 // Ops[1] = Set ID (OpExtInstImport ID)
4925 // Ops[2] = Instruction Number (Literal Number)
4926 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
4927 SPIRVOperandList Ops;
4928
David Neto862b7d82018-06-14 18:48:37 -04004929 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
4930 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004931
David Neto22f144c2017-06-12 14:26:21 -04004932 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4933 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004934 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004935 }
4936
David Neto87846742018-04-11 17:36:22 -04004937 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
4938 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004939 SPIRVInstList.insert(InsertPoint, ExtInst);
4940
David Neto3fbb4072017-10-16 11:28:14 -04004941 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
4942 if (IndirectExtInst != kGlslExtInstBad) {
4943 // Generate one more instruction that uses the result of the extended
4944 // instruction. Its result id is one more than the id of the
4945 // extended instruction.
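          // Illustrative lowerings (constants shown symbolically, not as IDs):
          //   clz(x)    -> %m = OpExtInst FindUMsb x;  %r = OpISub 31 %m
          //   acospi(x) -> %a = OpExtInst Acos x;      %r = OpFMul (1/pi) %a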
David Neto22f144c2017-06-12 14:26:21 -04004946 LLVMContext &Context =
4947 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04004948
David Neto3fbb4072017-10-16 11:28:14 -04004949 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
4950 &VMap, &SPIRVInstList, &InsertPoint](
4951 spv::Op opcode, Constant *constant) {
4952 //
4953 // Generate instruction like:
4954 // result = opcode constant <extinst-result>
4955 //
4956 // Ops[0] = Result Type ID
4957 // Ops[1] = Operand 0 ;; the constant, suitably splatted
4958 // Ops[2] = Operand 1 ;; the result of the extended instruction
4959 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004960
David Neto3fbb4072017-10-16 11:28:14 -04004961 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04004962 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04004963
4964 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
4965 constant = ConstantVector::getSplat(
4966 static_cast<unsigned>(vectorTy->getNumElements()), constant);
4967 }
David Neto257c3892018-04-11 13:19:45 -04004968 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04004969
4970 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004971 InsertPoint, new SPIRVInstruction(
4972 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04004973 };
4974
4975 switch (IndirectExtInst) {
4976 case glsl::ExtInstFindUMsb: // Implementing clz
4977 generate_extra_inst(
4978 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
4979 break;
4980 case glsl::ExtInstAcos: // Implementing acospi
4981 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01004982 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04004983 case glsl::ExtInstAtan2: // Implementing atan2pi
4984 generate_extra_inst(
4985 spv::OpFMul,
4986 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
4987 break;
4988
4989 default:
4990 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04004991 }
David Neto22f144c2017-06-12 14:26:21 -04004992 }
David Neto3fbb4072017-10-16 11:28:14 -04004993
alan-bakerb39c8262019-03-08 14:03:37 -05004994 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04004995 //
4996 // Generate OpBitCount
4997 //
4998 // Ops[0] = Result Type ID
4999 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005000 SPIRVOperandList Ops;
5001 Ops << MkId(lookupType(Call->getType()))
5002 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005003
5004 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005005 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005006 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005007
David Neto862b7d82018-06-14 18:48:37 -04005008 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005009
5010 // Generate an OpCompositeConstruct
5011 SPIRVOperandList Ops;
5012
5013 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005014 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005015
5016 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005017 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005018 }
5019
5020 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005021 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5022 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005023
Alan Baker202c8c72018-08-13 13:47:44 -04005024 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5025
5026 // We have already mapped the call's result value to an ID.
5027 // Don't generate any code now.
5028
5029 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005030
5031 // We have already mapped the call's result value to an ID.
5032 // Don't generate any code now.
5033
David Neto22f144c2017-06-12 14:26:21 -04005034 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005035 if (Call->getType()->isPointerTy()) {
5036 // Functions returning pointers require variable pointers.
5037 setVariablePointersCapabilities(
5038 Call->getType()->getPointerAddressSpace());
5039 }
5040
David Neto22f144c2017-06-12 14:26:21 -04005041 //
5042 // Generate OpFunctionCall.
5043 //
5044
5045 // Ops[0] = Result Type ID
5046 // Ops[1] = Callee Function ID
5047 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5048 SPIRVOperandList Ops;
5049
David Neto862b7d82018-06-14 18:48:37 -04005050 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005051
5052 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005053 if (CalleeID == 0) {
5054 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005055 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005056 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5057 // causes an infinite loop. Instead, go ahead and generate
5058 // the bad function call. A validator will catch the 0-Id.
5059 // llvm_unreachable("Can't translate function call");
5060 }
David Neto22f144c2017-06-12 14:26:21 -04005061
David Neto257c3892018-04-11 13:19:45 -04005062 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005063
David Neto22f144c2017-06-12 14:26:21 -04005064 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5065 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005066 auto *operand = Call->getOperand(i);
5067 if (operand->getType()->isPointerTy()) {
5068 auto sc =
5069 GetStorageClass(operand->getType()->getPointerAddressSpace());
5070 if (sc == spv::StorageClassStorageBuffer) {
5071 // Passing SSBO by reference requires variable pointers storage
5072 // buffer.
5073 setVariablePointersStorageBuffer(true);
5074 } else if (sc == spv::StorageClassWorkgroup) {
5075 // Workgroup references require variable pointers if they are not
5076 // memory object declarations.
5077 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5078 // Workgroup accessor represents a variable reference.
5079 if (!operand_call->getCalledFunction()->getName().startswith(
5080 clspv::WorkgroupAccessorFunction()))
5081 setVariablePointers(true);
5082 } else {
5083 // Arguments are function parameters.
5084 if (!isa<Argument>(operand))
5085 setVariablePointers(true);
5086 }
5087 }
5088 }
5089 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005090 }
5091
David Neto87846742018-04-11 17:36:22 -04005092 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5093 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005094 SPIRVInstList.insert(InsertPoint, CallInst);
5095 }
5096 }
5097 }
5098}
5099
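// Adds decorations that can only be determined after the main instruction
// stream exists: ArrayStride decorations for the pointer/array types used by
// OpPtrAccessChain, and SpecId decorations for the array-size spec constants
// of pointer-to-local kernel arguments. As an illustrative (assumed) example,
// a runtime buffer of 32-bit floats would get "OpDecorate %ptr ArrayStride 4".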
David Neto1a1a0582017-07-07 12:01:44 -04005100void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005101 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005102 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005103 }
David Neto1a1a0582017-07-07 12:01:44 -04005104
5105 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005106
5107 // Find an iterator pointing just past the last decoration.
5108 bool seen_decorations = false;
5109 auto DecoInsertPoint =
5110 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5111 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5112 const bool is_decoration =
5113 Inst->getOpcode() == spv::OpDecorate ||
5114 Inst->getOpcode() == spv::OpMemberDecorate;
5115 if (is_decoration) {
5116 seen_decorations = true;
5117 return false;
5118 } else {
5119 return seen_decorations;
5120 }
5121 });
5122
David Netoc6f3ab22018-04-06 18:02:31 -04005123 // Insert ArrayStride decorations on pointer and array types, as required by
5124 // the OpPtrAccessChain instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005125 for (auto *type : getTypesNeedingArrayStride()) {
5126 Type *elemTy = nullptr;
5127 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5128 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005129 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005130 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005131 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005132 elemTy = seqTy->getSequentialElementType();
5133 } else {
5134 errs() << "Unhandled strided type " << *type << "\n";
5135 llvm_unreachable("Unhandled strided type");
5136 }
David Neto1a1a0582017-07-07 12:01:44 -04005137
5138 // Ops[0] = Target ID
5139 // Ops[1] = Decoration (ArrayStride)
5140 // Ops[2] = Stride number (Literal Number)
5141 SPIRVOperandList Ops;
5142
David Neto85082642018-03-24 06:55:20 -07005143 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005144 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005145
5146 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5147 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005148
David Neto87846742018-04-11 17:36:22 -04005149 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005150 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5151 }
David Netoc6f3ab22018-04-06 18:02:31 -04005152
5153 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005154 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5155 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005156 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005157 SPIRVOperandList Ops;
5158 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5159 << MkNum(arg_info.spec_id);
5160 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005161 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005162 }
David Neto1a1a0582017-07-07 12:01:44 -04005163}
5164
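// Maps an Itanium-mangled OpenCL builtin name to the GLSL.std.450 extended
// instruction that implements it directly. As a reminder, inferred from the
// cases below: the mangling is "_Z" + <name length> + <name> + <parameter
// codes>, where "f" is float, "i" is int, "j" is unsigned int, and "Dv4_f" is
// a 4-element float vector; "S_" repeats the previous type. So "_Z3maxff" is
// max(float, float) and "_Z5clampDv4_jS_S_" is clamp(uint4, uint4, uint4).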
David Neto22f144c2017-06-12 14:26:21 -04005165glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5166 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005167 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5168 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5169 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5170 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005171 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5172 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5173 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5174 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005175 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5176 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5177 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5178 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005179 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5180 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5181 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5182 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005183 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5184 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5185 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5186 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5187 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5188 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5189 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5190 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005191 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5192 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5193 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5194 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5195 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5196 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5197 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5198 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005199 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5200 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5201 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5202 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5203 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5204 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5205 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5206 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005207 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5208 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5209 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5210 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5211 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5212 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5213 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5214 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005215 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5216 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5217 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5218 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005219 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5220 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5221 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5222 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5223 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5224 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5225 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5226 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005227 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5228 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5229 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5230 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5231 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5232 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5233 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5234 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005235 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5236 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5237 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5238 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5239 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5240 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5241 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5242 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005243 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5244 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5245 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5246 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5247 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5248 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5249 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5250 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005251 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5252 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5253 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5254 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5255 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005256 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5257 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5258 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5259 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5260 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5261 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5262 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5263 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005264 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5265 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5266 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5267 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5268 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5269 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5270 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5271 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005272 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5273 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5274 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5275 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5276 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5277 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5278 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5279 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005280 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5281 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5282 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5283 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5284 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5285 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5286 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5287 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005288 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5289 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5290 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5291 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5292 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5293 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5294 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5295 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5296 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5297 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5298 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5299 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5300 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5301 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5302 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5303 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5304 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5305 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5306 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5307 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5308 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5309 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5310 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5311 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5312 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5313 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5314 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5315 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5316 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5317 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5318 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5319 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5320 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5321 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5322 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5323 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5324 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5325 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5326 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5327 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5328 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005329 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005330 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5331 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5332 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5333 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5334 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5335 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5336 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5337 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5338 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5339 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5340 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5341 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5342 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5343 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5344 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5345 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5346 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005347 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005348 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005349 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005350 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005351 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005352 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5353 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005354 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005355 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5356 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5357 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005358 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5359 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5360 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5361 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005362 .Default(kGlslExtInstBad);
5363}
5364
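// Maps builtins that are implemented indirectly: the extended instruction is
// only part of the lowering, and extra arithmetic is emitted around it
// elsewhere. For example (as the comments below suggest), acospi(x) becomes
// Acos(x) followed by a multiply by 1/pi, and clz() is built from FindUMsb
// plus a correction.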
5365glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5366 // Check indirect cases.
5367 return StringSwitch<glsl::ExtInst>(Name)
5368 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5369 // Use exact match on float arg because these need a multiply
5370 // of a constant of the right floating point type.
5371 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5372 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5373 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5374 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5375 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5376 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5377 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5378 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005379 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5380 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5381 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5382 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005383 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5384 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5385 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5386 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5387 .Default(kGlslExtInstBad);
5388}
5389
alan-bakerb6b09dc2018-11-08 16:59:28 -05005390glsl::ExtInst
5391SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005392 auto direct = getExtInstEnum(Name);
5393 if (direct != kGlslExtInstBad)
5394 return direct;
5395 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005396}
5397
David Neto22f144c2017-06-12 14:26:21 -04005398void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005399 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005400}
5401
5402void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5403 WriteOneWord(Inst->getResultID());
5404}
5405
5406void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5407 // High 16 bit : Word Count
5408 // Low 16 bit : Opcode
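  // For example, an OpTypeVoid (opcode 19) with word count 2 encodes as
  // 0x00020013.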
5409 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005410 const uint32_t count = Inst->getWordCount();
5411 if (count > 65535) {
5412 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5413 llvm_unreachable("Word count too high");
5414 }
David Neto22f144c2017-06-12 14:26:21 -04005415 Word |= Inst->getWordCount() << 16;
5416 WriteOneWord(Word);
5417}
5418
5419void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
5420 SPIRVOperandType OpTy = Op->getType();
5421 switch (OpTy) {
5422 default: {
5423 llvm_unreachable("Unsupported SPIRV Operand Type???");
5424 break;
5425 }
5426 case SPIRVOperandType::NUMBERID: {
5427 WriteOneWord(Op->getNumID());
5428 break;
5429 }
5430 case SPIRVOperandType::LITERAL_STRING: {
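    // SPIR-V packs literal strings as UTF-8 bytes, four per 32-bit word with
    // the first byte in the lowest-order position, followed by a null
    // terminator and zero padding up to the word boundary. The
    // reinterpret_cast below assumes a little-endian host.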
5431 std::string Str = Op->getLiteralStr();
5432 const char *Data = Str.c_str();
5433 size_t WordSize = Str.size() / 4;
5434 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5435 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5436 }
5437
5438 uint32_t Remainder = Str.size() % 4;
5439 uint32_t LastWord = 0;
5440 if (Remainder) {
5441 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
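        // Cast through uint8_t so bytes >= 0x80 are not sign-extended.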
5442 LastWord |= static_cast<uint8_t>(Data[4 * WordSize + Idx]) << 8 * Idx;
5443 }
5444 }
5445
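    // The final word is always written: it carries the trailing bytes plus
    // the terminating null, or a full word of zeros when the string length is
    // a multiple of four.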
5446 WriteOneWord(LastWord);
5447 break;
5448 }
5449 case SPIRVOperandType::LITERAL_INTEGER:
5450 case SPIRVOperandType::LITERAL_FLOAT: {
5451 auto LiteralNum = Op->getLiteralNum();
5452 // TODO: Handle LiteralNum carefully.
5453 for (auto Word : LiteralNum) {
5454 WriteOneWord(Word);
5455 }
5456 break;
5457 }
5458 }
5459}
5460
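// Serializes the accumulated SPIRVInstruction list to the output stream. The
// switch below groups opcodes by operand layout: instructions with no result
// id, type-like instructions whose result id is written first, and
// value-producing instructions whose result id follows the result type
// operand.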
5461void SPIRVProducerPass::WriteSPIRVBinary() {
5462 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5463
5464 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005465 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005466 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5467
5468 switch (Opcode) {
5469 default: {
David Neto5c22a252018-03-15 16:07:41 -04005470 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005471 llvm_unreachable("Unsupported SPIRV instruction");
5472 break;
5473 }
5474 case spv::OpCapability:
5475 case spv::OpExtension:
5476 case spv::OpMemoryModel:
5477 case spv::OpEntryPoint:
5478 case spv::OpExecutionMode:
5479 case spv::OpSource:
5480 case spv::OpDecorate:
5481 case spv::OpMemberDecorate:
5482 case spv::OpBranch:
5483 case spv::OpBranchConditional:
5484 case spv::OpSelectionMerge:
5485 case spv::OpLoopMerge:
5486 case spv::OpStore:
5487 case spv::OpImageWrite:
5488 case spv::OpReturnValue:
5489 case spv::OpControlBarrier:
5490 case spv::OpMemoryBarrier:
5491 case spv::OpReturn:
5492 case spv::OpFunctionEnd:
5493 case spv::OpCopyMemory: {
5494 WriteWordCountAndOpcode(Inst);
5495 for (uint32_t i = 0; i < Ops.size(); i++) {
5496 WriteOperand(Ops[i]);
5497 }
5498 break;
5499 }
5500 case spv::OpTypeBool:
5501 case spv::OpTypeVoid:
5502 case spv::OpTypeSampler:
5503 case spv::OpLabel:
5504 case spv::OpExtInstImport:
5505 case spv::OpTypePointer:
5506 case spv::OpTypeRuntimeArray:
5507 case spv::OpTypeStruct:
5508 case spv::OpTypeImage:
5509 case spv::OpTypeSampledImage:
5510 case spv::OpTypeInt:
5511 case spv::OpTypeFloat:
5512 case spv::OpTypeArray:
5513 case spv::OpTypeVector:
5514 case spv::OpTypeFunction: {
5515 WriteWordCountAndOpcode(Inst);
5516 WriteResultID(Inst);
5517 for (uint32_t i = 0; i < Ops.size(); i++) {
5518 WriteOperand(Ops[i]);
5519 }
5520 break;
5521 }
5522 case spv::OpFunction:
5523 case spv::OpFunctionParameter:
5524 case spv::OpAccessChain:
5525 case spv::OpPtrAccessChain:
5526 case spv::OpInBoundsAccessChain:
5527 case spv::OpUConvert:
5528 case spv::OpSConvert:
5529 case spv::OpConvertFToU:
5530 case spv::OpConvertFToS:
5531 case spv::OpConvertUToF:
5532 case spv::OpConvertSToF:
5533 case spv::OpFConvert:
5534 case spv::OpConvertPtrToU:
5535 case spv::OpConvertUToPtr:
5536 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005537 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005538 case spv::OpIAdd:
5539 case spv::OpFAdd:
5540 case spv::OpISub:
5541 case spv::OpFSub:
5542 case spv::OpIMul:
5543 case spv::OpFMul:
5544 case spv::OpUDiv:
5545 case spv::OpSDiv:
5546 case spv::OpFDiv:
5547 case spv::OpUMod:
5548 case spv::OpSRem:
5549 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005550 case spv::OpUMulExtended:
5551 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005552 case spv::OpBitwiseOr:
5553 case spv::OpBitwiseXor:
5554 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005555 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005556 case spv::OpShiftLeftLogical:
5557 case spv::OpShiftRightLogical:
5558 case spv::OpShiftRightArithmetic:
5559 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005560 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005561 case spv::OpCompositeExtract:
5562 case spv::OpVectorExtractDynamic:
5563 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005564 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005565 case spv::OpVectorInsertDynamic:
5566 case spv::OpVectorShuffle:
5567 case spv::OpIEqual:
5568 case spv::OpINotEqual:
5569 case spv::OpUGreaterThan:
5570 case spv::OpUGreaterThanEqual:
5571 case spv::OpULessThan:
5572 case spv::OpULessThanEqual:
5573 case spv::OpSGreaterThan:
5574 case spv::OpSGreaterThanEqual:
5575 case spv::OpSLessThan:
5576 case spv::OpSLessThanEqual:
5577 case spv::OpFOrdEqual:
5578 case spv::OpFOrdGreaterThan:
5579 case spv::OpFOrdGreaterThanEqual:
5580 case spv::OpFOrdLessThan:
5581 case spv::OpFOrdLessThanEqual:
5582 case spv::OpFOrdNotEqual:
5583 case spv::OpFUnordEqual:
5584 case spv::OpFUnordGreaterThan:
5585 case spv::OpFUnordGreaterThanEqual:
5586 case spv::OpFUnordLessThan:
5587 case spv::OpFUnordLessThanEqual:
5588 case spv::OpFUnordNotEqual:
5589 case spv::OpExtInst:
5590 case spv::OpIsInf:
5591 case spv::OpIsNan:
5592 case spv::OpAny:
5593 case spv::OpAll:
5594 case spv::OpUndef:
5595 case spv::OpConstantNull:
5596 case spv::OpLogicalOr:
5597 case spv::OpLogicalAnd:
5598 case spv::OpLogicalNot:
5599 case spv::OpLogicalNotEqual:
5600 case spv::OpConstantComposite:
5601 case spv::OpSpecConstantComposite:
5602 case spv::OpConstantTrue:
5603 case spv::OpConstantFalse:
5604 case spv::OpConstant:
5605 case spv::OpSpecConstant:
5606 case spv::OpVariable:
5607 case spv::OpFunctionCall:
5608 case spv::OpSampledImage:
5609 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005610 case spv::OpImageQuerySize:
David Neto22f144c2017-06-12 14:26:21 -04005611 case spv::OpSelect:
5612 case spv::OpPhi:
5613 case spv::OpLoad:
5614 case spv::OpAtomicIAdd:
5615 case spv::OpAtomicISub:
5616 case spv::OpAtomicExchange:
5617 case spv::OpAtomicIIncrement:
5618 case spv::OpAtomicIDecrement:
5619 case spv::OpAtomicCompareExchange:
5620 case spv::OpAtomicUMin:
5621 case spv::OpAtomicSMin:
5622 case spv::OpAtomicUMax:
5623 case spv::OpAtomicSMax:
5624 case spv::OpAtomicAnd:
5625 case spv::OpAtomicOr:
5626 case spv::OpAtomicXor:
5627 case spv::OpDot: {
5628 WriteWordCountAndOpcode(Inst);
5629 WriteOperand(Ops[0]);
5630 WriteResultID(Inst);
5631 for (uint32_t i = 1; i < Ops.size(); i++) {
5632 WriteOperand(Ops[i]);
5633 }
5634 break;
5635 }
5636 }
5637 }
5638}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005639
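// Returns true if a zero value is representable for |type|, presumably so it
// can be initialized with OpConstantNull. Opaque image and sampler types, and
// aggregates containing them, are excluded.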
alan-bakerb6b09dc2018-11-08 16:59:28 -05005640bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005641 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005642 case Type::HalfTyID:
5643 case Type::FloatTyID:
5644 case Type::DoubleTyID:
5645 case Type::IntegerTyID:
5646 case Type::VectorTyID:
5647 return true;
5648 case Type::PointerTyID: {
5649 const PointerType *pointer_type = cast<PointerType>(type);
5650 if (pointer_type->getPointerAddressSpace() !=
5651 AddressSpace::UniformConstant) {
5652 auto pointee_type = pointer_type->getPointerElementType();
5653 if (pointee_type->isStructTy() &&
5654 cast<StructType>(pointee_type)->isOpaque()) {
5655 // Images and samplers are not nullable.
5656 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005657 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005658 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005659 return true;
5660 }
5661 case Type::ArrayTyID:
5662 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5663 case Type::StructTyID: {
5664 const StructType *struct_type = cast<StructType>(type);
5665 // Images and samplers are not nullable.
5666 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005667 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005668 for (const auto element : struct_type->elements()) {
5669 if (!IsTypeNullable(element))
5670 return false;
5671 }
5672 return true;
5673 }
5674 default:
5675 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005676 }
5677}
Alan Bakerfcda9482018-10-02 17:09:59 -04005678
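// Reads the remapped-type metadata produced by the UBO type transformation
// and caches per-type member offsets and sizes, so later size queries do not
// have to fall back to the default DataLayout rules.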
5679void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5680 if (auto *offsets_md =
5681 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5682 // Metdata is stored as key-value pair operands. The first element of each
5683 // operand is the type and the second is a vector of offsets.
5684 for (const auto *operand : offsets_md->operands()) {
5685 const auto *pair = cast<MDTuple>(operand);
5686 auto *type =
5687 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5688 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5689 std::vector<uint32_t> offsets;
5690 for (const Metadata *offset_md : offset_vector->operands()) {
5691 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005692 offsets.push_back(static_cast<uint32_t>(
5693 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005694 }
5695 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5696 }
5697 }
5698
5699 if (auto *sizes_md =
5700 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5701 // Metadata is stored as key-value pair operands. The first element of each
5702 // operand is the type and the second is a triple of sizes: type size in
5703 // bits, store size and alloc size.
5704 for (const auto *operand : sizes_md->operands()) {
5705 const auto *pair = cast<MDTuple>(operand);
5706 auto *type =
5707 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5708 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5709 uint64_t type_size_in_bits =
5710 cast<ConstantInt>(
5711 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5712 ->getZExtValue();
5713 uint64_t type_store_size =
5714 cast<ConstantInt>(
5715 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5716 ->getZExtValue();
5717 uint64_t type_alloc_size =
5718 cast<ConstantInt>(
5719 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5720 ->getZExtValue();
5721 RemappedUBOTypeSizes.insert(std::make_pair(
5722 type, std::make_tuple(type_size_in_bits, type_store_size,
5723 type_alloc_size)));
5724 }
5725 }
5726}
5727
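// The three size queries below prefer the sizes recorded for remapped UBO
// types and only fall back to the module's DataLayout when no remapping
// exists.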
5728uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5729 const DataLayout &DL) {
5730 auto iter = RemappedUBOTypeSizes.find(type);
5731 if (iter != RemappedUBOTypeSizes.end()) {
5732 return std::get<0>(iter->second);
5733 }
5734
5735 return DL.getTypeSizeInBits(type);
5736}
5737
5738uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5739 auto iter = RemappedUBOTypeSizes.find(type);
5740 if (iter != RemappedUBOTypeSizes.end()) {
5741 return std::get<1>(iter->second);
5742 }
5743
5744 return DL.getTypeStoreSize(type);
5745}
5746
5747uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5748 auto iter = RemappedUBOTypeSizes.find(type);
5749 if (iter != RemappedUBOTypeSizes.end()) {
5750 return std::get<2>(iter->second);
5751 }
5752
5753 return DL.getTypeAllocSize(type);
5754}
alan-baker5b86ed72019-02-15 08:26:50 -05005755
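// StorageBuffer pointers only need the narrower VariablePointersStorageBuffer
// capability; pointers in any other storage class require full
// VariablePointers.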
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005756void SPIRVProducerPass::setVariablePointersCapabilities(
5757 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005758 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5759 setVariablePointersStorageBuffer(true);
5760 } else {
5761 setVariablePointers(true);
5762 }
5763}
5764
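// Strips GEPs to find the underlying base pointer of |v|; any other producer
// is returned unchanged, so the result is a conservative approximation.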
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005765Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005766 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5767 return GetBasePointer(gep->getPointerOperand());
5768 }
5769
5770 // Conservatively return |v|.
5771 return v;
5772}
5773
5774bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5775 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5776 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5777 if (lhs_call->getCalledFunction()->getName().startswith(
5778 clspv::ResourceAccessorFunction()) &&
5779 rhs_call->getCalledFunction()->getName().startswith(
5780 clspv::ResourceAccessorFunction())) {
5781 // For resource accessors, match descriptor set and binding.
5782 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5783 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5784 return true;
5785 } else if (lhs_call->getCalledFunction()->getName().startswith(
5786 clspv::WorkgroupAccessorFunction()) &&
5787 rhs_call->getCalledFunction()->getName().startswith(
5788 clspv::WorkgroupAccessorFunction())) {
5789 // For workgroup resources, match spec id.
5790 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5791 return true;
5792 }
5793 }
5794 }
5795
5796 return false;
5797}
5798
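// Checks whether a storage-buffer pointer select or phi picks from the same
// underlying object, which lets the producer avoid requiring the full
// VariablePointers capability. Null operands (and undef operands when the
// undef hack is enabled), and operands resolving to the same resource
// binding, are accepted.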
5799bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5800 assert(inst->getType()->isPointerTy());
5801 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5802 spv::StorageClassStorageBuffer);
5803 const bool hack_undef = clspv::Option::HackUndef();
5804 if (auto *select = dyn_cast<SelectInst>(inst)) {
5805 auto *true_base = GetBasePointer(select->getTrueValue());
5806 auto *false_base = GetBasePointer(select->getFalseValue());
5807
5808 if (true_base == false_base)
5809 return true;
5810
5811 // If either the true or false operand is null, then we satisfy the same
5812 // object constraint.
5813 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5814 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5815 return true;
5816 }
5817
5818 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5819 if (false_cst->isNullValue() ||
5820 (hack_undef && isa<UndefValue>(false_base)))
5821 return true;
5822 }
5823
5824 if (sameResource(true_base, false_base))
5825 return true;
5826 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5827 Value *value = nullptr;
5828 bool ok = true;
5829 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5830 auto *base = GetBasePointer(phi->getIncomingValue(i));
5831 // Null values satisfy the constraint of selecting from the
5832 // same object.
5833 if (!value) {
5834 if (auto *cst = dyn_cast<Constant>(base)) {
5835 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5836 value = base;
5837 } else {
5838 value = base;
5839 }
5840 } else if (base != value) {
5841 if (auto *base_cst = dyn_cast<Constant>(base)) {
5842 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5843 continue;
5844 }
5845
5846 if (sameResource(value, base))
5847 continue;
5848
5849 // Values don't represent the same base.
5850 ok = false;
5851 }
5852 }
5853
5854 return ok;
5855 }
5856
5857 // Conservatively return false.
5858 return false;
5859}
alan-bakere9308012019-03-15 10:25:13 -04005860
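// Determines whether |Arg|, a global (SSBO) pointer parameter, is ever bound
// to a resource declared coherent, by walking from the argument's call sites
// back to the clspv resource accessor calls. The result presumably drives
// emission of the Coherent decoration on the argument.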
5861bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5862 if (!Arg.getType()->isPointerTy() ||
5863 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5864 // Only SSBOs need to be annotated as coherent.
5865 return false;
5866 }
5867
5868 DenseSet<Value *> visited;
5869 std::vector<Value *> stack;
5870 for (auto *U : Arg.getParent()->users()) {
5871 if (auto *call = dyn_cast<CallInst>(U)) {
5872 stack.push_back(call->getOperand(Arg.getArgNo()));
5873 }
5874 }
5875
5876 while (!stack.empty()) {
5877 Value *v = stack.back();
5878 stack.pop_back();
5879
5880 if (!visited.insert(v).second)
5881 continue;
5882
5883 auto *resource_call = dyn_cast<CallInst>(v);
5884 if (resource_call &&
5885 resource_call->getCalledFunction()->getName().startswith(
5886 clspv::ResourceAccessorFunction())) {
5887 // If this is a resource accessor function, check if the coherent operand
5888 // is set.
5889 const auto coherent =
5890 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5891 ->getZExtValue());
5892 if (coherent == 1)
5893 return true;
5894 } else if (auto *arg = dyn_cast<Argument>(v)) {
5895 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005896 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005897 if (auto *call = dyn_cast<CallInst>(U)) {
5898 stack.push_back(call->getOperand(arg->getArgNo()));
5899 }
5900 }
5901 } else if (auto *user = dyn_cast<User>(v)) {
5902 // If this is a user, traverse all operands that could lead to resource
5903 // variables.
5904 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5905 Value *operand = user->getOperand(i);
5906 if (operand->getType()->isPointerTy() &&
5907 operand->getType()->getPointerAddressSpace() ==
5908 clspv::AddressSpace::Global) {
5909 stack.push_back(operand);
5910 }
5911 }
5912 }
5913 }
5914
5915 // No coherent resource variables encountered.
5916 return false;
5917}