blob: 7bb2b56f32acad2876f54b254ef3a94c6eaa7e80 [file] [log] [blame]
David Neto22f144c2017-06-12 14:26:21 -04001// Copyright 2017 The Clspv Authors. All rights reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15#ifdef _MSC_VER
16#pragma warning(push, 0)
17#endif
18
David Neto156783e2017-07-05 15:39:41 -040019#include <cassert>
David Neto257c3892018-04-11 13:19:45 -040020#include <cstring>
David Neto118188e2018-08-24 11:27:54 -040021#include <iomanip>
22#include <list>
David Neto862b7d82018-06-14 18:48:37 -040023#include <memory>
David Neto118188e2018-08-24 11:27:54 -040024#include <set>
25#include <sstream>
26#include <string>
27#include <tuple>
28#include <unordered_set>
29#include <utility>
David Neto862b7d82018-06-14 18:48:37 -040030
David Neto118188e2018-08-24 11:27:54 -040031#include "llvm/ADT/StringSwitch.h"
32#include "llvm/ADT/UniqueVector.h"
33#include "llvm/Analysis/LoopInfo.h"
34#include "llvm/IR/Constants.h"
35#include "llvm/IR/Dominators.h"
36#include "llvm/IR/Instructions.h"
37#include "llvm/IR/Metadata.h"
38#include "llvm/IR/Module.h"
alan-bakerf67468c2019-11-25 15:51:49 -050039#include "llvm/IR/ValueSymbolTable.h"
David Neto118188e2018-08-24 11:27:54 -040040#include "llvm/Pass.h"
41#include "llvm/Support/CommandLine.h"
42#include "llvm/Support/raw_ostream.h"
43#include "llvm/Transforms/Utils/Cloning.h"
David Neto22f144c2017-06-12 14:26:21 -040044
David Neto85082642018-03-24 06:55:20 -070045#include "spirv/1.0/spirv.hpp"
David Neto118188e2018-08-24 11:27:54 -040046
David Neto85082642018-03-24 06:55:20 -070047#include "clspv/AddressSpace.h"
alan-bakerf5e5f692018-11-27 08:33:24 -050048#include "clspv/DescriptorMap.h"
David Neto118188e2018-08-24 11:27:54 -040049#include "clspv/Option.h"
David Neto85082642018-03-24 06:55:20 -070050#include "clspv/spirv_c_strings.hpp"
51#include "clspv/spirv_glsl.hpp"
David Neto22f144c2017-06-12 14:26:21 -040052
David Neto4feb7a42017-10-06 17:29:42 -040053#include "ArgKind.h"
alan-bakerf67468c2019-11-25 15:51:49 -050054#include "Builtins.h"
alan-baker06cad652019-12-03 17:56:47 -050055#include "ComputeStructuredOrder.h"
David Neto85082642018-03-24 06:55:20 -070056#include "ConstantEmitter.h"
Alan Baker202c8c72018-08-13 13:47:44 -040057#include "Constants.h"
David Neto78383442018-06-15 20:31:56 -040058#include "DescriptorCounter.h"
alan-baker56f7aff2019-05-22 08:06:42 -040059#include "NormalizeGlobalVariable.h"
Diego Novilloa4c44fa2019-04-11 10:56:15 -040060#include "Passes.h"
David Neto48f56a42017-10-06 16:44:25 -040061
David Neto22f144c2017-06-12 14:26:21 -040062#if defined(_MSC_VER)
63#pragma warning(pop)
64#endif
65
66using namespace llvm;
67using namespace clspv;
David Neto156783e2017-07-05 15:39:41 -040068using namespace mdconst;
David Neto22f144c2017-06-12 14:26:21 -040069
70namespace {
David Netocd8ca5f2017-10-02 23:34:11 -040071
// Hidden debugging flag: when set, log resource variable creation to aid
// debugging of the descriptor-mapping phases.
cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
// Sentinel meaning "no GLSL extended instruction"; 0 is not a valid
// GLSL.std.450 instruction number.
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// Name prefix for the synthetic functions that stand in for composite
// construction; used to recognize them during code generation.
const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";
David Netoab03f432017-11-03 17:00:44 -040085
// Discriminates the payload carried by a SPIRVOperand.
enum SPIRVOperandType {
  // A result <id> referring to another SPIR-V instruction.
  NUMBERID,
  // A literal integer, stored as one or more 32-bit words.
  LITERAL_INTEGER,
  // A literal string; encoded into 32-bit words (with a terminating null)
  // when the binary is written.
  LITERAL_STRING,
  // A literal float, pre-encoded as 32-bit words.
  LITERAL_FLOAT
};
92
93struct SPIRVOperand {
94 explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
95 : Type(Ty), LiteralNum(1, Num) {}
96 explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
97 : Type(Ty), LiteralStr(Str) {}
98 explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
99 : Type(Ty), LiteralStr(Str) {}
100 explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
101 : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}
102
103 SPIRVOperandType getType() { return Type; };
104 uint32_t getNumID() { return LiteralNum[0]; };
105 std::string getLiteralStr() { return LiteralStr; };
106 ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };
107
David Neto87846742018-04-11 17:36:22 -0400108 uint32_t GetNumWords() const {
109 switch (Type) {
110 case NUMBERID:
111 return 1;
112 case LITERAL_INTEGER:
113 case LITERAL_FLOAT:
David Netoee2660d2018-06-28 16:31:29 -0400114 return uint32_t(LiteralNum.size());
David Neto87846742018-04-11 17:36:22 -0400115 case LITERAL_STRING:
116 // Account for the terminating null character.
David Netoee2660d2018-06-28 16:31:29 -0400117 return uint32_t((LiteralStr.size() + 4) / 4);
David Neto87846742018-04-11 17:36:22 -0400118 }
119 llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
120 }
121
David Neto22f144c2017-06-12 14:26:21 -0400122private:
123 SPIRVOperandType Type;
124 std::string LiteralStr;
125 SmallVector<uint32_t, 4> LiteralNum;
126};
127
David Netoc6f3ab22018-04-06 18:02:31 -0400128class SPIRVOperandList {
129public:
130 SPIRVOperandList() {}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500131 SPIRVOperandList(const SPIRVOperandList &other) = delete;
132 SPIRVOperandList(SPIRVOperandList &&other) {
David Netoc6f3ab22018-04-06 18:02:31 -0400133 contents_ = std::move(other.contents_);
134 other.contents_.clear();
135 }
136 SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
137 : contents_(init.begin(), init.end()) {}
138 operator ArrayRef<SPIRVOperand *>() { return contents_; }
139 void push_back(SPIRVOperand *op) { contents_.push_back(op); }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500140 void clear() { contents_.clear(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400141 size_t size() const { return contents_.size(); }
142 SPIRVOperand *&operator[](size_t i) { return contents_[i]; }
143
David Neto87846742018-04-11 17:36:22 -0400144 const SmallVector<SPIRVOperand *, 8> &getOperands() const {
145 return contents_;
146 }
147
David Netoc6f3ab22018-04-06 18:02:31 -0400148private:
alan-bakerb6b09dc2018-11-08 16:59:28 -0500149 SmallVector<SPIRVOperand *, 8> contents_;
David Netoc6f3ab22018-04-06 18:02:31 -0400150};
151
152SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
153 list.push_back(elem);
154 return list;
155}
156
// Convenience factories for heap-allocated operands.
// NOTE(review): callers hand these pointers to operand lists and
// instructions without an explicit delete in this file — confirm the
// intended ownership/lifetime before adding deallocation.

// A literal integer occupying a single 32-bit word.
SPIRVOperand *MkNum(uint32_t num) {
  return new SPIRVOperand(LITERAL_INTEGER, num);
}
// A literal integer occupying one word per element of |num_vec|.
SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_INTEGER, num_vec);
}
// A literal float, pre-encoded as 32-bit words in |num_vec|.
SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_FLOAT, num_vec);
}
// A result <id> operand.
SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
// A literal string operand.
SPIRVOperand *MkString(StringRef str) {
  return new SPIRVOperand(LITERAL_STRING, str);
}
David Netoc6f3ab22018-04-06 18:02:31 -0400170
David Neto22f144c2017-06-12 14:26:21 -0400171struct SPIRVInstruction {
David Neto87846742018-04-11 17:36:22 -0400172 // Create an instruction with an opcode and no result ID, and with the given
173 // operands. This computes its own word count.
174 explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
175 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
176 Operands(Ops.begin(), Ops.end()) {
177 for (auto *operand : Ops) {
David Netoee2660d2018-06-28 16:31:29 -0400178 WordCount += uint16_t(operand->GetNumWords());
David Neto87846742018-04-11 17:36:22 -0400179 }
180 }
181 // Create an instruction with an opcode and a no-zero result ID, and
182 // with the given operands. This computes its own word count.
183 explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
David Neto22f144c2017-06-12 14:26:21 -0400184 ArrayRef<SPIRVOperand *> Ops)
David Neto87846742018-04-11 17:36:22 -0400185 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
186 Operands(Ops.begin(), Ops.end()) {
187 if (ResID == 0) {
188 llvm_unreachable("Result ID of 0 was provided");
189 }
190 for (auto *operand : Ops) {
191 WordCount += operand->GetNumWords();
192 }
193 }
David Neto22f144c2017-06-12 14:26:21 -0400194
David Netoee2660d2018-06-28 16:31:29 -0400195 uint32_t getWordCount() const { return WordCount; }
David Neto22f144c2017-06-12 14:26:21 -0400196 uint16_t getOpcode() const { return Opcode; }
197 uint32_t getResultID() const { return ResultID; }
198 ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }
199
200private:
David Netoee2660d2018-06-28 16:31:29 -0400201 uint32_t WordCount; // Check the 16-bit bound at code generation time.
David Neto22f144c2017-06-12 14:26:21 -0400202 uint16_t Opcode;
203 uint32_t ResultID;
204 SmallVector<SPIRVOperand *, 4> Operands;
205};
206
// The pass that lowers an LLVM module (already massaged by earlier clspv
// passes) into a SPIR-V binary, writing the result to |out| and recording
// descriptor-map entries for the embedding tooling.
struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  // |out| receives the SPIR-V binary (or a C initializer list when
  // |outputCInitList| is set, in which case the binary is staged through
  // |binaryTempOut| first). |descriptor_map_entries| collects descriptor
  // map records; |samplerMap| gives literal sampler configurations.
  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  // Dominator and loop analyses are consumed during structured-CFG and
  // code-generation phases.
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  // Returns the SPIR-V id for the type previously registered for |Ty|.
  // Pointers to opaque structs (outside UniformConstant) are looked up by
  // their pointee type. Aborts if the type was never registered.
  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  // Accessors for the pass's bookkeeping collections.
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  // Low-level binary emission helpers; all write through |binaryOut|.
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Constants discovered during analysis, in traversal order.
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  // NOTE(review): appears to map values to ids for allocated objects,
  // distinct from ValueMap — confirm against the uses later in this file.
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  // Maps a sampler-map index to the SPIR-V id of its OpVariable.
  DenseMap<unsigned, uint32_t> SamplerMapIndexToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};
558
// Pass identification token used by LLVM's legacy pass infrastructure;
// its address (not value) identifies the pass.
char SPIRVProducerPass::ID;
David Netoc6f3ab22018-04-06 18:02:31 -0400560
alan-bakerb6b09dc2018-11-08 16:59:28 -0500561} // namespace
David Neto22f144c2017-06-12 14:26:21 -0400562
563namespace clspv {
alan-bakerf5e5f692018-11-27 08:33:24 -0500564ModulePass *createSPIRVProducerPass(
565 raw_pwrite_stream &out,
566 std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
alan-baker00e7a582019-06-07 12:54:21 -0400567 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
alan-bakerf5e5f692018-11-27 08:33:24 -0500568 bool outputCInitList) {
569 return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
alan-baker00e7a582019-06-07 12:54:21 -0400570 outputCInitList);
David Neto22f144c2017-06-12 14:26:21 -0400571}
David Netoc2c368d2017-06-30 16:50:17 -0400572} // namespace clspv
David Neto22f144c2017-06-12 14:26:21 -0400573
bool SPIRVProducerPass::runOnModule(Module &module) {
  // Drives the whole LLVM-IR-to-SPIR-V translation pipeline. The phases
  // below are order-dependent: IR info gathering must precede type and
  // constant emission, which must precede function body emission.
  //
  // When a C initializer list was requested, the binary is first staged in
  // binaryTempOut and re-emitted as comma-separated words at the end.
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // If we have a sampler map, we might have literal samplers to generate.
  if (0 < getSamplerMap().size()) {
    GenerateSamplers(module);
  }

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    // Re-emit the staged binary as a brace-enclosed, comma-separated list of
    // decimal 32-bit words, suitable for embedding in a C array initializer.
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    // A SPIR-V binary is always a whole number of 32-bit words, so the
    // i+1..i+3 accesses below stay in bounds.
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      // Reassemble each little-endian word from its four bytes.
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  // NOTE(review): the module IS mutated above (initializers cleared, globals
  // rewritten in GenerateLLVMIRInfo), yet we report "no change" — presumably
  // intentional since this is a terminal emission pass; confirm.
  return false;
}
687
688void SPIRVProducerPass::outputHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400689 binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
690 sizeof(spv::MagicNumber));
691 binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
692 sizeof(spv::Version));
David Neto22f144c2017-06-12 14:26:21 -0400693
alan-baker0c18ab02019-06-12 10:23:21 -0400694 // use Google's vendor ID
695 const uint32_t vendor = 21 << 16;
alan-baker00e7a582019-06-07 12:54:21 -0400696 binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));
David Neto22f144c2017-06-12 14:26:21 -0400697
alan-baker00e7a582019-06-07 12:54:21 -0400698 // we record where we need to come back to and patch in the bound value
699 patchBoundOffset = binaryOut->tell();
David Neto22f144c2017-06-12 14:26:21 -0400700
alan-baker00e7a582019-06-07 12:54:21 -0400701 // output a bad bound for now
702 binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));
David Neto22f144c2017-06-12 14:26:21 -0400703
alan-baker00e7a582019-06-07 12:54:21 -0400704 // output the schema (reserved for use and must be 0)
705 const uint32_t schema = 0;
706 binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
David Neto22f144c2017-06-12 14:26:21 -0400707}
708
709void SPIRVProducerPass::patchHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400710 // for a binary we just write the value of nextID over bound
711 binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
712 patchBoundOffset);
David Neto22f144c2017-06-12 14:26:21 -0400713}
714
void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR for function such as global variable for
  // argument, constant and pointer type for argument access. These information
  // is artificial one because we need Vulkan SPIR-V output. This function is
  // executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  // Rewrite __constant globals, then discover resource variables created by
  // the descriptor allocation passes.
  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  // Remember whether any global maps to the WorkgroupSize builtin; if so,
  // reqd_work_group_size constants must be registered below.
  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    // Pre-register constants and types that instruction lowering will need.
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              // zext i1 -> select between 0 and 1.
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              // sext i1 -> select between 0 and all-ones (-1).
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              // uitofp i1 -> select between 0.0f and 1.0f.
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsGetImageHeight(callee_name) ||
              clspv::IsGetImageWidth(callee_name)) {
            // Image queries return their result through an int2 vector.
            FindType(VectorType::get(Type::getInt32Ty(Context), 2));
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // Pick the channel scalar type to register based on which named image
    // types appear in the module.
    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}
836
void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  // Rewrites __constant-address-space globals so they can be expressed in
  // Vulkan SPIR-V: dead ones are deleted; live ones are either size-checked
  // (when clustered into a storage buffer) or moved into the
  // ModuleScopePrivate address space, with callers' function types recorded
  // so FindTypePerFunc can rewrite their signatures to match.
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      // Enforce the 64KiB limit on clustered module constants.
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      // Snapshot the user list: replaceUsesOfWith below mutates it.
      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      // Records, for a call that passes the global (or a GEP into it), which
      // argument slot received it, keyed by the callee's function type.
      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
931
void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  // Rebuilds the resource-variable tables from the clspv.resource.var.*
  // accessor calls left by descriptor allocation: ResourceVarInfoList (one
  // entry per distinct resource), FunctionToResourceVarsMap (per-function,
  // indexed by kernel argument), and ModuleOrderedResourceVars.
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          // Decode the accessor call's constant operands:
          // arg 0 = descriptor set, 1 = binding, 2 = arg kind,
          // 3 = kernel arg index, 5 = coherent flag.
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    // Debug dump requested via command-line flag.
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}
1035
bool SPIRVProducerPass::FindExtInst(Module &M) {
  // Returns true if any call in the module maps to a GLSL.std.450 extended
  // instruction, either directly or "indirectly" (i.e. it will later be
  // expanded into a short sequence involving an OpExtInst). For the
  // indirect cases this also pre-registers the helper constants that the
  // expansion will need.
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
          // Check whether this call is for extend instructions.
          auto callee_name = Callee->getName();
          const glsl::ExtInst EInst = getExtInstEnum(callee_name);
          const glsl::ExtInst IndirectEInst =
              getIndirectExtInstEnum(callee_name);

          HasExtInst |=
              (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);

          // Note: relies on kGlslExtInstBad converting to false here.
          if (IndirectEInst) {
            // Register extra constants if needed.

            // Registers a type and constant for computing the result of the
            // given instruction. If the result of the instruction is a vector,
            // then make a splat vector constant with the same number of
            // elements.
            auto register_constant = [this, &I](Constant *constant) {
              FindType(constant->getType());
              FindConstant(constant);
              if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
                // Register the splat vector of the value with the same
                // width as the result of the instruction.
                auto *vec_constant = ConstantVector::getSplat(
                    static_cast<unsigned>(vectorTy->getNumElements()),
                    constant);
                FindConstant(vec_constant);
                FindType(vec_constant->getType());
              }
            };
            switch (IndirectEInst) {
            case glsl::ExtInstFindUMsb:
              // clz needs OpExtInst and OpISub with constant 31, or splat
              // vector of 31. Add it to the constant list here.
              register_constant(
                  ConstantInt::get(Type::getInt32Ty(Context), 31));
              break;
            case glsl::ExtInstAcos:
            case glsl::ExtInstAsin:
            case glsl::ExtInstAtan:
            case glsl::ExtInstAtan2:
              // We need 1/pi for acospi, asinpi, atan2pi.
              register_constant(
                  ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
              break;
            default:
              assert(false && "internally inconsistent");
            }
          }
        }
      }
    }
  }

  return HasExtInst;
}
1100
1101void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1102 // Investigate global variable's type.
1103 FindType(GV.getType());
1104}
1105
1106void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1107 // Investigate function's type.
1108 FunctionType *FTy = F.getFunctionType();
1109
1110 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1111 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001112 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001113 if (GlobalConstFuncTyMap.count(FTy)) {
1114 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1115 SmallVector<Type *, 4> NewFuncParamTys;
1116 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1117 Type *ParamTy = FTy->getParamType(i);
1118 if (i == GVCstArgIdx) {
1119 Type *EleTy = ParamTy->getPointerElementType();
1120 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1121 }
1122
1123 NewFuncParamTys.push_back(ParamTy);
1124 }
1125
1126 FunctionType *NewFTy =
1127 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1128 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1129 FTy = NewFTy;
1130 }
1131
1132 FindType(FTy);
1133 } else {
1134 // As kernel functions do not have parameters, create new function type and
1135 // add it to type map.
1136 SmallVector<Type *, 4> NewFuncParamTys;
1137 FunctionType *NewFTy =
1138 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1139 FindType(NewFTy);
1140 }
1141
1142 // Investigate instructions' type in function body.
1143 for (BasicBlock &BB : F) {
1144 for (Instruction &I : BB) {
1145 if (isa<ShuffleVectorInst>(I)) {
1146 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1147 // Ignore type for mask of shuffle vector instruction.
1148 if (i == 2) {
1149 continue;
1150 }
1151
1152 Value *Op = I.getOperand(i);
1153 if (!isa<MetadataAsValue>(Op)) {
1154 FindType(Op->getType());
1155 }
1156 }
1157
1158 FindType(I.getType());
1159 continue;
1160 }
1161
David Neto862b7d82018-06-14 18:48:37 -04001162 CallInst *Call = dyn_cast<CallInst>(&I);
1163
1164 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001165 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001166 // This is a fake call representing access to a resource variable.
1167 // We handle that elsewhere.
1168 continue;
1169 }
1170
Alan Baker202c8c72018-08-13 13:47:44 -04001171 if (Call && Call->getCalledFunction()->getName().startswith(
1172 clspv::WorkgroupAccessorFunction())) {
1173 // This is a fake call representing access to a workgroup variable.
1174 // We handle that elsewhere.
1175 continue;
1176 }
1177
David Neto22f144c2017-06-12 14:26:21 -04001178 // Work through the operands of the instruction.
1179 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1180 Value *const Op = I.getOperand(i);
1181 // If any of the operands is a constant, find the type!
1182 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1183 FindType(Op->getType());
1184 }
1185 }
1186
1187 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001188 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001189 // Avoid to check call instruction's type.
1190 break;
1191 }
Alan Baker202c8c72018-08-13 13:47:44 -04001192 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1193 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1194 clspv::WorkgroupAccessorFunction())) {
1195 // This is a fake call representing access to a workgroup variable.
1196 // We handle that elsewhere.
1197 continue;
1198 }
1199 }
David Neto22f144c2017-06-12 14:26:21 -04001200 if (!isa<MetadataAsValue>(&Op)) {
1201 FindType(Op->getType());
1202 continue;
1203 }
1204 }
1205
David Neto22f144c2017-06-12 14:26:21 -04001206 // We don't want to track the type of this call as we are going to replace
1207 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001208 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001209 Call->getCalledFunction()->getName())) {
1210 continue;
1211 }
1212
1213 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1214 // If gep's base operand has ModuleScopePrivate address space, make gep
1215 // return ModuleScopePrivate address space.
1216 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1217 // Add pointer type with private address space for global constant to
1218 // type list.
1219 Type *EleTy = I.getType()->getPointerElementType();
1220 Type *NewPTy =
1221 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1222
1223 FindType(NewPTy);
1224 continue;
1225 }
1226 }
1227
1228 FindType(I.getType());
1229 }
1230 }
1231}
1232
David Neto862b7d82018-06-14 18:48:37 -04001233void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1234 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001235 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001236 0 < getSamplerMap().size()) {
1237 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1238 if (!SamplerStructTy) {
1239 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1240 }
1241
1242 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1243
1244 FindType(SamplerTy);
1245 }
1246}
1247
1248void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1249 // Record types so they are generated.
1250 TypesNeedingLayout.reset();
1251 StructTypesNeedingBlock.reset();
1252
1253 // To match older clspv codegen, generate the float type first if required
1254 // for images.
1255 for (const auto *info : ModuleOrderedResourceVars) {
1256 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1257 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001258 if (IsIntImageType(info->var_fn->getReturnType())) {
1259 // Nothing for now...
1260 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1261 FindType(Type::getInt32Ty(M.getContext()));
1262 }
1263
1264 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001265 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001266 }
1267 }
1268
1269 for (const auto *info : ModuleOrderedResourceVars) {
1270 Type *type = info->var_fn->getReturnType();
1271
1272 switch (info->arg_kind) {
1273 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001274 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001275 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1276 StructTypesNeedingBlock.insert(sty);
1277 } else {
1278 errs() << *type << "\n";
1279 llvm_unreachable("Buffer arguments must map to structures!");
1280 }
1281 break;
1282 case clspv::ArgKind::Pod:
1283 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1284 StructTypesNeedingBlock.insert(sty);
1285 } else {
1286 errs() << *type << "\n";
1287 llvm_unreachable("POD arguments must map to structures!");
1288 }
1289 break;
1290 case clspv::ArgKind::ReadOnlyImage:
1291 case clspv::ArgKind::WriteOnlyImage:
1292 case clspv::ArgKind::Sampler:
1293 // Sampler and image types map to the pointee type but
1294 // in the uniform constant address space.
1295 type = PointerType::get(type->getPointerElementType(),
1296 clspv::AddressSpace::UniformConstant);
1297 break;
1298 default:
1299 break;
1300 }
1301
1302 // The converted type is the type of the OpVariable we will generate.
1303 // If the pointee type is an array of size zero, FindType will convert it
1304 // to a runtime array.
1305 FindType(type);
1306 }
1307
alan-bakerdcd97412019-09-16 15:32:30 -04001308 // If module constants are clustered in a storage buffer then that struct
1309 // needs layout decorations.
1310 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1311 for (GlobalVariable &GV : M.globals()) {
1312 PointerType *PTy = cast<PointerType>(GV.getType());
1313 const auto AS = PTy->getAddressSpace();
1314 const bool module_scope_constant_external_init =
1315 (AS == AddressSpace::Constant) && GV.hasInitializer();
1316 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1317 if (module_scope_constant_external_init &&
1318 spv::BuiltInMax == BuiltinType) {
1319 StructTypesNeedingBlock.insert(
1320 cast<StructType>(PTy->getPointerElementType()));
1321 }
1322 }
1323 }
1324
David Neto862b7d82018-06-14 18:48:37 -04001325 // Traverse the arrays and structures underneath each Block, and
1326 // mark them as needing layout.
1327 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1328 StructTypesNeedingBlock.end());
1329 while (!work_list.empty()) {
1330 Type *type = work_list.back();
1331 work_list.pop_back();
1332 TypesNeedingLayout.insert(type);
1333 switch (type->getTypeID()) {
1334 case Type::ArrayTyID:
1335 work_list.push_back(type->getArrayElementType());
1336 if (!Hack_generate_runtime_array_stride_early) {
1337 // Remember this array type for deferred decoration.
1338 TypesNeedingArrayStride.insert(type);
1339 }
1340 break;
1341 case Type::StructTyID:
1342 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1343 work_list.push_back(elem_ty);
1344 }
1345 default:
1346 // This type and its contained types don't get layout.
1347 break;
1348 }
1349 }
1350}
1351
Alan Baker202c8c72018-08-13 13:47:44 -04001352void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1353 // The SpecId assignment for pointer-to-local arguments is recorded in
1354 // module-level metadata. Translate that information into local argument
1355 // information.
1356 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001357 if (!nmd)
1358 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001359 for (auto operand : nmd->operands()) {
1360 MDTuple *tuple = cast<MDTuple>(operand);
1361 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1362 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001363 ConstantAsMetadata *arg_index_md =
1364 cast<ConstantAsMetadata>(tuple->getOperand(1));
1365 int arg_index = static_cast<int>(
1366 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1367 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001368
1369 ConstantAsMetadata *spec_id_md =
1370 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001371 int spec_id = static_cast<int>(
1372 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001373
1374 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1375 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001376 if (LocalSpecIdInfoMap.count(spec_id))
1377 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001378
1379 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1380 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1381 nextID + 1, nextID + 2,
1382 nextID + 3, spec_id};
1383 LocalSpecIdInfoMap[spec_id] = info;
1384 nextID += 4;
1385
1386 // Ensure the types necessary for this argument get generated.
1387 Type *IdxTy = Type::getInt32Ty(M.getContext());
1388 FindConstant(ConstantInt::get(IdxTy, 0));
1389 FindType(IdxTy);
1390 FindType(arg->getType());
1391 }
1392}
1393
David Neto22f144c2017-06-12 14:26:21 -04001394void SPIRVProducerPass::FindType(Type *Ty) {
1395 TypeList &TyList = getTypeList();
1396
1397 if (0 != TyList.idFor(Ty)) {
1398 return;
1399 }
1400
1401 if (Ty->isPointerTy()) {
1402 auto AddrSpace = Ty->getPointerAddressSpace();
1403 if ((AddressSpace::Constant == AddrSpace) ||
1404 (AddressSpace::Global == AddrSpace)) {
1405 auto PointeeTy = Ty->getPointerElementType();
1406
1407 if (PointeeTy->isStructTy() &&
1408 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1409 FindType(PointeeTy);
1410 auto ActualPointerTy =
1411 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1412 FindType(ActualPointerTy);
1413 return;
1414 }
1415 }
1416 }
1417
David Neto862b7d82018-06-14 18:48:37 -04001418 // By convention, LLVM array type with 0 elements will map to
1419 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1420 // has a constant number of elements. We need to support type of the
1421 // constant.
1422 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1423 if (arrayTy->getNumElements() > 0) {
1424 LLVMContext &Context = Ty->getContext();
1425 FindType(Type::getInt32Ty(Context));
1426 }
David Neto22f144c2017-06-12 14:26:21 -04001427 }
1428
1429 for (Type *SubTy : Ty->subtypes()) {
1430 FindType(SubTy);
1431 }
1432
1433 TyList.insert(Ty);
1434}
1435
1436void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1437 // If the global variable has a (non undef) initializer.
1438 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001439 // Generate the constant if it's not the initializer to a module scope
1440 // constant that we will expect in a storage buffer.
1441 const bool module_scope_constant_external_init =
1442 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1443 clspv::Option::ModuleConstantsInStorageBuffer();
1444 if (!module_scope_constant_external_init) {
1445 FindConstant(GV.getInitializer());
1446 }
David Neto22f144c2017-06-12 14:26:21 -04001447 }
1448}
1449
// Discover the constants used by the body of |F| and record them via
// FindConstant().  Several instruction kinds get special treatment:
// operands that become SPIR-V ids in a non-literal position are skipped,
// and some lowerings (e.g. <4 x i8> emulation) require extra helper
// constants that do not appear as IR operands.
//
// Note the control flow: most special-case branches end with `continue`,
// bypassing the generic operand scan at the bottom of the loop; the
// TruncInst and AtomicRMWInst branches deliberately fall through so their
// ordinary operands are still scanned.
void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
  // Investigate constants in function body.
  for (BasicBlock &BB : F) {
    for (Instruction &I : BB) {
      if (auto *call = dyn_cast<CallInst>(&I)) {
        // NOTE(review): assumes all calls are direct (getCalledFunction()
        // non-null) — appears guaranteed by earlier clspv passes; confirm.
        auto name = call->getCalledFunction()->getName();
        if (name == clspv::LiteralSamplerFunction()) {
          // We've handled these constants elsewhere, so skip it.
          continue;
        }
        // Resource and workgroup accessor arguments never become ordinary
        // SPIR-V constants.
        if (name.startswith(clspv::ResourceAccessorFunction())) {
          continue;
        }
        if (name.startswith(clspv::WorkgroupAccessorFunction())) {
          continue;
        }
        if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
          // Skip the first operand that has the SPIR-V Opcode
          for (unsigned i = 1; i < I.getNumOperands(); i++) {
            if (isa<Constant>(I.getOperand(i)) &&
                !isa<GlobalValue>(I.getOperand(i))) {
              FindConstant(I.getOperand(i));
            }
          }
          continue;
        }
      }

      if (isa<AllocaInst>(I)) {
        // Alloca instruction has constant for the number of element. Ignore it.
        continue;
      } else if (isa<ShuffleVectorInst>(I)) {
        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore constant for mask of shuffle vector instruction.
          if (i == 2) {
            continue;
          }

          if (isa<Constant>(I.getOperand(i)) &&
              !isa<GlobalValue>(I.getOperand(i))) {
            FindConstant(I.getOperand(i));
          }
        }

        continue;
      } else if (isa<InsertElementInst>(I)) {
        // Handle InsertElement with <4 x i8> specially.
        // The emulation packs the vector into an i32, so it needs a byte
        // mask and a bit-shift amount in addition to the IR operands.
        Type *CompositeTy = I.getOperand(0)->getType();
        if (is4xi8vec(CompositeTy)) {
          LLVMContext &Context = CompositeTy->getContext();
          if (isa<Constant>(I.getOperand(0))) {
            FindConstant(I.getOperand(0));
          }

          if (isa<Constant>(I.getOperand(1))) {
            FindConstant(I.getOperand(1));
          }

          // Add mask constant 0xFF.
          Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
          FindConstant(CstFF);

          // Add shift amount constant.
          if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
            uint64_t Idx = CI->getZExtValue();
            Constant *CstShiftAmount =
                ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
            FindConstant(CstShiftAmount);
          }

          continue;
        }

        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore constant for index of InsertElement instruction.
          if (i == 2) {
            continue;
          }

          if (isa<Constant>(I.getOperand(i)) &&
              !isa<GlobalValue>(I.getOperand(i))) {
            FindConstant(I.getOperand(i));
          }
        }

        continue;
      } else if (isa<ExtractElementInst>(I)) {
        // Handle ExtractElement with <4 x i8> specially (same packed-i32
        // emulation as InsertElement above).
        Type *CompositeTy = I.getOperand(0)->getType();
        if (is4xi8vec(CompositeTy)) {
          LLVMContext &Context = CompositeTy->getContext();
          if (isa<Constant>(I.getOperand(0))) {
            FindConstant(I.getOperand(0));
          }

          // Add mask constant 0xFF.
          Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
          FindConstant(CstFF);

          // Add shift amount constant.
          if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
            // Constant index: shift amount is known at compile time.
            uint64_t Idx = CI->getZExtValue();
            Constant *CstShiftAmount =
                ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
            FindConstant(CstShiftAmount);
          } else {
            // Dynamic index: the shift amount is computed as index * 8,
            // so the multiplier constant 8 is needed instead.
            ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
            FindConstant(Cst8);
          }

          continue;
        }

        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore constant for index of ExtractElement instruction.
          if (i == 1) {
            continue;
          }

          if (isa<Constant>(I.getOperand(i)) &&
              !isa<GlobalValue>(I.getOperand(i))) {
            FindConstant(I.getOperand(i));
          }
        }

        continue;
      } else if ((Instruction::Xor == I.getOpcode()) &&
                 I.getType()->isIntegerTy(1)) {
        // We special case for Xor where the type is i1 and one of the arguments
        // is a constant 1 (true), this is an OpLogicalNot in SPIR-V, and we
        // don't need the constant
        bool foundConstantTrue = false;
        for (Use &Op : I.operands()) {
          if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
            auto CI = cast<ConstantInt>(Op);

            if (CI->isZero() || foundConstantTrue) {
              // If we already found the true constant, we might (probably only
              // on -O0) have an OpLogicalNot which is taking a constant
              // argument, so discover it anyway.
              FindConstant(Op);
            } else {
              foundConstantTrue = true;
            }
          }
        }

        continue;
      } else if (isa<TruncInst>(I)) {
        // Special case if i8 is not generally handled.
        // Falls through to the generic operand scan below.
        if (!clspv::Option::Int8Support()) {
          // For truncation to i8 we mask against 255.
          Type *ToTy = I.getType();
          if (8u == ToTy->getPrimitiveSizeInBits()) {
            LLVMContext &Context = ToTy->getContext();
            Constant *Cst255 =
                ConstantInt::get(Type::getInt32Ty(Context), 0xff);
            FindConstant(Cst255);
          }
        }
      } else if (isa<AtomicRMWInst>(I)) {
        // Atomics need scope and memory-semantics constants for the
        // SPIR-V atomic instruction.  Falls through to the generic
        // operand scan below.
        LLVMContext &Context = I.getContext();

        FindConstant(
            ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
        FindConstant(ConstantInt::get(
            Type::getInt32Ty(Context),
            spv::MemorySemanticsUniformMemoryMask |
                spv::MemorySemanticsSequentiallyConsistentMask));
      }

      // Generic case: track every non-global constant operand.
      for (Use &Op : I.operands()) {
        if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
          FindConstant(Op);
        }
      }
    }
  }
}
1629
1630void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001631 ValueList &CstList = getConstantList();
1632
David Netofb9a7972017-08-25 17:08:24 -04001633 // If V is already tracked, ignore it.
1634 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001635 return;
1636 }
1637
David Neto862b7d82018-06-14 18:48:37 -04001638 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1639 return;
1640 }
1641
David Neto22f144c2017-06-12 14:26:21 -04001642 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001643 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001644
1645 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001646 if (is4xi8vec(CstTy)) {
1647 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001648 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001649 }
1650 }
1651
1652 if (Cst->getNumOperands()) {
1653 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1654 ++I) {
1655 FindConstant(*I);
1656 }
1657
David Netofb9a7972017-08-25 17:08:24 -04001658 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001659 return;
1660 } else if (const ConstantDataSequential *CDS =
1661 dyn_cast<ConstantDataSequential>(Cst)) {
1662 // Add constants for each element to constant list.
1663 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1664 Constant *EleCst = CDS->getElementAsConstant(i);
1665 FindConstant(EleCst);
1666 }
1667 }
1668
1669 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001670 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001671 }
1672}
1673
1674spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1675 switch (AddrSpace) {
1676 default:
1677 llvm_unreachable("Unsupported OpenCL address space");
1678 case AddressSpace::Private:
1679 return spv::StorageClassFunction;
1680 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001681 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001682 case AddressSpace::Constant:
1683 return clspv::Option::ConstantArgsInUniformBuffer()
1684 ? spv::StorageClassUniform
1685 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001686 case AddressSpace::Input:
1687 return spv::StorageClassInput;
1688 case AddressSpace::Local:
1689 return spv::StorageClassWorkgroup;
1690 case AddressSpace::UniformConstant:
1691 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001692 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001693 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001694 case AddressSpace::ModuleScopePrivate:
1695 return spv::StorageClassPrivate;
1696 }
1697}
1698
David Neto862b7d82018-06-14 18:48:37 -04001699spv::StorageClass
1700SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1701 switch (arg_kind) {
1702 case clspv::ArgKind::Buffer:
1703 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001704 case clspv::ArgKind::BufferUBO:
1705 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001706 case clspv::ArgKind::Pod:
1707 return clspv::Option::PodArgsInUniformBuffer()
1708 ? spv::StorageClassUniform
1709 : spv::StorageClassStorageBuffer;
1710 case clspv::ArgKind::Local:
1711 return spv::StorageClassWorkgroup;
1712 case clspv::ArgKind::ReadOnlyImage:
1713 case clspv::ArgKind::WriteOnlyImage:
1714 case clspv::ArgKind::Sampler:
1715 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001716 default:
1717 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001718 }
1719}
1720
David Neto22f144c2017-06-12 14:26:21 -04001721spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1722 return StringSwitch<spv::BuiltIn>(Name)
1723 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1724 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1725 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1726 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1727 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1728 .Default(spv::BuiltInMax);
1729}
1730
1731void SPIRVProducerPass::GenerateExtInstImport() {
1732 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1733 uint32_t &ExtInstImportID = getOpExtInstImportID();
1734
1735 //
1736 // Generate OpExtInstImport.
1737 //
1738 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001739 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001740 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1741 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001742}
1743
alan-bakerb6b09dc2018-11-08 16:59:28 -05001744void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1745 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001746 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1747 ValueMapType &VMap = getValueMap();
1748 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001749 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001750
1751 // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
1752 // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
1753 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1754
1755 for (Type *Ty : getTypeList()) {
1756 // Update TypeMap with nextID for reference later.
1757 TypeMap[Ty] = nextID;
1758
1759 switch (Ty->getTypeID()) {
1760 default: {
1761 Ty->print(errs());
1762 llvm_unreachable("Unsupported type???");
1763 break;
1764 }
1765 case Type::MetadataTyID:
1766 case Type::LabelTyID: {
1767 // Ignore these types.
1768 break;
1769 }
1770 case Type::PointerTyID: {
1771 PointerType *PTy = cast<PointerType>(Ty);
1772 unsigned AddrSpace = PTy->getAddressSpace();
1773
1774 // For the purposes of our Vulkan SPIR-V type system, constant and global
1775 // are conflated.
1776 bool UseExistingOpTypePointer = false;
1777 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001778 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1779 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001780 // Check to see if we already created this type (for instance, if we
1781 // had a constant <type>* and a global <type>*, the type would be
1782 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001783 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1784 if (0 < TypeMap.count(GlobalTy)) {
1785 TypeMap[PTy] = TypeMap[GlobalTy];
1786 UseExistingOpTypePointer = true;
1787 break;
1788 }
David Neto22f144c2017-06-12 14:26:21 -04001789 }
1790 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001791 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1792 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001793
alan-bakerb6b09dc2018-11-08 16:59:28 -05001794 // Check to see if we already created this type (for instance, if we
1795 // had a constant <type>* and a global <type>*, the type would be
1796 // created by one of these types, and shared by both).
1797 auto ConstantTy =
1798 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001799 if (0 < TypeMap.count(ConstantTy)) {
1800 TypeMap[PTy] = TypeMap[ConstantTy];
1801 UseExistingOpTypePointer = true;
1802 }
David Neto22f144c2017-06-12 14:26:21 -04001803 }
1804 }
1805
David Neto862b7d82018-06-14 18:48:37 -04001806 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001807
David Neto862b7d82018-06-14 18:48:37 -04001808 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001809 //
1810 // Generate OpTypePointer.
1811 //
1812
1813 // OpTypePointer
1814 // Ops[0] = Storage Class
1815 // Ops[1] = Element Type ID
1816 SPIRVOperandList Ops;
1817
David Neto257c3892018-04-11 13:19:45 -04001818 Ops << MkNum(GetStorageClass(AddrSpace))
1819 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001820
David Neto87846742018-04-11 17:36:22 -04001821 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001822 SPIRVInstList.push_back(Inst);
1823 }
David Neto22f144c2017-06-12 14:26:21 -04001824 break;
1825 }
1826 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001827 StructType *STy = cast<StructType>(Ty);
1828
1829 // Handle sampler type.
1830 if (STy->isOpaque()) {
1831 if (STy->getName().equals("opencl.sampler_t")) {
1832 //
1833 // Generate OpTypeSampler
1834 //
1835 // Empty Ops.
1836 SPIRVOperandList Ops;
1837
David Neto87846742018-04-11 17:36:22 -04001838 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001839 SPIRVInstList.push_back(Inst);
1840 break;
alan-bakerf67468c2019-11-25 15:51:49 -05001841 } else if (STy->getName().startswith("opencl.image2d_ro_t") ||
1842 STy->getName().startswith("opencl.image2d_wo_t") ||
1843 STy->getName().startswith("opencl.image3d_ro_t") ||
1844 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001845 //
1846 // Generate OpTypeImage
1847 //
1848 // Ops[0] = Sampled Type ID
1849 // Ops[1] = Dim ID
1850 // Ops[2] = Depth (Literal Number)
1851 // Ops[3] = Arrayed (Literal Number)
1852 // Ops[4] = MS (Literal Number)
1853 // Ops[5] = Sampled (Literal Number)
1854 // Ops[6] = Image Format ID
1855 //
1856 SPIRVOperandList Ops;
1857
alan-bakerf67468c2019-11-25 15:51:49 -05001858 uint32_t ImageTyID = nextID++;
1859 uint32_t SampledTyID = 0;
1860 if (STy->getName().contains(".float")) {
1861 SampledTyID = lookupType(Type::getFloatTy(Context));
1862 } else if (STy->getName().contains(".uint")) {
1863 SampledTyID = lookupType(Type::getInt32Ty(Context));
1864 } else if (STy->getName().contains(".int")) {
1865 // Generate a signed 32-bit integer if necessary.
1866 if (int32ID == 0) {
1867 int32ID = nextID++;
1868 SPIRVOperandList intOps;
1869 intOps << MkNum(32);
1870 intOps << MkNum(1);
1871 auto signed_int =
1872 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1873 SPIRVInstList.push_back(signed_int);
1874 }
1875 SampledTyID = int32ID;
1876
1877 // Generate a vec4 of the signed int if necessary.
1878 if (v4int32ID == 0) {
1879 v4int32ID = nextID++;
1880 SPIRVOperandList vecOps;
1881 vecOps << MkId(int32ID);
1882 vecOps << MkNum(4);
1883 auto int_vec =
1884 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
1885 SPIRVInstList.push_back(int_vec);
1886 }
1887 } else {
1888 // This was likely an UndefValue.
1889 SampledTyID = lookupType(Type::getFloatTy(Context));
1890 }
David Neto257c3892018-04-11 13:19:45 -04001891 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001892
1893 spv::Dim DimID = spv::Dim2D;
alan-bakerf67468c2019-11-25 15:51:49 -05001894 if (STy->getName().startswith("opencl.image3d_ro_t") ||
1895 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001896 DimID = spv::Dim3D;
1897 }
David Neto257c3892018-04-11 13:19:45 -04001898 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001899
1900 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001901 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001902
1903 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001904 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001905
1906 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001907 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001908
1909 // TODO: Set up Sampled.
1910 //
1911 // From Spec
1912 //
1913 // 0 indicates this is only known at run time, not at compile time
1914 // 1 indicates will be used with sampler
1915 // 2 indicates will be used without a sampler (a storage image)
1916 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05001917 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04001918 Sampled = 2;
1919 }
David Neto257c3892018-04-11 13:19:45 -04001920 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04001921
1922 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04001923 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04001924
alan-bakerf67468c2019-11-25 15:51:49 -05001925 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001926 SPIRVInstList.push_back(Inst);
1927 break;
1928 }
1929 }
1930
1931 //
1932 // Generate OpTypeStruct
1933 //
1934 // Ops[0] ... Ops[n] = Member IDs
1935 SPIRVOperandList Ops;
1936
1937 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04001938 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04001939 }
1940
David Neto22f144c2017-06-12 14:26:21 -04001941 uint32_t STyID = nextID;
1942
alan-bakerb6b09dc2018-11-08 16:59:28 -05001943 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001944 SPIRVInstList.push_back(Inst);
1945
1946 // Generate OpMemberDecorate.
1947 auto DecoInsertPoint =
1948 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1949 [](SPIRVInstruction *Inst) -> bool {
1950 return Inst->getOpcode() != spv::OpDecorate &&
1951 Inst->getOpcode() != spv::OpMemberDecorate &&
1952 Inst->getOpcode() != spv::OpExtInstImport;
1953 });
1954
David Netoc463b372017-08-10 15:32:21 -04001955 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04001956 // Search for the correct offsets if this type was remapped.
1957 std::vector<uint32_t> *offsets = nullptr;
1958 auto iter = RemappedUBOTypeOffsets.find(STy);
1959 if (iter != RemappedUBOTypeOffsets.end()) {
1960 offsets = &iter->second;
1961 }
David Netoc463b372017-08-10 15:32:21 -04001962
David Neto862b7d82018-06-14 18:48:37 -04001963 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04001964 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1965 MemberIdx++) {
1966 // Ops[0] = Structure Type ID
1967 // Ops[1] = Member Index(Literal Number)
1968 // Ops[2] = Decoration (Offset)
1969 // Ops[3] = Byte Offset (Literal Number)
1970 Ops.clear();
1971
David Neto257c3892018-04-11 13:19:45 -04001972 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04001973
alan-bakerb6b09dc2018-11-08 16:59:28 -05001974 auto ByteOffset =
1975 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04001976 if (offsets) {
1977 ByteOffset = (*offsets)[MemberIdx];
1978 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05001979 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04001980 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04001981 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04001982
David Neto87846742018-04-11 17:36:22 -04001983 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001984 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001985 }
1986
1987 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04001988 if (StructTypesNeedingBlock.idFor(STy)) {
1989 Ops.clear();
1990 // Use Block decorations with StorageBuffer storage class.
1991 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04001992
David Neto862b7d82018-06-14 18:48:37 -04001993 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
1994 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001995 }
1996 break;
1997 }
1998 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05001999 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002000
2001 if (BitWidth == 1) {
David Neto87846742018-04-11 17:36:22 -04002002 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002003 SPIRVInstList.push_back(Inst);
2004 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002005 if (!clspv::Option::Int8Support()) {
2006 // i8 is added to TypeMap as i32.
2007 // No matter what LLVM type is requested first, always alias the
2008 // second one's SPIR-V type to be the same as the one we generated
2009 // first.
2010 unsigned aliasToWidth = 0;
2011 if (BitWidth == 8) {
2012 aliasToWidth = 32;
2013 BitWidth = 32;
2014 } else if (BitWidth == 32) {
2015 aliasToWidth = 8;
2016 }
2017 if (aliasToWidth) {
2018 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2019 auto where = TypeMap.find(otherType);
2020 if (where == TypeMap.end()) {
2021 // Go ahead and make it, but also map the other type to it.
2022 TypeMap[otherType] = nextID;
2023 } else {
2024 // Alias this SPIR-V type the existing type.
2025 TypeMap[Ty] = where->second;
2026 break;
2027 }
David Neto391aeb12017-08-26 15:51:58 -04002028 }
David Neto22f144c2017-06-12 14:26:21 -04002029 }
2030
David Neto257c3892018-04-11 13:19:45 -04002031 SPIRVOperandList Ops;
2032 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002033
2034 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002035 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002036 }
2037 break;
2038 }
2039 case Type::HalfTyID:
2040 case Type::FloatTyID:
2041 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002042 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2043 SPIRVOperand *WidthOp =
2044 new SPIRVOperand(SPIRVOperandType::LITERAL_INTEGER, BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002045
2046 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002047 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04002048 break;
2049 }
2050 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002051 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002052 const uint64_t Length = ArrTy->getArrayNumElements();
2053 if (Length == 0) {
2054 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002055
David Neto862b7d82018-06-14 18:48:37 -04002056 // Only generate the type once.
2057 // TODO(dneto): Can it ever be generated more than once?
2058 // Doesn't LLVM type uniqueness guarantee we'll only see this
2059 // once?
2060 Type *EleTy = ArrTy->getArrayElementType();
2061 if (OpRuntimeTyMap.count(EleTy) == 0) {
2062 uint32_t OpTypeRuntimeArrayID = nextID;
2063 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002064
David Neto862b7d82018-06-14 18:48:37 -04002065 //
2066 // Generate OpTypeRuntimeArray.
2067 //
David Neto22f144c2017-06-12 14:26:21 -04002068
David Neto862b7d82018-06-14 18:48:37 -04002069 // OpTypeRuntimeArray
2070 // Ops[0] = Element Type ID
2071 SPIRVOperandList Ops;
2072 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002073
David Neto862b7d82018-06-14 18:48:37 -04002074 SPIRVInstList.push_back(
2075 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002076
David Neto862b7d82018-06-14 18:48:37 -04002077 if (Hack_generate_runtime_array_stride_early) {
2078 // Generate OpDecorate.
2079 auto DecoInsertPoint = std::find_if(
2080 SPIRVInstList.begin(), SPIRVInstList.end(),
2081 [](SPIRVInstruction *Inst) -> bool {
2082 return Inst->getOpcode() != spv::OpDecorate &&
2083 Inst->getOpcode() != spv::OpMemberDecorate &&
2084 Inst->getOpcode() != spv::OpExtInstImport;
2085 });
David Neto22f144c2017-06-12 14:26:21 -04002086
David Neto862b7d82018-06-14 18:48:37 -04002087 // Ops[0] = Target ID
2088 // Ops[1] = Decoration (ArrayStride)
2089 // Ops[2] = Stride Number(Literal Number)
2090 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002091
David Neto862b7d82018-06-14 18:48:37 -04002092 Ops << MkId(OpTypeRuntimeArrayID)
2093 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002094 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002095
David Neto862b7d82018-06-14 18:48:37 -04002096 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2097 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2098 }
2099 }
David Neto22f144c2017-06-12 14:26:21 -04002100
David Neto862b7d82018-06-14 18:48:37 -04002101 } else {
David Neto22f144c2017-06-12 14:26:21 -04002102
David Neto862b7d82018-06-14 18:48:37 -04002103 //
2104 // Generate OpConstant and OpTypeArray.
2105 //
2106
2107 //
2108 // Generate OpConstant for array length.
2109 //
2110 // Ops[0] = Result Type ID
2111 // Ops[1] .. Ops[n] = Values LiteralNumber
2112 SPIRVOperandList Ops;
2113
2114 Type *LengthTy = Type::getInt32Ty(Context);
2115 uint32_t ResTyID = lookupType(LengthTy);
2116 Ops << MkId(ResTyID);
2117
2118 assert(Length < UINT32_MAX);
2119 Ops << MkNum(static_cast<uint32_t>(Length));
2120
2121 // Add constant for length to constant list.
2122 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2123 AllocatedVMap[CstLength] = nextID;
2124 VMap[CstLength] = nextID;
2125 uint32_t LengthID = nextID;
2126
2127 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2128 SPIRVInstList.push_back(CstInst);
2129
2130 // Remember to generate ArrayStride later
2131 getTypesNeedingArrayStride().insert(Ty);
2132
2133 //
2134 // Generate OpTypeArray.
2135 //
2136 // Ops[0] = Element Type ID
2137 // Ops[1] = Array Length Constant ID
2138 Ops.clear();
2139
2140 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2141 Ops << MkId(EleTyID) << MkId(LengthID);
2142
2143 // Update TypeMap with nextID.
2144 TypeMap[Ty] = nextID;
2145
2146 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2147 SPIRVInstList.push_back(ArrayInst);
2148 }
David Neto22f144c2017-06-12 14:26:21 -04002149 break;
2150 }
2151 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002152 // <4 x i8> is changed to i32 if i8 is not generally supported.
2153 if (!clspv::Option::Int8Support() &&
2154 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002155 if (Ty->getVectorNumElements() == 4) {
2156 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2157 break;
2158 } else {
2159 Ty->print(errs());
2160 llvm_unreachable("Support above i8 vector type");
2161 }
2162 }
2163
2164 // Ops[0] = Component Type ID
2165 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002166 SPIRVOperandList Ops;
2167 Ops << MkId(lookupType(Ty->getVectorElementType()))
2168 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002169
alan-bakerb6b09dc2018-11-08 16:59:28 -05002170 SPIRVInstruction *inst =
2171 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002172 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002173 break;
2174 }
2175 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002176 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002177 SPIRVInstList.push_back(Inst);
2178 break;
2179 }
2180 case Type::FunctionTyID: {
2181 // Generate SPIRV instruction for function type.
2182 FunctionType *FTy = cast<FunctionType>(Ty);
2183
2184 // Ops[0] = Return Type ID
2185 // Ops[1] ... Ops[n] = Parameter Type IDs
2186 SPIRVOperandList Ops;
2187
2188 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002189 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002190
2191 // Find SPIRV instructions for parameter types
2192 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2193 // Find SPIRV instruction for parameter type.
2194 auto ParamTy = FTy->getParamType(k);
2195 if (ParamTy->isPointerTy()) {
2196 auto PointeeTy = ParamTy->getPointerElementType();
2197 if (PointeeTy->isStructTy() &&
2198 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2199 ParamTy = PointeeTy;
2200 }
2201 }
2202
David Netoc6f3ab22018-04-06 18:02:31 -04002203 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002204 }
2205
David Neto87846742018-04-11 17:36:22 -04002206 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002207 SPIRVInstList.push_back(Inst);
2208 break;
2209 }
2210 }
2211 }
2212
2213 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002214 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002215 //
2216 // Generate OpTypeSampledImage.
2217 //
2218 // Ops[0] = Image Type ID
2219 //
2220 SPIRVOperandList Ops;
2221
David Netoc6f3ab22018-04-06 18:02:31 -04002222 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002223
alan-bakerabd82722019-12-03 17:14:51 -05002224 // Update the image type map.
2225 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002226
David Neto87846742018-04-11 17:36:22 -04002227 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002228 SPIRVInstList.push_back(Inst);
2229 }
David Netoc6f3ab22018-04-06 18:02:31 -04002230
2231 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002232 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2233 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002234 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002235
2236 // Generate the spec constant.
2237 SPIRVOperandList Ops;
2238 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002239 SPIRVInstList.push_back(
2240 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002241
2242 // Generate the array type.
2243 Ops.clear();
2244 // The element type must have been created.
2245 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2246 assert(elem_ty_id);
2247 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2248
2249 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002250 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002251
2252 Ops.clear();
2253 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002254 SPIRVInstList.push_back(new SPIRVInstruction(
2255 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002256 }
David Neto22f144c2017-06-12 14:26:21 -04002257}
2258
// Emit one SPIR-V constant instruction (OpConstant, OpConstantTrue/False,
// OpConstantNull, OpConstantComposite, or OpUndef) for every constant in the
// module-wide constant list, assigning each a fresh result ID and recording
// it in the value map for later reference.
//
// Special cases handled below:
//  * Constants whose IDs were pre-allocated (e.g. array-length constants
//    emitted alongside OpTypeArray) are skipped via AllocatedVMap.
//  * <4 x i8> vector constants are packed into a single 32-bit integer
//    constant (first element in the most significant byte), reusing an
//    existing ID when the packed value was already emitted.
//  * Undef values become OpUndef, or OpConstantNull when the -hack-undef
//    option is on and the type is nullable.
void SPIRVProducerPass::GenerateSPIRVConstants() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  ValueMapType &AllocatedVMap = getAllocatedValueMap();
  ValueList &CstList = getConstantList();
  const bool hack_undef = clspv::Option::HackUndef();

  for (uint32_t i = 0; i < CstList.size(); i++) {
    // UniqueVector ids are 1-based.
    Constant *Cst = cast<Constant>(CstList[i + 1]);

    // OpTypeArray's constant was already generated.
    if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
      continue;
    }

    // Set ValueMap with nextID for reference later.
    // NOTE(review): this maps the constant to the ID *before* any of the
    // special cases below; the <4 x i8> paths may overwrite it with an
    // already-registered packed-int ID and `continue`.
    VMap[Cst] = nextID;

    //
    // Generate OpConstant.
    //

    // Ops[0] = Result Type ID
    // Ops[1] .. Ops[n] = Values LiteralNumber
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(Cst->getType()));

    std::vector<uint32_t> LiteralNum;
    spv::Op Opcode = spv::OpNop;

    if (isa<UndefValue>(Cst)) {
      // Ops[0] = Result Type ID
      Opcode = spv::OpUndef;
      // Under -hack-undef, nullable undefs become a well-defined null
      // constant instead of an unconstrained OpUndef.
      if (hack_undef && IsTypeNullable(Cst->getType())) {
        Opcode = spv::OpConstantNull;
      }
    } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
      unsigned BitWidth = CI->getBitWidth();
      if (BitWidth == 1) {
        // If the bitwidth of constant is 1, generate OpConstantTrue or
        // OpConstantFalse.
        if (CI->getZExtValue()) {
          // Ops[0] = Result Type ID
          Opcode = spv::OpConstantTrue;
        } else {
          // Ops[0] = Result Type ID
          Opcode = spv::OpConstantFalse;
        }
      } else {
        // Wider integers: emit the value as one or two 32-bit literal
        // words, low word first.
        auto V = CI->getZExtValue();
        LiteralNum.push_back(V & 0xFFFFFFFF);

        if (BitWidth > 32) {
          LiteralNum.push_back(V >> 32);
        }

        Opcode = spv::OpConstant;

        Ops << MkInteger(LiteralNum);
      }
    } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
      // Floats are emitted bit-exact via their APInt bit pattern,
      // low 32-bit word first for doubles.
      uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
      Type *CFPTy = CFP->getType();
      if (CFPTy->isFloatTy()) {
        LiteralNum.push_back(FPVal & 0xFFFFFFFF);
      } else if (CFPTy->isDoubleTy()) {
        LiteralNum.push_back(FPVal & 0xFFFFFFFF);
        LiteralNum.push_back(FPVal >> 32);
      } else {
        // Half and other FP widths are not handled here.
        CFPTy->print(errs());
        llvm_unreachable("Implement this ConstantFP Type");
      }

      Opcode = spv::OpConstant;

      Ops << MkFloat(LiteralNum);
    } else if (isa<ConstantDataSequential>(Cst) &&
               cast<ConstantDataSequential>(Cst)->isString()) {
      // String constants are not supported.
      Cst->print(errs());
      llvm_unreachable("Implement this Constant");

    } else if (const ConstantDataSequential *CDS =
                   dyn_cast<ConstantDataSequential>(Cst)) {
      // Let's convert <4 x i8> constant to int constant specially.
      // This case occurs when all the values are specified as constant
      // ints.
      Type *CstTy = Cst->getType();
      if (is4xi8vec(CstTy)) {
        LLVMContext &Context = CstTy->getContext();

        //
        // Generate OpConstant with OpTypeInt 32 0.
        //
        // Pack the four i8 lanes into one i32, first lane in the most
        // significant byte.
        uint32_t IntValue = 0;
        for (unsigned k = 0; k < 4; k++) {
          const uint64_t Val = CDS->getElementAsInteger(k);
          IntValue = (IntValue << 8) | (Val & 0xffu);
        }

        Type *i32 = Type::getInt32Ty(Context);
        Constant *CstInt = ConstantInt::get(i32, IntValue);
        // If this constant is already registered on VMap, use it.
        if (VMap.count(CstInt)) {
          uint32_t CstID = VMap[CstInt];
          VMap[Cst] = CstID;
          continue;
        }

        Ops << MkNum(IntValue);

        auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
        SPIRVInstList.push_back(CstInst);

        continue;
      }

      // A normal constant-data-sequential case.
      // Element constants must already have IDs in VMap at this point.
      for (unsigned k = 0; k < CDS->getNumElements(); k++) {
        Constant *EleCst = CDS->getElementAsConstant(k);
        uint32_t EleCstID = VMap[EleCst];
        Ops << MkId(EleCstID);
      }

      Opcode = spv::OpConstantComposite;
    } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
      // Let's convert <4 x i8> constant to int constant specially.
      // This case occurs when at least one of the values is an undef.
      Type *CstTy = Cst->getType();
      if (is4xi8vec(CstTy)) {
        LLVMContext &Context = CstTy->getContext();

        //
        // Generate OpConstant with OpTypeInt 32 0.
        //
        // Same byte packing as the ConstantDataSequential case above;
        // undef lanes contribute a zero byte.
        uint32_t IntValue = 0;
        for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
             I != E; ++I) {
          uint64_t Val = 0;
          const Value *CV = *I;
          if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
            Val = CI2->getZExtValue();
          }
          IntValue = (IntValue << 8) | (Val & 0xffu);
        }

        Type *i32 = Type::getInt32Ty(Context);
        Constant *CstInt = ConstantInt::get(i32, IntValue);
        // If this constant is already registered on VMap, use it.
        if (VMap.count(CstInt)) {
          uint32_t CstID = VMap[CstInt];
          VMap[Cst] = CstID;
          continue;
        }

        Ops << MkNum(IntValue);

        auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
        SPIRVInstList.push_back(CstInst);

        continue;
      }

      // We use a constant composite in SPIR-V for our constant aggregate in
      // LLVM.
      Opcode = spv::OpConstantComposite;

      for (unsigned k = 0; k < CA->getNumOperands(); k++) {
        // Look up the ID of the element of this aggregate (which we will
        // previously have created a constant for).
        uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];

        // And add an operand to the composite we are constructing
        Ops << MkId(ElementConstantID);
      }
    } else if (Cst->isNullValue()) {
      Opcode = spv::OpConstantNull;
    } else {
      Cst->print(errs());
      llvm_unreachable("Unsupported Constant???");
    }

    if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
      // Null pointer requires variable pointers.
      setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
    }

    auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(CstInst);
  }
}
2451
// Emit an OpVariable (UniformConstant storage) plus DescriptorSet/Binding
// decorations for every entry in the literal sampler map, whether or not the
// sampler is actually referenced by the translation unit.
//
// Descriptor set/binding assignments for *used* samplers are recovered from
// calls to the clspv literal-sampler builtin; unused samplers get descriptor
// set 0 and the lowest binding not already taken. Used samplers are also
// recorded in the version0 descriptor map.
void SPIRVProducerPass::GenerateSamplers(Module &M) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();

  auto &sampler_map = getSamplerMap();
  SamplerMapIndexToIDMap.clear();
  // Maps from the sampler literal value to its variable ID, descriptor set,
  // and binding, respectively. Only used samplers appear in the latter two.
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
  DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
  DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;

  // We might have samplers in the sampler map that are not used
  // in the translation unit. We need to allocate variables
  // for them and bindings too.
  DenseSet<unsigned> used_bindings;

  auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
  if (!var_fn)
    return;
  for (auto user : var_fn->users()) {
    // Populate SamplerLiteralToDescriptorSetMap and
    // SamplerLiteralToBindingMap.
    //
    // Look for calls like
    //  call %opencl.sampler_t addrspace(2)*
    //       @clspv.sampler.var.literal(
    //          i32 descriptor,
    //          i32 binding,
    //          i32 index-into-sampler-map)
    if (auto *call = dyn_cast<CallInst>(user)) {
      const size_t index_into_sampler_map = static_cast<size_t>(
          dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
      if (index_into_sampler_map >= sampler_map.size()) {
        errs() << "Out of bounds index to sampler map: "
               << index_into_sampler_map;
        llvm_unreachable("bad sampler init: out of bounds");
      }

      auto sampler_value = sampler_map[index_into_sampler_map].first;
      const auto descriptor_set = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
      const auto binding = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());

      SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
      SamplerLiteralToBindingMap[sampler_value] = binding;
      used_bindings.insert(binding);
    }
  }

  // Now emit a variable (and its decorations) for every sampler-map entry,
  // in map order.
  unsigned index = 0;
  for (auto SamplerLiteral : sampler_map) {
    // Generate OpVariable.
    //
    // GIDOps[0] : Result Type ID
    // GIDOps[1] : Storage Class
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(SamplerTy))
        << MkNum(spv::StorageClassUniformConstant);

    auto sampler_var_id = nextID++;
    auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
    SPIRVInstList.push_back(Inst);

    SamplerMapIndexToIDMap[index] = sampler_var_id;
    SamplerLiteralToIDMap[SamplerLiteral.first] = sampler_var_id;

    // Find Insert Point for OpDecorate.
    // Decorations must precede all non-decoration instructions, so insert
    // just before the first instruction that is not a decoration or
    // OpExtInstImport.
    auto DecoInsertPoint =
        std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                     [](SPIRVInstruction *Inst) -> bool {
                       return Inst->getOpcode() != spv::OpDecorate &&
                              Inst->getOpcode() != spv::OpMemberDecorate &&
                              Inst->getOpcode() != spv::OpExtInstImport;
                     });

    // Ops[0] = Target ID
    // Ops[1] = Decoration (DescriptorSet)
    // Ops[2] = LiteralNumber according to Decoration
    Ops.clear();

    unsigned descriptor_set;
    unsigned binding;
    if (SamplerLiteralToBindingMap.find(SamplerLiteral.first) ==
        SamplerLiteralToBindingMap.end()) {
      // This sampler is not actually used. Find the next one.
      for (binding = 0; used_bindings.count(binding); binding++)
        ;
      descriptor_set = 0; // Literal samplers always use descriptor set 0.
      used_bindings.insert(binding);
    } else {
      descriptor_set = SamplerLiteralToDescriptorSetMap[SamplerLiteral.first];
      binding = SamplerLiteralToBindingMap[SamplerLiteral.first];

      // Only used samplers are reported in the descriptor map.
      version0::DescriptorMapEntry::SamplerData sampler_data = {
          SamplerLiteral.first};
      descriptorMapEntries->emplace_back(std::move(sampler_data),
                                         descriptor_set, binding);
    }

    Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
        << MkNum(descriptor_set);

    auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
    SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);

    // Ops[0] = Target ID
    // Ops[1] = Decoration (Binding)
    // Ops[2] = LiteralNumber according to Decoration
    Ops.clear();
    Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
        << MkNum(binding);

    auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
    SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);

    index++;
  }
}
David Neto22f144c2017-06-12 14:26:21 -04002570
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002571void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002572 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2573 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002574
David Neto862b7d82018-06-14 18:48:37 -04002575 // Generate variables. Make one for each of resource var info object.
2576 for (auto *info : ModuleOrderedResourceVars) {
2577 Type *type = info->var_fn->getReturnType();
2578 // Remap the address space for opaque types.
2579 switch (info->arg_kind) {
2580 case clspv::ArgKind::Sampler:
2581 case clspv::ArgKind::ReadOnlyImage:
2582 case clspv::ArgKind::WriteOnlyImage:
2583 type = PointerType::get(type->getPointerElementType(),
2584 clspv::AddressSpace::UniformConstant);
2585 break;
2586 default:
2587 break;
2588 }
David Neto22f144c2017-06-12 14:26:21 -04002589
David Neto862b7d82018-06-14 18:48:37 -04002590 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002591
David Neto862b7d82018-06-14 18:48:37 -04002592 const auto type_id = lookupType(type);
2593 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2594 SPIRVOperandList Ops;
2595 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002596
David Neto862b7d82018-06-14 18:48:37 -04002597 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2598 SPIRVInstList.push_back(Inst);
2599
2600 // Map calls to the variable-builtin-function.
2601 for (auto &U : info->var_fn->uses()) {
2602 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2603 const auto set = unsigned(
2604 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2605 const auto binding = unsigned(
2606 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2607 if (set == info->descriptor_set && binding == info->binding) {
2608 switch (info->arg_kind) {
2609 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002610 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002611 case clspv::ArgKind::Pod:
2612 // The call maps to the variable directly.
2613 VMap[call] = info->var_id;
2614 break;
2615 case clspv::ArgKind::Sampler:
2616 case clspv::ArgKind::ReadOnlyImage:
2617 case clspv::ArgKind::WriteOnlyImage:
2618 // The call maps to a load we generate later.
2619 ResourceVarDeferredLoadCalls[call] = info->var_id;
2620 break;
2621 default:
2622 llvm_unreachable("Unhandled arg kind");
2623 }
2624 }
David Neto22f144c2017-06-12 14:26:21 -04002625 }
David Neto862b7d82018-06-14 18:48:37 -04002626 }
2627 }
David Neto22f144c2017-06-12 14:26:21 -04002628
David Neto862b7d82018-06-14 18:48:37 -04002629 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002630
David Neto862b7d82018-06-14 18:48:37 -04002631 // Find Insert Point for OpDecorate.
2632 auto DecoInsertPoint =
2633 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2634 [](SPIRVInstruction *Inst) -> bool {
2635 return Inst->getOpcode() != spv::OpDecorate &&
2636 Inst->getOpcode() != spv::OpMemberDecorate &&
2637 Inst->getOpcode() != spv::OpExtInstImport;
2638 });
2639
2640 SPIRVOperandList Ops;
2641 for (auto *info : ModuleOrderedResourceVars) {
2642 // Decorate with DescriptorSet and Binding.
2643 Ops.clear();
2644 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2645 << MkNum(info->descriptor_set);
2646 SPIRVInstList.insert(DecoInsertPoint,
2647 new SPIRVInstruction(spv::OpDecorate, Ops));
2648
2649 Ops.clear();
2650 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2651 << MkNum(info->binding);
2652 SPIRVInstList.insert(DecoInsertPoint,
2653 new SPIRVInstruction(spv::OpDecorate, Ops));
2654
alan-bakere9308012019-03-15 10:25:13 -04002655 if (info->coherent) {
2656 // Decorate with Coherent if required for the variable.
2657 Ops.clear();
2658 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2659 SPIRVInstList.insert(DecoInsertPoint,
2660 new SPIRVInstruction(spv::OpDecorate, Ops));
2661 }
2662
David Neto862b7d82018-06-14 18:48:37 -04002663 // Generate NonWritable and NonReadable
2664 switch (info->arg_kind) {
2665 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002666 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002667 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2668 clspv::AddressSpace::Constant) {
2669 Ops.clear();
2670 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2671 SPIRVInstList.insert(DecoInsertPoint,
2672 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002673 }
David Neto862b7d82018-06-14 18:48:37 -04002674 break;
David Neto862b7d82018-06-14 18:48:37 -04002675 case clspv::ArgKind::WriteOnlyImage:
2676 Ops.clear();
2677 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2678 SPIRVInstList.insert(DecoInsertPoint,
2679 new SPIRVInstruction(spv::OpDecorate, Ops));
2680 break;
2681 default:
2682 break;
David Neto22f144c2017-06-12 14:26:21 -04002683 }
2684 }
2685}
2686
2687void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002688 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002689 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2690 ValueMapType &VMap = getValueMap();
2691 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002692 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002693
2694 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2695 Type *Ty = GV.getType();
2696 PointerType *PTy = cast<PointerType>(Ty);
2697
2698 uint32_t InitializerID = 0;
2699
2700 // Workgroup size is handled differently (it goes into a constant)
2701 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2702 std::vector<bool> HasMDVec;
2703 uint32_t PrevXDimCst = 0xFFFFFFFF;
2704 uint32_t PrevYDimCst = 0xFFFFFFFF;
2705 uint32_t PrevZDimCst = 0xFFFFFFFF;
2706 for (Function &Func : *GV.getParent()) {
2707 if (Func.isDeclaration()) {
2708 continue;
2709 }
2710
2711 // We only need to check kernels.
2712 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2713 continue;
2714 }
2715
2716 if (const MDNode *MD =
2717 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2718 uint32_t CurXDimCst = static_cast<uint32_t>(
2719 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2720 uint32_t CurYDimCst = static_cast<uint32_t>(
2721 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2722 uint32_t CurZDimCst = static_cast<uint32_t>(
2723 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2724
2725 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2726 PrevZDimCst == 0xFFFFFFFF) {
2727 PrevXDimCst = CurXDimCst;
2728 PrevYDimCst = CurYDimCst;
2729 PrevZDimCst = CurZDimCst;
2730 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2731 CurZDimCst != PrevZDimCst) {
2732 llvm_unreachable(
2733 "reqd_work_group_size must be the same across all kernels");
2734 } else {
2735 continue;
2736 }
2737
2738 //
2739 // Generate OpConstantComposite.
2740 //
2741 // Ops[0] : Result Type ID
2742 // Ops[1] : Constant size for x dimension.
2743 // Ops[2] : Constant size for y dimension.
2744 // Ops[3] : Constant size for z dimension.
2745 SPIRVOperandList Ops;
2746
2747 uint32_t XDimCstID =
2748 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2749 uint32_t YDimCstID =
2750 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2751 uint32_t ZDimCstID =
2752 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2753
2754 InitializerID = nextID;
2755
David Neto257c3892018-04-11 13:19:45 -04002756 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2757 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002758
David Neto87846742018-04-11 17:36:22 -04002759 auto *Inst =
2760 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002761 SPIRVInstList.push_back(Inst);
2762
2763 HasMDVec.push_back(true);
2764 } else {
2765 HasMDVec.push_back(false);
2766 }
2767 }
2768
2769 // Check all kernels have same definitions for work_group_size.
2770 bool HasMD = false;
2771 if (!HasMDVec.empty()) {
2772 HasMD = HasMDVec[0];
2773 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2774 if (HasMD != HasMDVec[i]) {
2775 llvm_unreachable(
2776 "Kernels should have consistent work group size definition");
2777 }
2778 }
2779 }
2780
2781 // If all kernels do not have metadata for reqd_work_group_size, generate
2782 // OpSpecConstants for x/y/z dimension.
2783 if (!HasMD) {
2784 //
2785 // Generate OpSpecConstants for x/y/z dimension.
2786 //
2787 // Ops[0] : Result Type ID
2788 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2789 uint32_t XDimCstID = 0;
2790 uint32_t YDimCstID = 0;
2791 uint32_t ZDimCstID = 0;
2792
David Neto22f144c2017-06-12 14:26:21 -04002793 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002794 uint32_t result_type_id =
2795 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002796
David Neto257c3892018-04-11 13:19:45 -04002797 // X Dimension
2798 Ops << MkId(result_type_id) << MkNum(1);
2799 XDimCstID = nextID++;
2800 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002801 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002802
2803 // Y Dimension
2804 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002805 Ops << MkId(result_type_id) << MkNum(1);
2806 YDimCstID = nextID++;
2807 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002808 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002809
2810 // Z Dimension
2811 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002812 Ops << MkId(result_type_id) << MkNum(1);
2813 ZDimCstID = nextID++;
2814 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002815 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002816
David Neto257c3892018-04-11 13:19:45 -04002817 BuiltinDimVec.push_back(XDimCstID);
2818 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002819 BuiltinDimVec.push_back(ZDimCstID);
2820
David Neto22f144c2017-06-12 14:26:21 -04002821 //
2822 // Generate OpSpecConstantComposite.
2823 //
2824 // Ops[0] : Result Type ID
2825 // Ops[1] : Constant size for x dimension.
2826 // Ops[2] : Constant size for y dimension.
2827 // Ops[3] : Constant size for z dimension.
2828 InitializerID = nextID;
2829
2830 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002831 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2832 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002833
David Neto87846742018-04-11 17:36:22 -04002834 auto *Inst =
2835 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002836 SPIRVInstList.push_back(Inst);
2837 }
2838 }
2839
David Neto22f144c2017-06-12 14:26:21 -04002840 VMap[&GV] = nextID;
2841
2842 //
2843 // Generate OpVariable.
2844 //
2845 // GIDOps[0] : Result Type ID
2846 // GIDOps[1] : Storage Class
2847 SPIRVOperandList Ops;
2848
David Neto85082642018-03-24 06:55:20 -07002849 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002850 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002851
David Neto85082642018-03-24 06:55:20 -07002852 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002853 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002854 clspv::Option::ModuleConstantsInStorageBuffer();
2855
Kévin Petit23d5f182019-08-13 16:21:29 +01002856 if (GV.hasInitializer()) {
2857 auto GVInit = GV.getInitializer();
2858 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2859 assert(VMap.count(GVInit) == 1);
2860 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002861 }
2862 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002863
2864 if (0 != InitializerID) {
2865 // Emit the ID of the intiializer as part of the variable definition.
2866 Ops << MkId(InitializerID);
2867 }
David Neto85082642018-03-24 06:55:20 -07002868 const uint32_t var_id = nextID++;
2869
David Neto87846742018-04-11 17:36:22 -04002870 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002871 SPIRVInstList.push_back(Inst);
2872
2873 // If we have a builtin.
2874 if (spv::BuiltInMax != BuiltinType) {
2875 // Find Insert Point for OpDecorate.
2876 auto DecoInsertPoint =
2877 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2878 [](SPIRVInstruction *Inst) -> bool {
2879 return Inst->getOpcode() != spv::OpDecorate &&
2880 Inst->getOpcode() != spv::OpMemberDecorate &&
2881 Inst->getOpcode() != spv::OpExtInstImport;
2882 });
2883 //
2884 // Generate OpDecorate.
2885 //
2886 // DOps[0] = Target ID
2887 // DOps[1] = Decoration (Builtin)
2888 // DOps[2] = BuiltIn ID
2889 uint32_t ResultID;
2890
2891 // WorkgroupSize is different, we decorate the constant composite that has
2892 // its value, rather than the variable that we use to access the value.
2893 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2894 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002895 // Save both the value and variable IDs for later.
2896 WorkgroupSizeValueID = InitializerID;
2897 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002898 } else {
2899 ResultID = VMap[&GV];
2900 }
2901
2902 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002903 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2904 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002905
David Neto87846742018-04-11 17:36:22 -04002906 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002907 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002908 } else if (module_scope_constant_external_init) {
2909 // This module scope constant is initialized from a storage buffer with data
2910 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002911 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002912
David Neto862b7d82018-06-14 18:48:37 -04002913 // Emit the intializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002914 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2915 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002916 std::string hexbytes;
2917 llvm::raw_string_ostream str(hexbytes);
2918 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002919 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2920 str.str()};
2921 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2922 0);
David Neto85082642018-03-24 06:55:20 -07002923
2924 // Find Insert Point for OpDecorate.
2925 auto DecoInsertPoint =
2926 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2927 [](SPIRVInstruction *Inst) -> bool {
2928 return Inst->getOpcode() != spv::OpDecorate &&
2929 Inst->getOpcode() != spv::OpMemberDecorate &&
2930 Inst->getOpcode() != spv::OpExtInstImport;
2931 });
2932
David Neto257c3892018-04-11 13:19:45 -04002933 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002934 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002935 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2936 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002937 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002938
2939 // OpDecorate %var DescriptorSet <descriptor_set>
2940 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002941 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2942 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002943 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04002944 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04002945 }
2946}
2947
David Netoc6f3ab22018-04-06 18:02:31 -04002948void SPIRVProducerPass::GenerateWorkgroupVars() {
2949 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04002950 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2951 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002952 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002953
2954 // Generate OpVariable.
2955 //
2956 // GIDOps[0] : Result Type ID
2957 // GIDOps[1] : Storage Class
2958 SPIRVOperandList Ops;
2959 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
2960
2961 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002962 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002963 }
2964}
2965
// Emits descriptor-map entries for kernel F: one KernelArgData record per
// kernel argument carrying the argument's name, ordinal, kind, spec id,
// offset/size, and (for resource arguments) descriptor set and binding.
// Non-kernel functions produce nothing.
void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
                                                  Function &F) {
  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    return;
  }
  // Gather the list of resources that are used by this function's arguments.
  auto &resource_var_at_index = FunctionToResourceVarsMap[&F];

  // TODO(alan-baker): This should become unnecessary by fixing the rest of the
  // flow to generate pod_ubo arguments earlier.
  // Rewrites "pod" to "pod_ubo" when POD args are placed in a uniform buffer.
  auto remap_arg_kind = [](StringRef argKind) {
    std::string kind =
        clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
            ? "pod_ubo"
            : argKind;
    return GetArgKindFromName(kind);
  };

  auto *fty = F.getType()->getPointerElementType();
  auto *func_ty = dyn_cast<FunctionType>(fty);

  // If we've clustered POD arguments, then argument details are in metadata.
  // If an argument maps to a resource variable, then get descriptor set and
  // binding from the resoure variable. Other info comes from the metadata.
  const auto *arg_map = F.getMetadata("kernel_arg_map");
  if (arg_map) {
    // Metadata operand layout per argument (see the extracts below):
    //   0: name, 1: old index, 2: remapped index, 3: offset, 4: size,
    //   5: kind string, 6: spec id.
    for (const auto &arg : arg_map->operands()) {
      const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
      assert(arg_node->getNumOperands() == 7);
      const auto name =
          dyn_cast<MDString>(arg_node->getOperand(0))->getString();
      const auto old_index =
          dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
      // Remapped argument index
      const size_t new_index = static_cast<size_t>(
          dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
      const auto offset =
          dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
      const auto arg_size =
          dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
      const auto argKind = remap_arg_kind(
          dyn_cast<MDString>(arg_node->getOperand(5))->getString());
      const auto spec_id =
          dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();

      uint32_t descriptor_set = 0;
      uint32_t binding = 0;
      version0::DescriptorMapEntry::KernelArgData kernel_data = {
          F.getName(), name, static_cast<uint32_t>(old_index), argKind,
          static_cast<uint32_t>(spec_id),
          // This will be set below for pointer-to-local args.
          0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
      if (spec_id > 0) {
        // A positive spec id marks a pointer-to-local argument: record the
        // element size instead of a descriptor set/binding.
        kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
            func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
            DL));
      } else {
        auto *info = resource_var_at_index[new_index];
        assert(info);
        descriptor_set = info->descriptor_set;
        binding = info->binding;
      }
      descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
                                         binding);
    }
  } else {
    // There is no argument map.
    // Take descriptor info from the resource variable calls.
    // Take argument name and size from the arguments list.

    SmallVector<Argument *, 4> arguments;
    for (auto &arg : F.args()) {
      arguments.push_back(&arg);
    }

    unsigned arg_index = 0;
    for (auto *info : resource_var_at_index) {
      if (info) {
        auto arg = arguments[arg_index];
        unsigned arg_size = 0;
        if (info->arg_kind == clspv::ArgKind::Pod) {
          arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
        }

        // Local pointer arguments are unused in this case. Offset is always
        // zero.
        version0::DescriptorMapEntry::KernelArgData kernel_data = {
            F.getName(), arg->getName(),
            arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
            0, 0,
            0, arg_size};
        descriptorMapEntries->emplace_back(std::move(kernel_data),
                                           info->descriptor_set, info->binding);
      }
      arg_index++;
    }
    // Generate mappings for pointer-to-local arguments.
    for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
      Argument *arg = arguments[arg_index];
      auto where = LocalArgSpecIds.find(arg);
      if (where != LocalArgSpecIds.end()) {
        auto &local_arg_info = LocalSpecIdInfoMap[where->second];
        // Pod arguments members are unused in this case.
        version0::DescriptorMapEntry::KernelArgData kernel_data = {
            F.getName(),
            arg->getName(),
            arg_index,
            ArgKind::Local,
            static_cast<uint32_t>(local_arg_info.spec_id),
            static_cast<uint32_t>(
                GetTypeAllocSize(local_arg_info.elem_type, DL)),
            0,
            0};
        // Pointer-to-local arguments do not utilize descriptor set and binding.
        descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
      }
    }
  }
}
3085
// Emits the OpFunction for F (and, for non-kernel functions, its
// OpFunctionParameter instructions), registering the new SPIR-V ids in the
// value map. Kernels additionally get recorded as entry points; their SPIR-V
// function type is rewritten to take no parameters, since kernel arguments are
// communicated via resource variables instead.
void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  EntryPointVecType &EntryPoints = getEntryPointVec();
  auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
  auto &GlobalConstArgSet = getGlobalConstArgSet();

  FunctionType *FTy = F.getFunctionType();

  //
  // Generate OPFunction.
  //

  // FOps[0] : Result Type ID
  // FOps[1] : Function Control
  // FOps[2] : Function Type ID
  SPIRVOperandList FOps;

  // Find SPIRV instruction for return type.
  FOps << MkId(lookupType(FTy->getReturnType()));

  // Check function attributes for SPIRV Function Control.
  uint32_t FuncControl = spv::FunctionControlMaskNone;
  if (F.hasFnAttribute(Attribute::AlwaysInline)) {
    FuncControl |= spv::FunctionControlInlineMask;
  }
  if (F.hasFnAttribute(Attribute::NoInline)) {
    FuncControl |= spv::FunctionControlDontInlineMask;
  }
  // TODO: Check llvm attribute for Function Control Pure.
  if (F.hasFnAttribute(Attribute::ReadOnly)) {
    FuncControl |= spv::FunctionControlPureMask;
  }
  // TODO: Check llvm attribute for Function Control Const.
  if (F.hasFnAttribute(Attribute::ReadNone)) {
    FuncControl |= spv::FunctionControlConstMask;
  }

  FOps << MkNum(FuncControl);

  uint32_t FTyID;
  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    // Kernels are emitted with an empty parameter list; arguments are
    // accessed through resource variables rather than function parameters.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FTyID = lookupType(NewFTy);
  } else {
    // Handle regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
    } else {
      FTyID = lookupType(FTy);
    }
  }

  FOps << MkId(FTyID);

  // Record entry points before nextID is consumed below, so the entry point
  // id matches the OpFunction result id.
  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    EntryPoints.push_back(std::make_pair(&F, nextID));
  }

  VMap[&F] = nextID;

  if (clspv::Option::ShowIDs()) {
    errs() << "Function " << F.getName() << " is " << nextID << "\n";
  }
  // Generate SPIRV instruction for function.
  auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
  SPIRVInstList.push_back(FuncInst);

  //
  // Generate OpFunctionParameter for Normal function.
  //

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {

    // Find Insert Point for OpDecorate.
    auto DecoInsertPoint =
        std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                     [](SPIRVInstruction *Inst) -> bool {
                       return Inst->getOpcode() != spv::OpDecorate &&
                              Inst->getOpcode() != spv::OpMemberDecorate &&
                              Inst->getOpcode() != spv::OpExtInstImport;
                     });

    // Iterate Argument for name instead of param type from function type.
    unsigned ArgIdx = 0;
    for (Argument &Arg : F.args()) {
      uint32_t param_id = nextID++;
      VMap[&Arg] = param_id;

      if (CalledWithCoherentResource(Arg)) {
        // If the arg is passed a coherent resource ever, then decorate this
        // parameter with Coherent too.
        SPIRVOperandList decoration_ops;
        decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
        SPIRVInstList.insert(
            DecoInsertPoint,
            new SPIRVInstruction(spv::OpDecorate, decoration_ops));
      }

      // ParamOps[0] : Result Type ID
      SPIRVOperandList ParamOps;

      // Find SPIRV instruction for parameter type.
      uint32_t ParamTyID = lookupType(Arg.getType());
      if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
        if (GlobalConstFuncTyMap.count(FTy)) {
          if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
            // This parameter was rewritten to take the module-scope-private
            // copy of a global constant; use the remapped pointer type.
            Type *EleTy = PTy->getPointerElementType();
            Type *ArgTy =
                PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
            ParamTyID = lookupType(ArgTy);
            GlobalConstArgSet.insert(&Arg);
          }
        }
      }
      ParamOps << MkId(ParamTyID);

      // Generate SPIRV instruction for parameter.
      auto *ParamInst =
          new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
      SPIRVInstList.push_back(ParamInst);

      ArgIdx++;
    }
  }
}
3214
// Emits the module-level preamble instructions — OpCapability, OpExtension,
// OpMemoryModel, OpEntryPoint, OpExecutionMode, OpSource, and the SpecId
// decorations for the workgroup-size builtin — inserting them at the front of
// the instruction list (before previously generated types/constants/globals).
void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  EntryPointVecType &EntryPoints = getEntryPointVec();
  ValueMapType &VMap = getValueMap();
  ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
  uint32_t &ExtInstImportID = getOpExtInstImportID();
  std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();

  // Set up insert point.
  auto InsertPoint = SPIRVInstList.begin();

  //
  // Generate OpCapability
  //
  // TODO: Which llvm information is mapped to SPIRV Capapbility?

  // Ops[0] = Capability
  SPIRVOperandList Ops;

  auto *CapInst =
      new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
  SPIRVInstList.insert(InsertPoint, CapInst);

  // Scan every type used by the module and add the capability each one
  // requires (small ints, 16/64-bit floats, write-only images).
  for (Type *Ty : getTypeList()) {
    if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
      // Generate OpCapability for i8 type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                {MkNum(spv::CapabilityInt8)}));
    } else if (Ty->isIntegerTy(16)) {
      // Generate OpCapability for i16 type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                {MkNum(spv::CapabilityInt16)}));
    } else if (Ty->isIntegerTy(64)) {
      // Generate OpCapability for i64 type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                {MkNum(spv::CapabilityInt64)}));
    } else if (Ty->isHalfTy()) {
      // Generate OpCapability for half type.
      SPIRVInstList.insert(
          InsertPoint, new SPIRVInstruction(spv::OpCapability,
                                            {MkNum(spv::CapabilityFloat16)}));
    } else if (Ty->isDoubleTy()) {
      // Generate OpCapability for double type.
      SPIRVInstList.insert(
          InsertPoint, new SPIRVInstruction(spv::OpCapability,
                                            {MkNum(spv::CapabilityFloat64)}));
    } else if (auto *STy = dyn_cast<StructType>(Ty)) {
      if (STy->isOpaque()) {
        if (STy->getName().startswith("opencl.image2d_wo_t") ||
            STy->getName().startswith("opencl.image3d_wo_t")) {
          // Generate OpCapability for write only image type.
          SPIRVInstList.insert(
              InsertPoint,
              new SPIRVInstruction(
                  spv::OpCapability,
                  {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
        }
      }
    }
  }

  { // OpCapability ImageQuery
    // Needed iff any image-size query builtin survives in the module.
    bool hasImageQuery = false;
    for (const auto &SymVal : module.getValueSymbolTable()) {
      if (auto F = dyn_cast<Function>(SymVal.getValue())) {
        if (clspv::IsGetImageHeight(F) || clspv::IsGetImageWidth(F)) {
          hasImageQuery = true;
          break;
        }
      }
    }

    if (hasImageQuery) {
      auto *ImageQueryCapInst = new SPIRVInstruction(
          spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
      SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
    }
  }

  if (hasVariablePointers()) {
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointers);

    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpCapability, Ops));
  } else if (hasVariablePointersStorageBuffer()) {
    // The weaker storage-buffer-only form; implied by (and therefore skipped
    // when emitting) full VariablePointers above.
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);

    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpCapability, Ops));
  }

  // Always add the storage buffer extension
  {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
    SPIRVInstList.insert(InsertPoint, ExtensionInst);
  }

  if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
    SPIRVInstList.insert(InsertPoint, ExtensionInst);
  }

  // NOTE(review): presumably this steps past an OpExtInstImport instruction
  // sitting at the front of the list so that OpMemoryModel and later
  // instructions land after it, per SPIR-V module layout — confirm against
  // where the import is generated.
  if (ExtInstImportID) {
    ++InsertPoint;
  }

  //
  // Generate OpMemoryModel
  //
  // Memory model for Vulkan will always be GLSL450.

  // Ops[0] = Addressing Model
  // Ops[1] = Memory Model
  Ops.clear();
  Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);

  auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
  SPIRVInstList.insert(InsertPoint, MemModelInst);

  //
  // Generate OpEntryPoint
  //
  for (auto EntryPoint : EntryPoints) {
    // Ops[0] = Execution Model
    // Ops[1] = EntryPoint ID
    // Ops[2] = Name (Literal String)
    // ...
    //
    // TODO: Do we need to consider Interface ID for forward references???
    Ops.clear();
    const StringRef &name = EntryPoint.first->getName();
    Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
        << MkString(name);

    // Every entry point lists the same interface variables.
    for (Value *Interface : EntryPointInterfaces) {
      Ops << MkId(VMap[Interface]);
    }

    auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
    SPIRVInstList.insert(InsertPoint, EntryPointInst);
  }

  for (auto EntryPoint : EntryPoints) {
    if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
                               ->getMetadata("reqd_work_group_size")) {

      // A fixed reqd_work_group_size is incompatible with the spec-constant
      // workgroup size recorded in BuiltinDimVec; both must not be present.
      if (!BuiltinDimVec.empty()) {
        llvm_unreachable(
            "Kernels should have consistent work group size definition");
      }

      //
      // Generate OpExecutionMode
      //

      // Ops[0] = Entry Point ID
      // Ops[1] = Execution Mode
      // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
      Ops.clear();
      Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);

      uint32_t XDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
      uint32_t YDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
      uint32_t ZDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

      Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);

      auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
      SPIRVInstList.insert(InsertPoint, ExecModeInst);
    }
  }

  //
  // Generate OpSource.
  //
  // Ops[0] = SourceLanguage ID
  // Ops[1] = Version (LiteralNum)
  //
  Ops.clear();
  if (clspv::Option::CPlusPlus()) {
    Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
  } else {
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
  }

  auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
  SPIRVInstList.insert(InsertPoint, OpenSourceInst);

  if (!BuiltinDimVec.empty()) {
    //
    // Generate OpDecorates for x/y/z dimension.
    //
    // Ops[0] = Target ID
    // Ops[1] = Decoration (SpecId)
    // Ops[2] = Specialization Constant ID (Literal Number)

    // X Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));

    // Y Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));

    // Z Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));
  }
}
3460
David Netob6e2e062018-04-25 10:32:06 -04003461void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3462 // Work around a driver bug. Initializers on Private variables might not
3463 // work. So the start of the kernel should store the initializer value to the
3464 // variables. Yes, *every* entry point pays this cost if *any* entry point
3465 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3466 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003467 // TODO(dneto): Remove this at some point once fixed drivers are widely
3468 // available.
David Netob6e2e062018-04-25 10:32:06 -04003469 if (WorkgroupSizeVarID) {
3470 assert(WorkgroupSizeValueID);
3471
3472 SPIRVOperandList Ops;
3473 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3474
3475 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3476 getSPIRVInstList().push_back(Inst);
3477 }
3478}
3479
// Emits the body of F: an OpLabel per basic block followed by the translated
// instructions. Allocas are emitted first within each block (OpVariable must
// precede other instructions), and kernel entry blocks optionally get the
// workgroup-size initializer store workaround.
void SPIRVProducerPass::GenerateFuncBody(Function &F) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();

  const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;

  for (BasicBlock &BB : F) {
    // Register BasicBlock to ValueMap.
    VMap[&BB] = nextID;

    //
    // Generate OpLabel for Basic Block.
    //
    SPIRVOperandList Ops;
    auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
    SPIRVInstList.push_back(Inst);

    // OpVariable instructions must come first.
    // First pass: emit only the allocas.
    for (Instruction &I : BB) {
      if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
        // Allocating a pointer requires variable pointers.
        if (alloca->getAllocatedType()->isPointerTy()) {
          setVariablePointersCapabilities(
              alloca->getAllocatedType()->getPointerAddressSpace());
        }
        GenerateInstruction(I);
      }
    }

    // The initializer-store workaround must follow the allocas but precede
    // all other instructions in a kernel's entry block.
    if (&BB == &F.getEntryBlock() && IsKernel) {
      if (clspv::Option::HackInitializers()) {
        GenerateEntryPointInitialStores();
      }
    }

    // Second pass: emit everything except the already-handled allocas.
    for (Instruction &I : BB) {
      if (!isa<AllocaInst>(I)) {
        GenerateInstruction(I);
      }
    }
  }
}
3522
3523spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3524 const std::map<CmpInst::Predicate, spv::Op> Map = {
3525 {CmpInst::ICMP_EQ, spv::OpIEqual},
3526 {CmpInst::ICMP_NE, spv::OpINotEqual},
3527 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3528 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3529 {CmpInst::ICMP_ULT, spv::OpULessThan},
3530 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3531 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3532 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3533 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3534 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3535 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3536 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3537 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3538 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3539 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3540 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3541 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3542 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3543 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3544 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3545 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3546 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3547
3548 assert(0 != Map.count(I->getPredicate()));
3549
3550 return Map.at(I->getPredicate());
3551}
3552
3553spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3554 const std::map<unsigned, spv::Op> Map{
3555 {Instruction::Trunc, spv::OpUConvert},
3556 {Instruction::ZExt, spv::OpUConvert},
3557 {Instruction::SExt, spv::OpSConvert},
3558 {Instruction::FPToUI, spv::OpConvertFToU},
3559 {Instruction::FPToSI, spv::OpConvertFToS},
3560 {Instruction::UIToFP, spv::OpConvertUToF},
3561 {Instruction::SIToFP, spv::OpConvertSToF},
3562 {Instruction::FPTrunc, spv::OpFConvert},
3563 {Instruction::FPExt, spv::OpFConvert},
3564 {Instruction::BitCast, spv::OpBitcast}};
3565
3566 assert(0 != Map.count(I.getOpcode()));
3567
3568 return Map.at(I.getOpcode());
3569}
3570
3571spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003572 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003573 switch (I.getOpcode()) {
3574 default:
3575 break;
3576 case Instruction::Or:
3577 return spv::OpLogicalOr;
3578 case Instruction::And:
3579 return spv::OpLogicalAnd;
3580 case Instruction::Xor:
3581 return spv::OpLogicalNotEqual;
3582 }
3583 }
3584
alan-bakerb6b09dc2018-11-08 16:59:28 -05003585 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003586 {Instruction::Add, spv::OpIAdd},
3587 {Instruction::FAdd, spv::OpFAdd},
3588 {Instruction::Sub, spv::OpISub},
3589 {Instruction::FSub, spv::OpFSub},
3590 {Instruction::Mul, spv::OpIMul},
3591 {Instruction::FMul, spv::OpFMul},
3592 {Instruction::UDiv, spv::OpUDiv},
3593 {Instruction::SDiv, spv::OpSDiv},
3594 {Instruction::FDiv, spv::OpFDiv},
3595 {Instruction::URem, spv::OpUMod},
3596 {Instruction::SRem, spv::OpSRem},
3597 {Instruction::FRem, spv::OpFRem},
3598 {Instruction::Or, spv::OpBitwiseOr},
3599 {Instruction::Xor, spv::OpBitwiseXor},
3600 {Instruction::And, spv::OpBitwiseAnd},
3601 {Instruction::Shl, spv::OpShiftLeftLogical},
3602 {Instruction::LShr, spv::OpShiftRightLogical},
3603 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3604
3605 assert(0 != Map.count(I.getOpcode()));
3606
3607 return Map.at(I.getOpcode());
3608}
3609
3610void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3611 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3612 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003613 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3614 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3615
3616 // Register Instruction to ValueMap.
3617 if (0 == VMap[&I]) {
3618 VMap[&I] = nextID;
3619 }
3620
3621 switch (I.getOpcode()) {
3622 default: {
3623 if (Instruction::isCast(I.getOpcode())) {
3624 //
3625 // Generate SPIRV instructions for cast operators.
3626 //
3627
David Netod2de94a2017-08-28 17:27:47 -04003628 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003629 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003630 auto toI8 = Ty == Type::getInt8Ty(Context);
3631 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003632 // Handle zext, sext and uitofp with i1 type specially.
3633 if ((I.getOpcode() == Instruction::ZExt ||
3634 I.getOpcode() == Instruction::SExt ||
3635 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003636 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003637 //
3638 // Generate OpSelect.
3639 //
3640
3641 // Ops[0] = Result Type ID
3642 // Ops[1] = Condition ID
3643 // Ops[2] = True Constant ID
3644 // Ops[3] = False Constant ID
3645 SPIRVOperandList Ops;
3646
David Neto257c3892018-04-11 13:19:45 -04003647 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003648
David Neto22f144c2017-06-12 14:26:21 -04003649 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003650 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003651
3652 uint32_t TrueID = 0;
3653 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003654 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003655 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003656 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003657 } else {
3658 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3659 }
David Neto257c3892018-04-11 13:19:45 -04003660 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003661
3662 uint32_t FalseID = 0;
3663 if (I.getOpcode() == Instruction::ZExt) {
3664 FalseID = VMap[Constant::getNullValue(I.getType())];
3665 } else if (I.getOpcode() == Instruction::SExt) {
3666 FalseID = VMap[Constant::getNullValue(I.getType())];
3667 } else {
3668 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3669 }
David Neto257c3892018-04-11 13:19:45 -04003670 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003671
David Neto87846742018-04-11 17:36:22 -04003672 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003673 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003674 } else if (!clspv::Option::Int8Support() &&
3675 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003676 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3677 // 8 bits.
3678 // Before:
3679 // %result = trunc i32 %a to i8
3680 // After
3681 // %result = OpBitwiseAnd %uint %a %uint_255
3682
3683 SPIRVOperandList Ops;
3684
David Neto257c3892018-04-11 13:19:45 -04003685 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003686
3687 Type *UintTy = Type::getInt32Ty(Context);
3688 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003689 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003690
David Neto87846742018-04-11 17:36:22 -04003691 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003692 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003693 } else {
3694 // Ops[0] = Result Type ID
3695 // Ops[1] = Source Value ID
3696 SPIRVOperandList Ops;
3697
David Neto257c3892018-04-11 13:19:45 -04003698 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003699
David Neto87846742018-04-11 17:36:22 -04003700 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003701 SPIRVInstList.push_back(Inst);
3702 }
3703 } else if (isa<BinaryOperator>(I)) {
3704 //
3705 // Generate SPIRV instructions for binary operators.
3706 //
3707
3708 // Handle xor with i1 type specially.
3709 if (I.getOpcode() == Instruction::Xor &&
3710 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003711 ((isa<ConstantInt>(I.getOperand(0)) &&
3712 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3713 (isa<ConstantInt>(I.getOperand(1)) &&
3714 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003715 //
3716 // Generate OpLogicalNot.
3717 //
3718 // Ops[0] = Result Type ID
3719 // Ops[1] = Operand
3720 SPIRVOperandList Ops;
3721
David Neto257c3892018-04-11 13:19:45 -04003722 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003723
3724 Value *CondV = I.getOperand(0);
3725 if (isa<Constant>(I.getOperand(0))) {
3726 CondV = I.getOperand(1);
3727 }
David Neto257c3892018-04-11 13:19:45 -04003728 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003729
David Neto87846742018-04-11 17:36:22 -04003730 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003731 SPIRVInstList.push_back(Inst);
3732 } else {
3733 // Ops[0] = Result Type ID
3734 // Ops[1] = Operand 0
3735 // Ops[2] = Operand 1
3736 SPIRVOperandList Ops;
3737
David Neto257c3892018-04-11 13:19:45 -04003738 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3739 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003740
David Neto87846742018-04-11 17:36:22 -04003741 auto *Inst =
3742 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003743 SPIRVInstList.push_back(Inst);
3744 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003745 } else if (I.getOpcode() == Instruction::FNeg) {
3746 // The only unary operator.
3747 //
3748 // Ops[0] = Result Type ID
3749 // Ops[1] = Operand 0
3750 SPIRVOperandList ops;
3751
3752 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3753 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3754 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003755 } else {
3756 I.print(errs());
3757 llvm_unreachable("Unsupported instruction???");
3758 }
3759 break;
3760 }
3761 case Instruction::GetElementPtr: {
3762 auto &GlobalConstArgSet = getGlobalConstArgSet();
3763
3764 //
3765 // Generate OpAccessChain.
3766 //
3767 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3768
3769 //
3770 // Generate OpAccessChain.
3771 //
3772
3773 // Ops[0] = Result Type ID
3774 // Ops[1] = Base ID
3775 // Ops[2] ... Ops[n] = Indexes ID
3776 SPIRVOperandList Ops;
3777
alan-bakerb6b09dc2018-11-08 16:59:28 -05003778 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003779 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3780 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3781 // Use pointer type with private address space for global constant.
3782 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003783 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003784 }
David Neto257c3892018-04-11 13:19:45 -04003785
3786 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003787
David Neto862b7d82018-06-14 18:48:37 -04003788 // Generate the base pointer.
3789 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003790
David Neto862b7d82018-06-14 18:48:37 -04003791 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003792
3793 //
3794 // Follows below rules for gep.
3795 //
David Neto862b7d82018-06-14 18:48:37 -04003796 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3797 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003798 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3799 // first index.
3800 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3801 // use gep's first index.
3802 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
3803 // gep's first index.
3804 //
3805 spv::Op Opcode = spv::OpAccessChain;
3806 unsigned offset = 0;
3807 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003808 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003809 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003810 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003811 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003812 }
David Neto862b7d82018-06-14 18:48:37 -04003813 } else {
David Neto22f144c2017-06-12 14:26:21 -04003814 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003815 }
3816
3817 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003818 // Do we need to generate ArrayStride? Check against the GEP result type
3819 // rather than the pointer type of the base because when indexing into
3820 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3821 // for something else in the SPIR-V.
3822 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003823 auto address_space = ResultType->getAddressSpace();
3824 setVariablePointersCapabilities(address_space);
3825 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003826 case spv::StorageClassStorageBuffer:
3827 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003828 // Save the need to generate an ArrayStride decoration. But defer
3829 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003830 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003831 break;
3832 default:
3833 break;
David Neto1a1a0582017-07-07 12:01:44 -04003834 }
David Neto22f144c2017-06-12 14:26:21 -04003835 }
3836
3837 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003838 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003839 }
3840
David Neto87846742018-04-11 17:36:22 -04003841 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003842 SPIRVInstList.push_back(Inst);
3843 break;
3844 }
3845 case Instruction::ExtractValue: {
3846 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3847 // Ops[0] = Result Type ID
3848 // Ops[1] = Composite ID
3849 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3850 SPIRVOperandList Ops;
3851
David Neto257c3892018-04-11 13:19:45 -04003852 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003853
3854 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003855 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003856
3857 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003858 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003859 }
3860
David Neto87846742018-04-11 17:36:22 -04003861 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003862 SPIRVInstList.push_back(Inst);
3863 break;
3864 }
3865 case Instruction::InsertValue: {
3866 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3867 // Ops[0] = Result Type ID
3868 // Ops[1] = Object ID
3869 // Ops[2] = Composite ID
3870 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3871 SPIRVOperandList Ops;
3872
3873 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003874 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003875
3876 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003877 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003878
3879 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003880 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003881
3882 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003883 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003884 }
3885
David Neto87846742018-04-11 17:36:22 -04003886 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003887 SPIRVInstList.push_back(Inst);
3888 break;
3889 }
3890 case Instruction::Select: {
3891 //
3892 // Generate OpSelect.
3893 //
3894
3895 // Ops[0] = Result Type ID
3896 // Ops[1] = Condition ID
3897 // Ops[2] = True Constant ID
3898 // Ops[3] = False Constant ID
3899 SPIRVOperandList Ops;
3900
3901 // Find SPIRV instruction for parameter type.
3902 auto Ty = I.getType();
3903 if (Ty->isPointerTy()) {
3904 auto PointeeTy = Ty->getPointerElementType();
3905 if (PointeeTy->isStructTy() &&
3906 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3907 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003908 } else {
3909 // Selecting between pointers requires variable pointers.
3910 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3911 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3912 setVariablePointers(true);
3913 }
David Neto22f144c2017-06-12 14:26:21 -04003914 }
3915 }
3916
David Neto257c3892018-04-11 13:19:45 -04003917 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3918 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003919
David Neto87846742018-04-11 17:36:22 -04003920 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003921 SPIRVInstList.push_back(Inst);
3922 break;
3923 }
3924 case Instruction::ExtractElement: {
3925 // Handle <4 x i8> type manually.
3926 Type *CompositeTy = I.getOperand(0)->getType();
3927 if (is4xi8vec(CompositeTy)) {
3928 //
3929 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3930 // <4 x i8>.
3931 //
3932
3933 //
3934 // Generate OpShiftRightLogical
3935 //
3936 // Ops[0] = Result Type ID
3937 // Ops[1] = Operand 0
3938 // Ops[2] = Operand 1
3939 //
3940 SPIRVOperandList Ops;
3941
David Neto257c3892018-04-11 13:19:45 -04003942 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04003943
3944 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003945 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04003946
3947 uint32_t Op1ID = 0;
3948 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3949 // Handle constant index.
3950 uint64_t Idx = CI->getZExtValue();
3951 Value *ShiftAmount =
3952 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3953 Op1ID = VMap[ShiftAmount];
3954 } else {
3955 // Handle variable index.
3956 SPIRVOperandList TmpOps;
3957
David Neto257c3892018-04-11 13:19:45 -04003958 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3959 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003960
3961 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003962 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003963
3964 Op1ID = nextID;
3965
David Neto87846742018-04-11 17:36:22 -04003966 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003967 SPIRVInstList.push_back(TmpInst);
3968 }
David Neto257c3892018-04-11 13:19:45 -04003969 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04003970
3971 uint32_t ShiftID = nextID;
3972
David Neto87846742018-04-11 17:36:22 -04003973 auto *Inst =
3974 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003975 SPIRVInstList.push_back(Inst);
3976
3977 //
3978 // Generate OpBitwiseAnd
3979 //
3980 // Ops[0] = Result Type ID
3981 // Ops[1] = Operand 0
3982 // Ops[2] = Operand 1
3983 //
3984 Ops.clear();
3985
David Neto257c3892018-04-11 13:19:45 -04003986 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04003987
3988 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04003989 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04003990
David Neto9b2d6252017-09-06 15:47:37 -04003991 // Reset mapping for this value to the result of the bitwise and.
3992 VMap[&I] = nextID;
3993
David Neto87846742018-04-11 17:36:22 -04003994 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003995 SPIRVInstList.push_back(Inst);
3996 break;
3997 }
3998
3999 // Ops[0] = Result Type ID
4000 // Ops[1] = Composite ID
4001 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4002 SPIRVOperandList Ops;
4003
David Neto257c3892018-04-11 13:19:45 -04004004 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004005
4006 spv::Op Opcode = spv::OpCompositeExtract;
4007 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004008 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004009 } else {
David Neto257c3892018-04-11 13:19:45 -04004010 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004011 Opcode = spv::OpVectorExtractDynamic;
4012 }
4013
David Neto87846742018-04-11 17:36:22 -04004014 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004015 SPIRVInstList.push_back(Inst);
4016 break;
4017 }
4018 case Instruction::InsertElement: {
4019 // Handle <4 x i8> type manually.
4020 Type *CompositeTy = I.getOperand(0)->getType();
4021 if (is4xi8vec(CompositeTy)) {
4022 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4023 uint32_t CstFFID = VMap[CstFF];
4024
4025 uint32_t ShiftAmountID = 0;
4026 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4027 // Handle constant index.
4028 uint64_t Idx = CI->getZExtValue();
4029 Value *ShiftAmount =
4030 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4031 ShiftAmountID = VMap[ShiftAmount];
4032 } else {
4033 // Handle variable index.
4034 SPIRVOperandList TmpOps;
4035
David Neto257c3892018-04-11 13:19:45 -04004036 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4037 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004038
4039 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004040 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004041
4042 ShiftAmountID = nextID;
4043
David Neto87846742018-04-11 17:36:22 -04004044 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004045 SPIRVInstList.push_back(TmpInst);
4046 }
4047
4048 //
4049 // Generate mask operations.
4050 //
4051
4052 // ShiftLeft mask according to index of insertelement.
4053 SPIRVOperandList Ops;
4054
David Neto257c3892018-04-11 13:19:45 -04004055 const uint32_t ResTyID = lookupType(CompositeTy);
4056 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004057
4058 uint32_t MaskID = nextID;
4059
David Neto87846742018-04-11 17:36:22 -04004060 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004061 SPIRVInstList.push_back(Inst);
4062
4063 // Inverse mask.
4064 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004065 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004066
4067 uint32_t InvMaskID = nextID;
4068
David Neto87846742018-04-11 17:36:22 -04004069 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004070 SPIRVInstList.push_back(Inst);
4071
4072 // Apply mask.
4073 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004074 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004075
4076 uint32_t OrgValID = nextID;
4077
David Neto87846742018-04-11 17:36:22 -04004078 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004079 SPIRVInstList.push_back(Inst);
4080
4081 // Create correct value according to index of insertelement.
4082 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004083 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4084 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004085
4086 uint32_t InsertValID = nextID;
4087
David Neto87846742018-04-11 17:36:22 -04004088 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004089 SPIRVInstList.push_back(Inst);
4090
4091 // Insert value to original value.
4092 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004093 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004094
David Netoa394f392017-08-26 20:45:29 -04004095 VMap[&I] = nextID;
4096
David Neto87846742018-04-11 17:36:22 -04004097 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004098 SPIRVInstList.push_back(Inst);
4099
4100 break;
4101 }
4102
David Neto22f144c2017-06-12 14:26:21 -04004103 SPIRVOperandList Ops;
4104
James Priced26efea2018-06-09 23:28:32 +01004105 // Ops[0] = Result Type ID
4106 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004107
4108 spv::Op Opcode = spv::OpCompositeInsert;
4109 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004110 const auto value = CI->getZExtValue();
4111 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004112 // Ops[1] = Object ID
4113 // Ops[2] = Composite ID
4114 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004115 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004116 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004117 } else {
James Priced26efea2018-06-09 23:28:32 +01004118 // Ops[1] = Composite ID
4119 // Ops[2] = Object ID
4120 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004121 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004122 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004123 Opcode = spv::OpVectorInsertDynamic;
4124 }
4125
David Neto87846742018-04-11 17:36:22 -04004126 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004127 SPIRVInstList.push_back(Inst);
4128 break;
4129 }
4130 case Instruction::ShuffleVector: {
4131 // Ops[0] = Result Type ID
4132 // Ops[1] = Vector 1 ID
4133 // Ops[2] = Vector 2 ID
4134 // Ops[3] ... Ops[n] = Components (Literal Number)
4135 SPIRVOperandList Ops;
4136
David Neto257c3892018-04-11 13:19:45 -04004137 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4138 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004139
4140 uint64_t NumElements = 0;
4141 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4142 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4143
4144 if (Cst->isNullValue()) {
4145 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004146 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004147 }
4148 } else if (const ConstantDataSequential *CDS =
4149 dyn_cast<ConstantDataSequential>(Cst)) {
4150 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4151 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004152 const auto value = CDS->getElementAsInteger(i);
4153 assert(value <= UINT32_MAX);
4154 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004155 }
4156 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4157 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4158 auto Op = CV->getOperand(i);
4159
4160 uint32_t literal = 0;
4161
4162 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4163 literal = static_cast<uint32_t>(CI->getZExtValue());
4164 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4165 literal = 0xFFFFFFFFu;
4166 } else {
4167 Op->print(errs());
4168 llvm_unreachable("Unsupported element in ConstantVector!");
4169 }
4170
David Neto257c3892018-04-11 13:19:45 -04004171 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004172 }
4173 } else {
4174 Cst->print(errs());
4175 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4176 }
4177 }
4178
David Neto87846742018-04-11 17:36:22 -04004179 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004180 SPIRVInstList.push_back(Inst);
4181 break;
4182 }
4183 case Instruction::ICmp:
4184 case Instruction::FCmp: {
4185 CmpInst *CmpI = cast<CmpInst>(&I);
4186
David Netod4ca2e62017-07-06 18:47:35 -04004187 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004188 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004189 if (isa<PointerType>(ArgTy)) {
4190 CmpI->print(errs());
4191 std::string name = I.getParent()->getParent()->getName();
4192 errs()
4193 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4194 << "in function " << name << "\n";
4195 llvm_unreachable("Pointer equality check is invalid");
4196 break;
4197 }
4198
David Neto257c3892018-04-11 13:19:45 -04004199 // Ops[0] = Result Type ID
4200 // Ops[1] = Operand 1 ID
4201 // Ops[2] = Operand 2 ID
4202 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004203
David Neto257c3892018-04-11 13:19:45 -04004204 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4205 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004206
4207 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004208 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004209 SPIRVInstList.push_back(Inst);
4210 break;
4211 }
4212 case Instruction::Br: {
4213 // Branch instrucion is deferred because it needs label's ID. Record slot's
4214 // location on SPIRVInstructionList.
4215 DeferredInsts.push_back(
4216 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4217 break;
4218 }
4219 case Instruction::Switch: {
4220 I.print(errs());
4221 llvm_unreachable("Unsupported instruction???");
4222 break;
4223 }
4224 case Instruction::IndirectBr: {
4225 I.print(errs());
4226 llvm_unreachable("Unsupported instruction???");
4227 break;
4228 }
4229 case Instruction::PHI: {
4230 // Branch instrucion is deferred because it needs label's ID. Record slot's
4231 // location on SPIRVInstructionList.
4232 DeferredInsts.push_back(
4233 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4234 break;
4235 }
4236 case Instruction::Alloca: {
4237 //
4238 // Generate OpVariable.
4239 //
4240 // Ops[0] : Result Type ID
4241 // Ops[1] : Storage Class
4242 SPIRVOperandList Ops;
4243
David Neto257c3892018-04-11 13:19:45 -04004244 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004245
David Neto87846742018-04-11 17:36:22 -04004246 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004247 SPIRVInstList.push_back(Inst);
4248 break;
4249 }
4250 case Instruction::Load: {
4251 LoadInst *LD = cast<LoadInst>(&I);
4252 //
4253 // Generate OpLoad.
4254 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004255
alan-baker5b86ed72019-02-15 08:26:50 -05004256 if (LD->getType()->isPointerTy()) {
4257 // Loading a pointer requires variable pointers.
4258 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4259 }
David Neto22f144c2017-06-12 14:26:21 -04004260
David Neto0a2f98d2017-09-15 19:38:40 -04004261 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004262 uint32_t PointerID = VMap[LD->getPointerOperand()];
4263
4264 // This is a hack to work around what looks like a driver bug.
4265 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004266 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4267 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004268 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004269 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004270 // Generate a bitwise-and of the original value with itself.
4271 // We should have been able to get away with just an OpCopyObject,
4272 // but we need something more complex to get past certain driver bugs.
4273 // This is ridiculous, but necessary.
4274 // TODO(dneto): Revisit this once drivers fix their bugs.
4275
4276 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004277 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4278 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004279
David Neto87846742018-04-11 17:36:22 -04004280 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004281 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004282 break;
4283 }
4284
4285 // This is the normal path. Generate a load.
4286
David Neto22f144c2017-06-12 14:26:21 -04004287 // Ops[0] = Result Type ID
4288 // Ops[1] = Pointer ID
4289 // Ops[2] ... Ops[n] = Optional Memory Access
4290 //
4291 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004292
David Neto22f144c2017-06-12 14:26:21 -04004293 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004294 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004295
David Neto87846742018-04-11 17:36:22 -04004296 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004297 SPIRVInstList.push_back(Inst);
4298 break;
4299 }
4300 case Instruction::Store: {
4301 StoreInst *ST = cast<StoreInst>(&I);
4302 //
4303 // Generate OpStore.
4304 //
4305
alan-baker5b86ed72019-02-15 08:26:50 -05004306 if (ST->getValueOperand()->getType()->isPointerTy()) {
4307 // Storing a pointer requires variable pointers.
4308 setVariablePointersCapabilities(
4309 ST->getValueOperand()->getType()->getPointerAddressSpace());
4310 }
4311
David Neto22f144c2017-06-12 14:26:21 -04004312 // Ops[0] = Pointer ID
4313 // Ops[1] = Object ID
4314 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4315 //
4316 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004317 SPIRVOperandList Ops;
4318 Ops << MkId(VMap[ST->getPointerOperand()])
4319 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004320
David Neto87846742018-04-11 17:36:22 -04004321 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004322 SPIRVInstList.push_back(Inst);
4323 break;
4324 }
4325 case Instruction::AtomicCmpXchg: {
4326 I.print(errs());
4327 llvm_unreachable("Unsupported instruction???");
4328 break;
4329 }
4330 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004331 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4332
4333 spv::Op opcode;
4334
4335 switch (AtomicRMW->getOperation()) {
4336 default:
4337 I.print(errs());
4338 llvm_unreachable("Unsupported instruction???");
4339 case llvm::AtomicRMWInst::Add:
4340 opcode = spv::OpAtomicIAdd;
4341 break;
4342 case llvm::AtomicRMWInst::Sub:
4343 opcode = spv::OpAtomicISub;
4344 break;
4345 case llvm::AtomicRMWInst::Xchg:
4346 opcode = spv::OpAtomicExchange;
4347 break;
4348 case llvm::AtomicRMWInst::Min:
4349 opcode = spv::OpAtomicSMin;
4350 break;
4351 case llvm::AtomicRMWInst::Max:
4352 opcode = spv::OpAtomicSMax;
4353 break;
4354 case llvm::AtomicRMWInst::UMin:
4355 opcode = spv::OpAtomicUMin;
4356 break;
4357 case llvm::AtomicRMWInst::UMax:
4358 opcode = spv::OpAtomicUMax;
4359 break;
4360 case llvm::AtomicRMWInst::And:
4361 opcode = spv::OpAtomicAnd;
4362 break;
4363 case llvm::AtomicRMWInst::Or:
4364 opcode = spv::OpAtomicOr;
4365 break;
4366 case llvm::AtomicRMWInst::Xor:
4367 opcode = spv::OpAtomicXor;
4368 break;
4369 }
4370
4371 //
4372 // Generate OpAtomic*.
4373 //
4374 SPIRVOperandList Ops;
4375
David Neto257c3892018-04-11 13:19:45 -04004376 Ops << MkId(lookupType(I.getType()))
4377 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004378
4379 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004380 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004381 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004382
4383 const auto ConstantMemorySemantics = ConstantInt::get(
4384 IntTy, spv::MemorySemanticsUniformMemoryMask |
4385 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004386 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004387
David Neto257c3892018-04-11 13:19:45 -04004388 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004389
4390 VMap[&I] = nextID;
4391
David Neto87846742018-04-11 17:36:22 -04004392 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004393 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004394 break;
4395 }
4396 case Instruction::Fence: {
4397 I.print(errs());
4398 llvm_unreachable("Unsupported instruction???");
4399 break;
4400 }
4401 case Instruction::Call: {
4402 CallInst *Call = dyn_cast<CallInst>(&I);
4403 Function *Callee = Call->getCalledFunction();
4404
Alan Baker202c8c72018-08-13 13:47:44 -04004405 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004406 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4407 // Generate an OpLoad
4408 SPIRVOperandList Ops;
4409 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004410
David Neto862b7d82018-06-14 18:48:37 -04004411 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4412 << MkId(ResourceVarDeferredLoadCalls[Call]);
4413
4414 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4415 SPIRVInstList.push_back(Inst);
4416 VMap[Call] = load_id;
4417 break;
4418
4419 } else {
4420 // This maps to an OpVariable we've already generated.
4421 // No code is generated for the call.
4422 }
4423 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004424 } else if (Callee->getName().startswith(
4425 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004426 // Don't codegen an instruction here, but instead map this call directly
4427 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004428 int spec_id = static_cast<int>(
4429 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004430 const auto &info = LocalSpecIdInfoMap[spec_id];
4431 VMap[Call] = info.variable_id;
4432 break;
David Neto862b7d82018-06-14 18:48:37 -04004433 }
4434
4435 // Sampler initializers become a load of the corresponding sampler.
4436
Kévin Petitdf71de32019-04-09 14:09:50 +01004437 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004438 // Map this to a load from the variable.
4439 const auto index_into_sampler_map =
4440 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4441
4442 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004443 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004444 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004445
David Neto257c3892018-04-11 13:19:45 -04004446 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004447 << MkId(SamplerMapIndexToIDMap[static_cast<unsigned>(
4448 index_into_sampler_map)]);
David Neto22f144c2017-06-12 14:26:21 -04004449
David Neto862b7d82018-06-14 18:48:37 -04004450 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004451 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004452 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004453 break;
4454 }
4455
Kévin Petit349c9502019-03-28 17:24:14 +00004456 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004457 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4458 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4459 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004460
Kévin Petit617a76d2019-04-04 13:54:16 +01004461 // If the switch above didn't have an entry maybe the intrinsic
4462 // is using the name mangling logic.
4463 bool usesMangler = false;
4464 if (opcode == spv::OpNop) {
4465 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4466 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4467 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4468 usesMangler = true;
4469 }
4470 }
4471
Kévin Petit349c9502019-03-28 17:24:14 +00004472 if (opcode != spv::OpNop) {
4473
David Neto22f144c2017-06-12 14:26:21 -04004474 SPIRVOperandList Ops;
4475
Kévin Petit349c9502019-03-28 17:24:14 +00004476 if (!I.getType()->isVoidTy()) {
4477 Ops << MkId(lookupType(I.getType()));
4478 }
David Neto22f144c2017-06-12 14:26:21 -04004479
Kévin Petit617a76d2019-04-04 13:54:16 +01004480 unsigned firstOperand = usesMangler ? 1 : 0;
4481 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004482 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004483 }
4484
Kévin Petit349c9502019-03-28 17:24:14 +00004485 if (!I.getType()->isVoidTy()) {
4486 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004487 }
4488
Kévin Petit349c9502019-03-28 17:24:14 +00004489 SPIRVInstruction *Inst;
4490 if (!I.getType()->isVoidTy()) {
4491 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4492 } else {
4493 Inst = new SPIRVInstruction(opcode, Ops);
4494 }
Kévin Petit8a560882019-03-21 15:24:34 +00004495 SPIRVInstList.push_back(Inst);
4496 break;
4497 }
4498
David Neto22f144c2017-06-12 14:26:21 -04004499 // spirv.copy_memory.* intrinsics become OpMemoryMemory's.
4500 if (Callee->getName().startswith("spirv.copy_memory")) {
4501 //
4502 // Generate OpCopyMemory.
4503 //
4504
4505 // Ops[0] = Dst ID
4506 // Ops[1] = Src ID
4507 // Ops[2] = Memory Access
4508 // Ops[3] = Alignment
4509
4510 auto IsVolatile =
4511 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4512
4513 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4514 : spv::MemoryAccessMaskNone;
4515
4516 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4517
4518 auto Alignment =
4519 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4520
David Neto257c3892018-04-11 13:19:45 -04004521 SPIRVOperandList Ops;
4522 Ops << MkId(VMap[Call->getArgOperand(0)])
4523 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4524 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004525
David Neto87846742018-04-11 17:36:22 -04004526 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004527
4528 SPIRVInstList.push_back(Inst);
4529
4530 break;
4531 }
4532
David Neto22f144c2017-06-12 14:26:21 -04004533 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4534 // Additionally, OpTypeSampledImage is generated.
alan-bakerf67468c2019-11-25 15:51:49 -05004535 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004536 //
4537 // Generate OpSampledImage.
4538 //
4539 // Ops[0] = Result Type ID
4540 // Ops[1] = Image ID
4541 // Ops[2] = Sampler ID
4542 //
4543 SPIRVOperandList Ops;
4544
4545 Value *Image = Call->getArgOperand(0);
4546 Value *Sampler = Call->getArgOperand(1);
4547 Value *Coordinate = Call->getArgOperand(2);
4548
4549 TypeMapType &OpImageTypeMap = getImageTypeMap();
4550 Type *ImageTy = Image->getType()->getPointerElementType();
4551 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004552 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004553 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004554
4555 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004556
4557 uint32_t SampledImageID = nextID;
4558
David Neto87846742018-04-11 17:36:22 -04004559 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004560 SPIRVInstList.push_back(Inst);
4561
4562 //
4563 // Generate OpImageSampleExplicitLod.
4564 //
4565 // Ops[0] = Result Type ID
4566 // Ops[1] = Sampled Image ID
4567 // Ops[2] = Coordinate ID
4568 // Ops[3] = Image Operands Type ID
4569 // Ops[4] ... Ops[n] = Operands ID
4570 //
4571 Ops.clear();
4572
alan-bakerf67468c2019-11-25 15:51:49 -05004573 const bool is_int_image = IsIntImageType(Image->getType());
4574 uint32_t result_type = 0;
4575 if (is_int_image) {
4576 result_type = v4int32ID;
4577 } else {
4578 result_type = lookupType(Call->getType());
4579 }
4580
4581 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4582 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004583
4584 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004585 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004586
alan-bakerf67468c2019-11-25 15:51:49 -05004587 uint32_t final_id = nextID++;
4588 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004589
alan-bakerf67468c2019-11-25 15:51:49 -05004590 uint32_t image_id = final_id;
4591 if (is_int_image) {
4592 // Int image requires a bitcast from v4int to v4uint.
4593 image_id = nextID++;
4594 }
4595
4596 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004597 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004598
4599 if (is_int_image) {
4600 // Generate the bitcast.
4601 Ops.clear();
4602 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4603 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4604 SPIRVInstList.push_back(Inst);
4605 }
David Neto22f144c2017-06-12 14:26:21 -04004606 break;
4607 }
4608
alan-bakerf67468c2019-11-25 15:51:49 -05004609 // write_image is mapped to OpImageWrite.
4610 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004611 //
4612 // Generate OpImageWrite.
4613 //
4614 // Ops[0] = Image ID
4615 // Ops[1] = Coordinate ID
4616 // Ops[2] = Texel ID
4617 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4618 // Ops[4] ... Ops[n] = (Optional) Operands ID
4619 //
4620 SPIRVOperandList Ops;
4621
4622 Value *Image = Call->getArgOperand(0);
4623 Value *Coordinate = Call->getArgOperand(1);
4624 Value *Texel = Call->getArgOperand(2);
4625
4626 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004627 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004628 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004629
4630 const bool is_int_image = IsIntImageType(Image->getType());
4631 if (is_int_image) {
4632 // Generate a bitcast to v4int and use it as the texel value.
4633 uint32_t castID = nextID++;
4634 Ops << MkId(v4int32ID) << MkId(TexelID);
4635 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4636 SPIRVInstList.push_back(cast);
4637 Ops.clear();
4638 TexelID = castID;
4639 }
David Neto257c3892018-04-11 13:19:45 -04004640 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004641
David Neto87846742018-04-11 17:36:22 -04004642 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004643 SPIRVInstList.push_back(Inst);
4644 break;
4645 }
4646
alan-bakerf67468c2019-11-25 15:51:49 -05004647 // get_image_* is mapped to OpImageQuerySize
4648 if (clspv::IsGetImageHeight(Callee) || clspv::IsGetImageWidth(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004649 //
4650 // Generate OpImageQuerySize, then pull out the right component.
4651 // Assume 2D image for now.
4652 //
4653 // Ops[0] = Image ID
4654 //
4655  // %sizes = OpImageQuerySize %uint2 %im
4656 // %result = OpCompositeExtract %uint %sizes 0-or-1
4657 SPIRVOperandList Ops;
4658
4659 // Implement:
4660  // %sizes = OpImageQuerySize %uint2 %im
4661 uint32_t SizesTypeID =
4662 TypeMap[VectorType::get(Type::getInt32Ty(Context), 2)];
David Neto5c22a252018-03-15 16:07:41 -04004663 Value *Image = Call->getArgOperand(0);
4664 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004665 Ops << MkId(SizesTypeID) << MkId(ImageID);
David Neto5c22a252018-03-15 16:07:41 -04004666
4667 uint32_t SizesID = nextID++;
David Neto87846742018-04-11 17:36:22 -04004668 auto *QueryInst =
4669 new SPIRVInstruction(spv::OpImageQuerySize, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004670 SPIRVInstList.push_back(QueryInst);
4671
4672 // Reset value map entry since we generated an intermediate instruction.
4673 VMap[&I] = nextID;
4674
4675 // Implement:
4676 // %result = OpCompositeExtract %uint %sizes 0-or-1
4677 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004678 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004679
4680 uint32_t component = Callee->getName().contains("height") ? 1 : 0;
David Neto257c3892018-04-11 13:19:45 -04004681 Ops << MkNum(component);
David Neto5c22a252018-03-15 16:07:41 -04004682
David Neto87846742018-04-11 17:36:22 -04004683 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004684 SPIRVInstList.push_back(Inst);
4685 break;
4686 }
4687
David Neto22f144c2017-06-12 14:26:21 -04004688  // Call instruction is deferred because it needs function's ID. Record
4689 // slot's location on SPIRVInstructionList.
4690 DeferredInsts.push_back(
4691 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4692
David Neto3fbb4072017-10-16 11:28:14 -04004693 // Check whether the implementation of this call uses an extended
4694 // instruction plus one more value-producing instruction. If so, then
4695 // reserve the id for the extra value-producing slot.
4696 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4697 if (EInst != kGlslExtInstBad) {
4698 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004699 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004700 VMap[&I] = nextID;
4701 nextID++;
4702 }
4703 break;
4704 }
4705 case Instruction::Ret: {
4706 unsigned NumOps = I.getNumOperands();
4707 if (NumOps == 0) {
4708 //
4709 // Generate OpReturn.
4710 //
David Neto87846742018-04-11 17:36:22 -04004711 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004712 } else {
4713 //
4714 // Generate OpReturnValue.
4715 //
4716
4717 // Ops[0] = Return Value ID
4718 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004719
4720 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004721
David Neto87846742018-04-11 17:36:22 -04004722 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004723 SPIRVInstList.push_back(Inst);
4724 break;
4725 }
4726 break;
4727 }
4728 }
4729}
4730
4731void SPIRVProducerPass::GenerateFuncEpilogue() {
4732 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4733
4734 //
4735 // Generate OpFunctionEnd
4736 //
4737
David Neto87846742018-04-11 17:36:22 -04004738 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004739 SPIRVInstList.push_back(Inst);
4740}
4741
4742bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004743 // Don't specialize <4 x i8> if i8 is generally supported.
4744 if (clspv::Option::Int8Support())
4745 return false;
4746
David Neto22f144c2017-06-12 14:26:21 -04004747 LLVMContext &Context = Ty->getContext();
4748 if (Ty->isVectorTy()) {
4749 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4750 Ty->getVectorNumElements() == 4) {
4751 return true;
4752 }
4753 }
4754
4755 return false;
4756}
4757
4758void SPIRVProducerPass::HandleDeferredInstruction() {
4759 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4760 ValueMapType &VMap = getValueMap();
4761 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4762
4763 for (auto DeferredInst = DeferredInsts.rbegin();
4764 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4765 Value *Inst = std::get<0>(*DeferredInst);
4766 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4767 if (InsertPoint != SPIRVInstList.end()) {
4768 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4769 ++InsertPoint;
4770 }
4771 }
4772
4773 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004774      // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004775 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004776 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004777 //
4778 // Generate OpLoopMerge.
4779 //
4780 // Ops[0] = Merge Block ID
4781 // Ops[1] = Continue Target ID
4782 // Ops[2] = Selection Control
4783 SPIRVOperandList Ops;
4784
alan-baker06cad652019-12-03 17:56:47 -05004785 auto MergeBB = MergeBlocks[BrBB];
4786 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04004787 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004788 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004789 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004790 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004791
David Neto87846742018-04-11 17:36:22 -04004792 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004793 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05004794 } else if (MergeBlocks.count(BrBB)) {
4795 //
4796 // Generate OpSelectionMerge.
4797 //
4798 // Ops[0] = Merge Block ID
4799 // Ops[1] = Selection Control
4800 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004801
alan-baker06cad652019-12-03 17:56:47 -05004802 auto MergeBB = MergeBlocks[BrBB];
4803 uint32_t MergeBBID = VMap[MergeBB];
4804 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004805
alan-baker06cad652019-12-03 17:56:47 -05004806 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
4807 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04004808 }
4809
4810 if (Br->isConditional()) {
4811 //
4812 // Generate OpBranchConditional.
4813 //
4814 // Ops[0] = Condition ID
4815 // Ops[1] = True Label ID
4816 // Ops[2] = False Label ID
4817 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4818 SPIRVOperandList Ops;
4819
4820 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004821 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004822 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004823
4824 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004825
David Neto87846742018-04-11 17:36:22 -04004826 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004827 SPIRVInstList.insert(InsertPoint, BrInst);
4828 } else {
4829 //
4830 // Generate OpBranch.
4831 //
4832 // Ops[0] = Target Label ID
4833 SPIRVOperandList Ops;
4834
4835 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004836 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004837
David Neto87846742018-04-11 17:36:22 -04004838 SPIRVInstList.insert(InsertPoint,
4839 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004840 }
4841 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004842 if (PHI->getType()->isPointerTy()) {
4843 // OpPhi on pointers requires variable pointers.
4844 setVariablePointersCapabilities(
4845 PHI->getType()->getPointerAddressSpace());
4846 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4847 setVariablePointers(true);
4848 }
4849 }
4850
David Neto22f144c2017-06-12 14:26:21 -04004851 //
4852 // Generate OpPhi.
4853 //
4854 // Ops[0] = Result Type ID
4855 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
4856 SPIRVOperandList Ops;
4857
David Neto257c3892018-04-11 13:19:45 -04004858 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004859
David Neto22f144c2017-06-12 14:26:21 -04004860 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4861 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04004862 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04004863 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004864 }
4865
4866 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004867 InsertPoint,
4868 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004869 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4870 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004871 auto callee_name = Callee->getName();
4872 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004873
4874 if (EInst) {
4875 uint32_t &ExtInstImportID = getOpExtInstImportID();
4876
4877 //
4878 // Generate OpExtInst.
4879 //
4880
4881 // Ops[0] = Result Type ID
4882 // Ops[1] = Set ID (OpExtInstImport ID)
4883 // Ops[2] = Instruction Number (Literal Number)
4884 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
4885 SPIRVOperandList Ops;
4886
David Neto862b7d82018-06-14 18:48:37 -04004887 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
4888 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004889
David Neto22f144c2017-06-12 14:26:21 -04004890 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4891 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004892 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004893 }
4894
David Neto87846742018-04-11 17:36:22 -04004895 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
4896 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004897 SPIRVInstList.insert(InsertPoint, ExtInst);
4898
David Neto3fbb4072017-10-16 11:28:14 -04004899 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
4900 if (IndirectExtInst != kGlslExtInstBad) {
4901 // Generate one more instruction that uses the result of the extended
4902 // instruction. Its result id is one more than the id of the
4903 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04004904 LLVMContext &Context =
4905 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04004906
David Neto3fbb4072017-10-16 11:28:14 -04004907 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
4908 &VMap, &SPIRVInstList, &InsertPoint](
4909 spv::Op opcode, Constant *constant) {
4910 //
4911 // Generate instruction like:
4912 // result = opcode constant <extinst-result>
4913 //
4914 // Ops[0] = Result Type ID
4915 // Ops[1] = Operand 0 ;; the constant, suitably splatted
4916 // Ops[2] = Operand 1 ;; the result of the extended instruction
4917 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004918
David Neto3fbb4072017-10-16 11:28:14 -04004919 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04004920 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04004921
4922 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
4923 constant = ConstantVector::getSplat(
4924 static_cast<unsigned>(vectorTy->getNumElements()), constant);
4925 }
David Neto257c3892018-04-11 13:19:45 -04004926 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04004927
4928 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004929 InsertPoint, new SPIRVInstruction(
4930 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04004931 };
4932
4933 switch (IndirectExtInst) {
4934 case glsl::ExtInstFindUMsb: // Implementing clz
4935 generate_extra_inst(
4936 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
4937 break;
4938 case glsl::ExtInstAcos: // Implementing acospi
4939 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01004940 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04004941 case glsl::ExtInstAtan2: // Implementing atan2pi
4942 generate_extra_inst(
4943 spv::OpFMul,
4944 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
4945 break;
4946
4947 default:
4948 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04004949 }
David Neto22f144c2017-06-12 14:26:21 -04004950 }
David Neto3fbb4072017-10-16 11:28:14 -04004951
alan-bakerb39c8262019-03-08 14:03:37 -05004952 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04004953 //
4954 // Generate OpBitCount
4955 //
4956 // Ops[0] = Result Type ID
4957 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04004958 SPIRVOperandList Ops;
4959 Ops << MkId(lookupType(Call->getType()))
4960 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004961
4962 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004963 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04004964 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04004965
David Neto862b7d82018-06-14 18:48:37 -04004966 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04004967
4968 // Generate an OpCompositeConstruct
4969 SPIRVOperandList Ops;
4970
4971 // The result type.
David Neto257c3892018-04-11 13:19:45 -04004972 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04004973
4974 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04004975 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04004976 }
4977
4978 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004979 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
4980 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04004981
Alan Baker202c8c72018-08-13 13:47:44 -04004982 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
4983
4984 // We have already mapped the call's result value to an ID.
4985 // Don't generate any code now.
4986
4987 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004988
4989 // We have already mapped the call's result value to an ID.
4990 // Don't generate any code now.
4991
David Neto22f144c2017-06-12 14:26:21 -04004992 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004993 if (Call->getType()->isPointerTy()) {
4994 // Functions returning pointers require variable pointers.
4995 setVariablePointersCapabilities(
4996 Call->getType()->getPointerAddressSpace());
4997 }
4998
David Neto22f144c2017-06-12 14:26:21 -04004999 //
5000 // Generate OpFunctionCall.
5001 //
5002
5003 // Ops[0] = Result Type ID
5004 // Ops[1] = Callee Function ID
5005 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5006 SPIRVOperandList Ops;
5007
David Neto862b7d82018-06-14 18:48:37 -04005008 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005009
5010 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005011 if (CalleeID == 0) {
5012 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005013 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005014 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5015 // causes an infinite loop. Instead, go ahead and generate
5016 // the bad function call. A validator will catch the 0-Id.
5017 // llvm_unreachable("Can't translate function call");
5018 }
David Neto22f144c2017-06-12 14:26:21 -04005019
David Neto257c3892018-04-11 13:19:45 -04005020 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005021
David Neto22f144c2017-06-12 14:26:21 -04005022 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5023 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005024 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005025 auto *operand_type = operand->getType();
5026 // Images and samplers can be passed as function parameters without
5027 // variable pointers.
5028 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5029 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005030 auto sc =
5031 GetStorageClass(operand->getType()->getPointerAddressSpace());
5032 if (sc == spv::StorageClassStorageBuffer) {
5033 // Passing SSBO by reference requires variable pointers storage
5034 // buffer.
5035 setVariablePointersStorageBuffer(true);
5036 } else if (sc == spv::StorageClassWorkgroup) {
5037 // Workgroup references require variable pointers if they are not
5038 // memory object declarations.
5039 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5040 // Workgroup accessor represents a variable reference.
5041 if (!operand_call->getCalledFunction()->getName().startswith(
5042 clspv::WorkgroupAccessorFunction()))
5043 setVariablePointers(true);
5044 } else {
5045 // Arguments are function parameters.
5046 if (!isa<Argument>(operand))
5047 setVariablePointers(true);
5048 }
5049 }
5050 }
5051 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005052 }
5053
David Neto87846742018-04-11 17:36:22 -04005054 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5055 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005056 SPIRVInstList.insert(InsertPoint, CallInst);
5057 }
5058 }
5059 }
5060}
5061
David Neto1a1a0582017-07-07 12:01:44 -04005062void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005063 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005064 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005065 }
David Neto1a1a0582017-07-07 12:01:44 -04005066
5067 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005068
5069 // Find an iterator pointing just past the last decoration.
5070 bool seen_decorations = false;
5071 auto DecoInsertPoint =
5072 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5073 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5074 const bool is_decoration =
5075 Inst->getOpcode() == spv::OpDecorate ||
5076 Inst->getOpcode() == spv::OpMemberDecorate;
5077 if (is_decoration) {
5078 seen_decorations = true;
5079 return false;
5080 } else {
5081 return seen_decorations;
5082 }
5083 });
5084
David Netoc6f3ab22018-04-06 18:02:31 -04005085 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5086 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005087 for (auto *type : getTypesNeedingArrayStride()) {
5088 Type *elemTy = nullptr;
5089 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5090 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005091 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005092 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005093 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005094 elemTy = seqTy->getSequentialElementType();
5095 } else {
5096 errs() << "Unhandled strided type " << *type << "\n";
5097 llvm_unreachable("Unhandled strided type");
5098 }
David Neto1a1a0582017-07-07 12:01:44 -04005099
5100 // Ops[0] = Target ID
5101 // Ops[1] = Decoration (ArrayStride)
5102 // Ops[2] = Stride number (Literal Number)
5103 SPIRVOperandList Ops;
5104
David Neto85082642018-03-24 06:55:20 -07005105 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005106 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005107
5108 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5109 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005110
David Neto87846742018-04-11 17:36:22 -04005111 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005112 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5113 }
David Netoc6f3ab22018-04-06 18:02:31 -04005114
5115 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005116 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5117 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005118 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005119 SPIRVOperandList Ops;
5120 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5121 << MkNum(arg_info.spec_id);
5122 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005123 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005124 }
David Neto1a1a0582017-07-07 12:01:44 -04005125}
5126
David Neto22f144c2017-06-12 14:26:21 -04005127glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5128 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005129 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5130 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5131 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5132 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005133 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5134 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5135 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5136 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005137 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5138 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5139 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5140 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005141 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5142 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5143 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5144 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005145 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5146 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5147 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5148 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5149 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5150 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5151 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5152 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005153 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5154 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5155 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5156 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5157 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5158 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5159 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5160 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005161 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5162 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5163 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5164 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5165 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5166 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5167 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5168 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005169 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5170 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5171 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5172 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5173 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5174 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5175 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5176 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005177 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5178 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5179 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5180 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005181 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5182 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5183 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5184 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5185 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5186 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5187 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5188 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005189 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5190 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5191 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5192 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5193 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5194 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5195 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5196 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005197 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5198 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5199 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5200 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5201 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5202 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5203 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5204 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005205 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5206 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5207 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5208 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5209 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5210 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5211 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5212 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005213 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5214 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5215 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5216 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5217 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005218 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5219 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5220 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5221 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5222 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5223 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5224 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5225 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005226 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5227 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5228 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5229 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5230 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5231 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5232 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5233 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005234 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5235 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5236 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5237 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5238 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5239 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5240 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5241 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005242 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5243 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5244 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5245 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5246 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5247 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5248 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5249 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005250 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5251 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5252 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5253 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5254 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5255 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5256 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5257 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5258 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5259 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5260 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5261 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5262 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5263 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5264 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5265 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5266 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5267 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5268 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5269 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5270 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5271 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5272 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5273 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5274 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5275 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5276 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5277 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5278 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5279 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5280 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5281 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5282 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5283 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5284 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5285 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5286 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5287 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5288 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5289 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5290 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005291 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005292 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5293 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5294 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5295 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5296 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5297 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5298 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5299 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5300 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5301 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5302 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5303 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5304 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5305 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5306 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5307 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5308 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005309 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005310 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005311 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005312 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005313 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005314 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5315 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005316 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005317 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5318 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5319 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005320 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5321 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5322 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5323 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005324 .Default(kGlslExtInstBad);
5325}
5326
5327glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5328 // Check indirect cases.
5329 return StringSwitch<glsl::ExtInst>(Name)
5330 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5331 // Use exact match on float arg because these need a multiply
5332 // of a constant of the right floating point type.
5333 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5334 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5335 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5336 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5337 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5338 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5339 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5340 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005341 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5342 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5343 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5344 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005345 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5346 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5347 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5348 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5349 .Default(kGlslExtInstBad);
5350}
5351
alan-bakerb6b09dc2018-11-08 16:59:28 -05005352glsl::ExtInst
5353SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005354 auto direct = getExtInstEnum(Name);
5355 if (direct != kGlslExtInstBad)
5356 return direct;
5357 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005358}
5359
David Neto22f144c2017-06-12 14:26:21 -04005360void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005361 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005362}
5363
5364void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5365 WriteOneWord(Inst->getResultID());
5366}
5367
5368void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5369 // High 16 bit : Word Count
5370 // Low 16 bit : Opcode
5371 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005372 const uint32_t count = Inst->getWordCount();
5373 if (count > 65535) {
5374 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5375 llvm_unreachable("Word count too high");
5376 }
David Neto22f144c2017-06-12 14:26:21 -04005377 Word |= Inst->getWordCount() << 16;
5378 WriteOneWord(Word);
5379}
5380
5381void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
5382 SPIRVOperandType OpTy = Op->getType();
5383 switch (OpTy) {
5384 default: {
5385 llvm_unreachable("Unsupported SPIRV Operand Type???");
5386 break;
5387 }
5388 case SPIRVOperandType::NUMBERID: {
5389 WriteOneWord(Op->getNumID());
5390 break;
5391 }
5392 case SPIRVOperandType::LITERAL_STRING: {
5393 std::string Str = Op->getLiteralStr();
5394 const char *Data = Str.c_str();
5395 size_t WordSize = Str.size() / 4;
5396 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5397 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5398 }
5399
5400 uint32_t Remainder = Str.size() % 4;
5401 uint32_t LastWord = 0;
5402 if (Remainder) {
5403 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5404 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5405 }
5406 }
5407
5408 WriteOneWord(LastWord);
5409 break;
5410 }
5411 case SPIRVOperandType::LITERAL_INTEGER:
5412 case SPIRVOperandType::LITERAL_FLOAT: {
5413 auto LiteralNum = Op->getLiteralNum();
5414 // TODO: Handle LiteranNum carefully.
5415 for (auto Word : LiteralNum) {
5416 WriteOneWord(Word);
5417 }
5418 break;
5419 }
5420 }
5421}
5422
5423void SPIRVProducerPass::WriteSPIRVBinary() {
5424 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5425
5426 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005427 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005428 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5429
5430 switch (Opcode) {
5431 default: {
David Neto5c22a252018-03-15 16:07:41 -04005432 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005433 llvm_unreachable("Unsupported SPIRV instruction");
5434 break;
5435 }
5436 case spv::OpCapability:
5437 case spv::OpExtension:
5438 case spv::OpMemoryModel:
5439 case spv::OpEntryPoint:
5440 case spv::OpExecutionMode:
5441 case spv::OpSource:
5442 case spv::OpDecorate:
5443 case spv::OpMemberDecorate:
5444 case spv::OpBranch:
5445 case spv::OpBranchConditional:
5446 case spv::OpSelectionMerge:
5447 case spv::OpLoopMerge:
5448 case spv::OpStore:
5449 case spv::OpImageWrite:
5450 case spv::OpReturnValue:
5451 case spv::OpControlBarrier:
5452 case spv::OpMemoryBarrier:
5453 case spv::OpReturn:
5454 case spv::OpFunctionEnd:
5455 case spv::OpCopyMemory: {
5456 WriteWordCountAndOpcode(Inst);
5457 for (uint32_t i = 0; i < Ops.size(); i++) {
5458 WriteOperand(Ops[i]);
5459 }
5460 break;
5461 }
5462 case spv::OpTypeBool:
5463 case spv::OpTypeVoid:
5464 case spv::OpTypeSampler:
5465 case spv::OpLabel:
5466 case spv::OpExtInstImport:
5467 case spv::OpTypePointer:
5468 case spv::OpTypeRuntimeArray:
5469 case spv::OpTypeStruct:
5470 case spv::OpTypeImage:
5471 case spv::OpTypeSampledImage:
5472 case spv::OpTypeInt:
5473 case spv::OpTypeFloat:
5474 case spv::OpTypeArray:
5475 case spv::OpTypeVector:
5476 case spv::OpTypeFunction: {
5477 WriteWordCountAndOpcode(Inst);
5478 WriteResultID(Inst);
5479 for (uint32_t i = 0; i < Ops.size(); i++) {
5480 WriteOperand(Ops[i]);
5481 }
5482 break;
5483 }
5484 case spv::OpFunction:
5485 case spv::OpFunctionParameter:
5486 case spv::OpAccessChain:
5487 case spv::OpPtrAccessChain:
5488 case spv::OpInBoundsAccessChain:
5489 case spv::OpUConvert:
5490 case spv::OpSConvert:
5491 case spv::OpConvertFToU:
5492 case spv::OpConvertFToS:
5493 case spv::OpConvertUToF:
5494 case spv::OpConvertSToF:
5495 case spv::OpFConvert:
5496 case spv::OpConvertPtrToU:
5497 case spv::OpConvertUToPtr:
5498 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005499 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005500 case spv::OpIAdd:
5501 case spv::OpFAdd:
5502 case spv::OpISub:
5503 case spv::OpFSub:
5504 case spv::OpIMul:
5505 case spv::OpFMul:
5506 case spv::OpUDiv:
5507 case spv::OpSDiv:
5508 case spv::OpFDiv:
5509 case spv::OpUMod:
5510 case spv::OpSRem:
5511 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005512 case spv::OpUMulExtended:
5513 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005514 case spv::OpBitwiseOr:
5515 case spv::OpBitwiseXor:
5516 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005517 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005518 case spv::OpShiftLeftLogical:
5519 case spv::OpShiftRightLogical:
5520 case spv::OpShiftRightArithmetic:
5521 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005522 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005523 case spv::OpCompositeExtract:
5524 case spv::OpVectorExtractDynamic:
5525 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005526 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005527 case spv::OpVectorInsertDynamic:
5528 case spv::OpVectorShuffle:
5529 case spv::OpIEqual:
5530 case spv::OpINotEqual:
5531 case spv::OpUGreaterThan:
5532 case spv::OpUGreaterThanEqual:
5533 case spv::OpULessThan:
5534 case spv::OpULessThanEqual:
5535 case spv::OpSGreaterThan:
5536 case spv::OpSGreaterThanEqual:
5537 case spv::OpSLessThan:
5538 case spv::OpSLessThanEqual:
5539 case spv::OpFOrdEqual:
5540 case spv::OpFOrdGreaterThan:
5541 case spv::OpFOrdGreaterThanEqual:
5542 case spv::OpFOrdLessThan:
5543 case spv::OpFOrdLessThanEqual:
5544 case spv::OpFOrdNotEqual:
5545 case spv::OpFUnordEqual:
5546 case spv::OpFUnordGreaterThan:
5547 case spv::OpFUnordGreaterThanEqual:
5548 case spv::OpFUnordLessThan:
5549 case spv::OpFUnordLessThanEqual:
5550 case spv::OpFUnordNotEqual:
5551 case spv::OpExtInst:
5552 case spv::OpIsInf:
5553 case spv::OpIsNan:
5554 case spv::OpAny:
5555 case spv::OpAll:
5556 case spv::OpUndef:
5557 case spv::OpConstantNull:
5558 case spv::OpLogicalOr:
5559 case spv::OpLogicalAnd:
5560 case spv::OpLogicalNot:
5561 case spv::OpLogicalNotEqual:
5562 case spv::OpConstantComposite:
5563 case spv::OpSpecConstantComposite:
5564 case spv::OpConstantTrue:
5565 case spv::OpConstantFalse:
5566 case spv::OpConstant:
5567 case spv::OpSpecConstant:
5568 case spv::OpVariable:
5569 case spv::OpFunctionCall:
5570 case spv::OpSampledImage:
5571 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005572 case spv::OpImageQuerySize:
David Neto22f144c2017-06-12 14:26:21 -04005573 case spv::OpSelect:
5574 case spv::OpPhi:
5575 case spv::OpLoad:
5576 case spv::OpAtomicIAdd:
5577 case spv::OpAtomicISub:
5578 case spv::OpAtomicExchange:
5579 case spv::OpAtomicIIncrement:
5580 case spv::OpAtomicIDecrement:
5581 case spv::OpAtomicCompareExchange:
5582 case spv::OpAtomicUMin:
5583 case spv::OpAtomicSMin:
5584 case spv::OpAtomicUMax:
5585 case spv::OpAtomicSMax:
5586 case spv::OpAtomicAnd:
5587 case spv::OpAtomicOr:
5588 case spv::OpAtomicXor:
5589 case spv::OpDot: {
5590 WriteWordCountAndOpcode(Inst);
5591 WriteOperand(Ops[0]);
5592 WriteResultID(Inst);
5593 for (uint32_t i = 1; i < Ops.size(); i++) {
5594 WriteOperand(Ops[i]);
5595 }
5596 break;
5597 }
5598 }
5599 }
5600}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005601
alan-bakerb6b09dc2018-11-08 16:59:28 -05005602bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005603 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005604 case Type::HalfTyID:
5605 case Type::FloatTyID:
5606 case Type::DoubleTyID:
5607 case Type::IntegerTyID:
5608 case Type::VectorTyID:
5609 return true;
5610 case Type::PointerTyID: {
5611 const PointerType *pointer_type = cast<PointerType>(type);
5612 if (pointer_type->getPointerAddressSpace() !=
5613 AddressSpace::UniformConstant) {
5614 auto pointee_type = pointer_type->getPointerElementType();
5615 if (pointee_type->isStructTy() &&
5616 cast<StructType>(pointee_type)->isOpaque()) {
5617 // Images and samplers are not nullable.
5618 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005619 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005620 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005621 return true;
5622 }
5623 case Type::ArrayTyID:
5624 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5625 case Type::StructTyID: {
5626 const StructType *struct_type = cast<StructType>(type);
5627 // Images and samplers are not nullable.
5628 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005629 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005630 for (const auto element : struct_type->elements()) {
5631 if (!IsTypeNullable(element))
5632 return false;
5633 }
5634 return true;
5635 }
5636 default:
5637 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005638 }
5639}
Alan Bakerfcda9482018-10-02 17:09:59 -04005640
5641void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5642 if (auto *offsets_md =
5643 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5644 // Metdata is stored as key-value pair operands. The first element of each
5645 // operand is the type and the second is a vector of offsets.
5646 for (const auto *operand : offsets_md->operands()) {
5647 const auto *pair = cast<MDTuple>(operand);
5648 auto *type =
5649 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5650 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5651 std::vector<uint32_t> offsets;
5652 for (const Metadata *offset_md : offset_vector->operands()) {
5653 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005654 offsets.push_back(static_cast<uint32_t>(
5655 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005656 }
5657 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5658 }
5659 }
5660
5661 if (auto *sizes_md =
5662 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5663 // Metadata is stored as key-value pair operands. The first element of each
5664 // operand is the type and the second is a triple of sizes: type size in
5665 // bits, store size and alloc size.
5666 for (const auto *operand : sizes_md->operands()) {
5667 const auto *pair = cast<MDTuple>(operand);
5668 auto *type =
5669 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5670 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5671 uint64_t type_size_in_bits =
5672 cast<ConstantInt>(
5673 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5674 ->getZExtValue();
5675 uint64_t type_store_size =
5676 cast<ConstantInt>(
5677 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5678 ->getZExtValue();
5679 uint64_t type_alloc_size =
5680 cast<ConstantInt>(
5681 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5682 ->getZExtValue();
5683 RemappedUBOTypeSizes.insert(std::make_pair(
5684 type, std::make_tuple(type_size_in_bits, type_store_size,
5685 type_alloc_size)));
5686 }
5687 }
5688}
5689
5690uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5691 const DataLayout &DL) {
5692 auto iter = RemappedUBOTypeSizes.find(type);
5693 if (iter != RemappedUBOTypeSizes.end()) {
5694 return std::get<0>(iter->second);
5695 }
5696
5697 return DL.getTypeSizeInBits(type);
5698}
5699
5700uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5701 auto iter = RemappedUBOTypeSizes.find(type);
5702 if (iter != RemappedUBOTypeSizes.end()) {
5703 return std::get<1>(iter->second);
5704 }
5705
5706 return DL.getTypeStoreSize(type);
5707}
5708
5709uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5710 auto iter = RemappedUBOTypeSizes.find(type);
5711 if (iter != RemappedUBOTypeSizes.end()) {
5712 return std::get<2>(iter->second);
5713 }
5714
5715 return DL.getTypeAllocSize(type);
5716}
alan-baker5b86ed72019-02-15 08:26:50 -05005717
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005718void SPIRVProducerPass::setVariablePointersCapabilities(
5719 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005720 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5721 setVariablePointersStorageBuffer(true);
5722 } else {
5723 setVariablePointers(true);
5724 }
5725}
5726
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005727Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005728 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5729 return GetBasePointer(gep->getPointerOperand());
5730 }
5731
5732 // Conservatively return |v|.
5733 return v;
5734}
5735
5736bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5737 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5738 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5739 if (lhs_call->getCalledFunction()->getName().startswith(
5740 clspv::ResourceAccessorFunction()) &&
5741 rhs_call->getCalledFunction()->getName().startswith(
5742 clspv::ResourceAccessorFunction())) {
5743 // For resource accessors, match descriptor set and binding.
5744 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5745 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5746 return true;
5747 } else if (lhs_call->getCalledFunction()->getName().startswith(
5748 clspv::WorkgroupAccessorFunction()) &&
5749 rhs_call->getCalledFunction()->getName().startswith(
5750 clspv::WorkgroupAccessorFunction())) {
5751 // For workgroup resources, match spec id.
5752 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5753 return true;
5754 }
5755 }
5756 }
5757
5758 return false;
5759}
5760
// Returns true if |inst| (a pointer-producing select or phi into storage
// buffer storage) can be shown to choose among pointers based on the same
// underlying object. Null bases — and undef bases when -hack-undef is on —
// are treated as wildcards that satisfy the constraint, as are distinct
// calls to the same resource accessor (see sameResource). Conservatively
// returns false for anything else.
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  // Under -hack-undef, undef values are treated like null for this analysis.
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is a null, then we satisfy the same
    // object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    // Two accessor calls naming the same descriptor also count as the same
    // object.
    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    // |value| holds the first non-null (and non-undef, under -hack-undef)
    // base seen; every other incoming base must equal it, be a null/undef
    // wildcard, or name the same resource.
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same
      // object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}
alan-bakere9308012019-03-15 10:25:13 -04005822
5823bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5824 if (!Arg.getType()->isPointerTy() ||
5825 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5826 // Only SSBOs need to be annotated as coherent.
5827 return false;
5828 }
5829
5830 DenseSet<Value *> visited;
5831 std::vector<Value *> stack;
5832 for (auto *U : Arg.getParent()->users()) {
5833 if (auto *call = dyn_cast<CallInst>(U)) {
5834 stack.push_back(call->getOperand(Arg.getArgNo()));
5835 }
5836 }
5837
5838 while (!stack.empty()) {
5839 Value *v = stack.back();
5840 stack.pop_back();
5841
5842 if (!visited.insert(v).second)
5843 continue;
5844
5845 auto *resource_call = dyn_cast<CallInst>(v);
5846 if (resource_call &&
5847 resource_call->getCalledFunction()->getName().startswith(
5848 clspv::ResourceAccessorFunction())) {
5849 // If this is a resource accessor function, check if the coherent operand
5850 // is set.
5851 const auto coherent =
5852 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5853 ->getZExtValue());
5854 if (coherent == 1)
5855 return true;
5856 } else if (auto *arg = dyn_cast<Argument>(v)) {
5857 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005858 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005859 if (auto *call = dyn_cast<CallInst>(U)) {
5860 stack.push_back(call->getOperand(arg->getArgNo()));
5861 }
5862 }
5863 } else if (auto *user = dyn_cast<User>(v)) {
5864 // If this is a user, traverse all operands that could lead to resource
5865 // variables.
5866 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5867 Value *operand = user->getOperand(i);
5868 if (operand->getType()->isPointerTy() &&
5869 operand->getType()->getPointerAddressSpace() ==
5870 clspv::AddressSpace::Global) {
5871 stack.push_back(operand);
5872 }
5873 }
5874 }
5875 }
5876
5877 // No coherent resource variables encountered.
5878 return false;
5879}
alan-baker06cad652019-12-03 17:56:47 -05005880
// Precomputes SPIR-V structured control flow information for every defined
// function in |module|: for each loop header, its merge and continue blocks
// (recorded in MergeBlocks and ContinueBlocks), and for each remaining
// conditional branch that is not a loop back-edge, a selection merge block.
void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    // Walk blocks in the structured order used elsewhere by this pass so
    // loop headers are seen before the branches inside their bodies.
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        // The loop's single exit becomes the merge block.
        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // StructurizeCFG pass converts CFG into triangle shape and the cfg
          // has regions with single entry/exit. As a result, loop should not
          // have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
          // block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether block dominates block with back-edge.
            // The loop latch is the single block with a back-edge. If it was
            // possible, StructurizeCFG made the loop conform to this
            // requirement, otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        // Determine whether |BB| is a latch (back-edge source) for any
        // enclosing loop; such branches are handled by the loop header's
        // structured info and get no selection merge.
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // StructurizeCFG pass already manipulated CFG. Just use false block
            // of branch instruction as merge block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}