// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() { return Type; };
  uint32_t getNumID() { return LiteralNum[0]; };
  std::string getLiteralStr() { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
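      // For example (illustrative): a 4-character string plus its null byte
      // needs 5 bytes, which rounds up to (4 + 4) / 4 = 2 words.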
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
      : contents_(init.begin(), init.end()) {}
  operator ArrayRef<SPIRVOperand *>() { return contents_; }
  void push_back(SPIRVOperand *op) { contents_.push_back(op); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  SPIRVOperand *&operator[](size_t i) { return contents_[i]; }

  const SmallVector<SPIRVOperand *, 8> &getOperands() const {
    return contents_;
  }

private:
  SmallVector<SPIRVOperand *, 8> contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
  list.push_back(elem);
  return list;
}

SPIRVOperand *MkNum(uint32_t num) {
  return new SPIRVOperand(LITERAL_INTEGER, num);
}
SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_INTEGER, num_vec);
}
SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_FLOAT, num_vec);
}
SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
SPIRVOperand *MkString(StringRef str) {
  return new SPIRVOperand(LITERAL_STRING, str);
}
David Neto22f144c2017-06-12 14:26:21 -0400170struct SPIRVInstruction {
David Neto87846742018-04-11 17:36:22 -0400171 // Create an instruction with an opcode and no result ID, and with the given
172 // operands. This computes its own word count.
173 explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
174 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
175 Operands(Ops.begin(), Ops.end()) {
176 for (auto *operand : Ops) {
David Netoee2660d2018-06-28 16:31:29 -0400177 WordCount += uint16_t(operand->GetNumWords());
David Neto87846742018-04-11 17:36:22 -0400178 }
179 }
180 // Create an instruction with an opcode and a no-zero result ID, and
181 // with the given operands. This computes its own word count.
182 explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
David Neto22f144c2017-06-12 14:26:21 -0400183 ArrayRef<SPIRVOperand *> Ops)
David Neto87846742018-04-11 17:36:22 -0400184 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
185 Operands(Ops.begin(), Ops.end()) {
186 if (ResID == 0) {
187 llvm_unreachable("Result ID of 0 was provided");
188 }
189 for (auto *operand : Ops) {
190 WordCount += operand->GetNumWords();
191 }
192 }
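  // For example (illustrative): SPIRVInstruction(spv::OpTypeInt, some_id,
  // {MkNum(32), MkNum(1)}) yields WordCount == 4: the combined
  // word-count/opcode word, the result ID, and two literal operand words.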

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<SPIRVOperand *, 4> Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, uint32_t> SamplerMapIndexToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // If we have a sampler map, we might have literal samplers to generate.
  if (0 < getSamplerMap().size()) {
    GenerateSamplers(module);
  }

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
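    // Reassemble each 32-bit word from four consecutive bytes of the binary,
    // least-significant byte first (little-endian).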
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
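  // The SPIR-V module header is five words: magic number, version, generator,
  // ID bound, and schema. The bound written here is a placeholder; it is
  // patched in patchHeader() once nextID is final.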
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
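  // (The generator word holds the vendor ID in its upper 16 bits; the lower
  // 16 bits, a tool-specific version, are left as zero here.)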
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR, such as global variables for arguments,
  // constants, and pointer types for argument access. This information is
  // artificial because we need Vulkan SPIR-V output. This function is
  // executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constants 0 and 1, so the constants are added
          // here.
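          // For example (illustrative): "zext i1 %c to i32" lowers to
          // OpSelect %c, 1, 0, while "sext i1 %c to i32" lowers to
          // OpSelect %c, -1, 0.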

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsGetImageHeight(callee_name) ||
              clspv::IsGetImageWidth(callee_name)) {
            FindType(VectorType::get(Type::getInt32Ty(Context), 2));
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact that the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

bool SPIRVProducerPass::FindExtInst(Module &M) {
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
          // Check whether this call is for extended instructions.
          auto callee_name = Callee->getName();
          const glsl::ExtInst EInst = getExtInstEnum(callee_name);
          const glsl::ExtInst IndirectEInst =
              getIndirectExtInstEnum(callee_name);

          HasExtInst |=
              (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);

          if (IndirectEInst) {
            // Register extra constants if needed.

            // Registers a type and constant for computing the result of the
            // given instruction. If the result of the instruction is a vector,
            // then make a splat vector constant with the same number of
            // elements.
            auto register_constant = [this, &I](Constant *constant) {
              FindType(constant->getType());
              FindConstant(constant);
              if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
                // Register the splat vector of the value with the same
                // width as the result of the instruction.
                auto *vec_constant = ConstantVector::getSplat(
                    static_cast<unsigned>(vectorTy->getNumElements()),
                    constant);
                FindConstant(vec_constant);
                FindType(vec_constant->getType());
              }
            };
            switch (IndirectEInst) {
            case glsl::ExtInstFindUMsb:
              // clz needs OpExtInst and OpISub with constant 31, or a splat
              // vector of 31. Add it to the constant list here.
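              // (clz(x) is then computed as 31 - FindUMsb(x) for 32-bit
              // operands.)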
              register_constant(
                  ConstantInt::get(Type::getInt32Ty(Context), 31));
              break;
            case glsl::ExtInstAcos:
            case glsl::ExtInstAsin:
            case glsl::ExtInstAtan:
            case glsl::ExtInstAtan2:
              // We need 1/pi for acospi, asinpi, atan2pi.
              register_constant(
                  ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
              break;
            default:
              assert(false && "internally inconsistent");
            }
          }
        }
      }
    }
  }

  return HasExtInst;
}

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

void SPIRVProducerPass::FindTypePerFunc(Function &F) {
  // Investigate function's type.
  FunctionType *FTy = F.getFunctionType();

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    // Handle a regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
      SmallVector<Type *, 4> NewFuncParamTys;
      for (unsigned i = 0; i < FTy->getNumParams(); i++) {
        Type *ParamTy = FTy->getParamType(i);
        if (i == GVCstArgIdx) {
          Type *EleTy = ParamTy->getPointerElementType();
          ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
        }

        NewFuncParamTys.push_back(ParamTy);
      }

      FunctionType *NewFTy =
          FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
      GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
      FTy = NewFTy;
    }

    FindType(FTy);
  } else {
    // As kernel functions do not have parameters, create a new parameterless
    // function type and add it to the type map.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FindType(NewFTy);
  }

1132 // Investigate instructions' type in function body.
1133 for (BasicBlock &BB : F) {
1134 for (Instruction &I : BB) {
1135 if (isa<ShuffleVectorInst>(I)) {
1136 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1137 // Ignore type for mask of shuffle vector instruction.
1138 if (i == 2) {
1139 continue;
1140 }
1141
1142 Value *Op = I.getOperand(i);
1143 if (!isa<MetadataAsValue>(Op)) {
1144 FindType(Op->getType());
1145 }
1146 }
1147
1148 FindType(I.getType());
1149 continue;
1150 }
1151
David Neto862b7d82018-06-14 18:48:37 -04001152 CallInst *Call = dyn_cast<CallInst>(&I);
1153
1154 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001155 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001156 // This is a fake call representing access to a resource variable.
1157 // We handle that elsewhere.
1158 continue;
1159 }
1160
Alan Baker202c8c72018-08-13 13:47:44 -04001161 if (Call && Call->getCalledFunction()->getName().startswith(
1162 clspv::WorkgroupAccessorFunction())) {
1163 // This is a fake call representing access to a workgroup variable.
1164 // We handle that elsewhere.
1165 continue;
1166 }
1167
David Neto22f144c2017-06-12 14:26:21 -04001168 // Work through the operands of the instruction.
1169 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1170 Value *const Op = I.getOperand(i);
1171 // If any of the operands is a constant, find the type!
1172 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1173 FindType(Op->getType());
1174 }
1175 }
1176
1177 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001178 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001179 // Don't check operand types for call instructions.
1180 break;
1181 }
Alan Baker202c8c72018-08-13 13:47:44 -04001182 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1183 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1184 clspv::WorkgroupAccessorFunction())) {
1185 // This is a fake call representing access to a workgroup variable.
1186 // We handle that elsewhere.
1187 continue;
1188 }
1189 }
David Neto22f144c2017-06-12 14:26:21 -04001190 if (!isa<MetadataAsValue>(&Op)) {
1191 FindType(Op->getType());
1192 continue;
1193 }
1194 }
1195
David Neto22f144c2017-06-12 14:26:21 -04001196 // We don't want to track the type of this call as we are going to replace
1197 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001198 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001199 Call->getCalledFunction()->getName())) {
1200 continue;
1201 }
1202
1203 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1204 // If gep's base operand has ModuleScopePrivate address space, make gep
1205 // return ModuleScopePrivate address space.
1206 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1207 // Add pointer type with private address space for global constant to
1208 // type list.
1209 Type *EleTy = I.getType()->getPointerElementType();
1210 Type *NewPTy =
1211 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1212
1213 FindType(NewPTy);
1214 continue;
1215 }
1216 }
1217
1218 FindType(I.getType());
1219 }
1220 }
1221}
1222
David Neto862b7d82018-06-14 18:48:37 -04001223void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1224 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001225 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001226 0 < getSamplerMap().size()) {
1227 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1228 if (!SamplerStructTy) {
1229 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1230 }
1231
1232 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1233
1234 FindType(SamplerTy);
1235 }
1236}
1237
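// Resource variables are the descriptor-backed stand-ins for kernel
// arguments (buffers, POD args, images, samplers). Register every type they
// need, and note which struct types need Block decorations and explicit
// layout.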
1238void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1239 // Record types so they are generated.
1240 TypesNeedingLayout.reset();
1241 StructTypesNeedingBlock.reset();
1242
1243 // To match older clspv codegen, generate the float type first if required
1244 // for images.
1245 for (const auto *info : ModuleOrderedResourceVars) {
1246 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1247 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001248 if (IsIntImageType(info->var_fn->getReturnType())) {
1249 // Nothing for now...
1250 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1251 FindType(Type::getInt32Ty(M.getContext()));
1252 }
1253
1254 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001255 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001256 }
1257 }
1258
1259 for (const auto *info : ModuleOrderedResourceVars) {
1260 Type *type = info->var_fn->getReturnType();
1261
1262 switch (info->arg_kind) {
1263 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001264 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001265 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1266 StructTypesNeedingBlock.insert(sty);
1267 } else {
1268 errs() << *type << "\n";
1269 llvm_unreachable("Buffer arguments must map to structures!");
1270 }
1271 break;
1272 case clspv::ArgKind::Pod:
1273 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1274 StructTypesNeedingBlock.insert(sty);
1275 } else {
1276 errs() << *type << "\n";
1277 llvm_unreachable("POD arguments must map to structures!");
1278 }
1279 break;
1280 case clspv::ArgKind::ReadOnlyImage:
1281 case clspv::ArgKind::WriteOnlyImage:
1282 case clspv::ArgKind::Sampler:
1283 // Sampler and image types map to the pointee type but
1284 // in the uniform constant address space.
1285 type = PointerType::get(type->getPointerElementType(),
1286 clspv::AddressSpace::UniformConstant);
1287 break;
1288 default:
1289 break;
1290 }
1291
1292 // The converted type is the type of the OpVariable we will generate.
1293 // If the pointee type is an array of size zero, FindType will convert it
1294 // to a runtime array.
1295 FindType(type);
1296 }
1297
alan-bakerdcd97412019-09-16 15:32:30 -04001298 // If module constants are clustered in a storage buffer then that struct
1299 // needs layout decorations.
1300 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1301 for (GlobalVariable &GV : M.globals()) {
1302 PointerType *PTy = cast<PointerType>(GV.getType());
1303 const auto AS = PTy->getAddressSpace();
1304 const bool module_scope_constant_external_init =
1305 (AS == AddressSpace::Constant) && GV.hasInitializer();
1306 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1307 if (module_scope_constant_external_init &&
1308 spv::BuiltInMax == BuiltinType) {
1309 StructTypesNeedingBlock.insert(
1310 cast<StructType>(PTy->getPointerElementType()));
1311 }
1312 }
1313 }
1314
David Neto862b7d82018-06-14 18:48:37 -04001315 // Traverse the arrays and structures underneath each Block, and
1316 // mark them as needing layout.
1317 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1318 StructTypesNeedingBlock.end());
1319 while (!work_list.empty()) {
1320 Type *type = work_list.back();
1321 work_list.pop_back();
1322 TypesNeedingLayout.insert(type);
1323 switch (type->getTypeID()) {
1324 case Type::ArrayTyID:
1325 work_list.push_back(type->getArrayElementType());
1326 if (!Hack_generate_runtime_array_stride_early) {
1327 // Remember this array type for deferred decoration.
1328 TypesNeedingArrayStride.insert(type);
1329 }
1330 break;
1331 case Type::StructTyID:
1332 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1333 work_list.push_back(elem_ty);
1334 }
1335 default:
1336 // This type and its contained types don't get layout.
1337 break;
1338 }
1339 }
1340}
1341
Alan Baker202c8c72018-08-13 13:47:44 -04001342void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1343 // The SpecId assignment for pointer-to-local arguments is recorded in
1344 // module-level metadata. Translate that information into local argument
1345 // information.
1346 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001347 if (!nmd)
1348 return;
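  // Each metadata operand below is a tuple: (kernel function, argument index,
  // spec id).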
Alan Baker202c8c72018-08-13 13:47:44 -04001349 for (auto operand : nmd->operands()) {
1350 MDTuple *tuple = cast<MDTuple>(operand);
1351 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1352 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001353 ConstantAsMetadata *arg_index_md =
1354 cast<ConstantAsMetadata>(tuple->getOperand(1));
1355 int arg_index = static_cast<int>(
1356 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1357 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001358
1359 ConstantAsMetadata *spec_id_md =
1360 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001361 int spec_id = static_cast<int>(
1362 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001363
1364 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1365 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001366 if (LocalSpecIdInfoMap.count(spec_id))
1367 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001368
1369 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
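    // Reserve four consecutive ids. The last three are used later in
    // GenerateSPIRVTypes for the array-size spec constant, the array type,
    // and the pointer-to-array type; the first is presumably reserved for the
    // workgroup variable itself.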
1370 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1371 nextID + 1, nextID + 2,
1372 nextID + 3, spec_id};
1373 LocalSpecIdInfoMap[spec_id] = info;
1374 nextID += 4;
1375
1376 // Ensure the types necessary for this argument get generated.
1377 Type *IdxTy = Type::getInt32Ty(M.getContext());
1378 FindConstant(ConstantInt::get(IdxTy, 0));
1379 FindType(IdxTy);
1380 FindType(arg->getType());
1381 }
1382}
1383
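// Recursively register Ty and the types it depends on in the module's type
// list, skipping anything already present.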
David Neto22f144c2017-06-12 14:26:21 -04001384void SPIRVProducerPass::FindType(Type *Ty) {
1385 TypeList &TyList = getTypeList();
1386
1387 if (0 != TyList.idFor(Ty)) {
1388 return;
1389 }
1390
1391 if (Ty->isPointerTy()) {
1392 auto AddrSpace = Ty->getPointerAddressSpace();
1393 if ((AddressSpace::Constant == AddrSpace) ||
1394 (AddressSpace::Global == AddrSpace)) {
1395 auto PointeeTy = Ty->getPointerElementType();
1396
1397 if (PointeeTy->isStructTy() &&
1398 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1399 FindType(PointeeTy);
1400 auto ActualPointerTy =
1401 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1402 FindType(ActualPointerTy);
1403 return;
1404 }
1405 }
1406 }
1407
David Neto862b7d82018-06-14 18:48:37 -04001408 // By convention, LLVM array type with 0 elements will map to
 1409 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
 1410 // has a constant number of elements, so we also need the i32 type used
 1411 // for the length constant.
1412 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1413 if (arrayTy->getNumElements() > 0) {
1414 LLVMContext &Context = Ty->getContext();
1415 FindType(Type::getInt32Ty(Context));
1416 }
David Neto22f144c2017-06-12 14:26:21 -04001417 }
1418
1419 for (Type *SubTy : Ty->subtypes()) {
1420 FindType(SubTy);
1421 }
1422
1423 TyList.insert(Ty);
1424}
1425
1426void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1427 // If the global variable has a (non undef) initializer.
1428 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001429 // Generate the constant if it's not the initializer to a module scope
1430 // constant that we will expect in a storage buffer.
1431 const bool module_scope_constant_external_init =
1432 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1433 clspv::Option::ModuleConstantsInStorageBuffer();
1434 if (!module_scope_constant_external_init) {
1435 FindConstant(GV.getInitializer());
1436 }
David Neto22f144c2017-06-12 14:26:21 -04001437 }
1438}
1439
1440void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1441 // Investigate constants in function body.
1442 for (BasicBlock &BB : F) {
1443 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001444 if (auto *call = dyn_cast<CallInst>(&I)) {
1445 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001446 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001447 // We've handled these constants elsewhere, so skip it.
1448 continue;
1449 }
Alan Baker202c8c72018-08-13 13:47:44 -04001450 if (name.startswith(clspv::ResourceAccessorFunction())) {
1451 continue;
1452 }
1453 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001454 continue;
1455 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001456 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1457 // Skip the first operand that has the SPIR-V Opcode
1458 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1459 if (isa<Constant>(I.getOperand(i)) &&
1460 !isa<GlobalValue>(I.getOperand(i))) {
1461 FindConstant(I.getOperand(i));
1462 }
1463 }
1464 continue;
1465 }
David Neto22f144c2017-06-12 14:26:21 -04001466 }
1467
1468 if (isa<AllocaInst>(I)) {
 1469 // An alloca instruction has a constant element count. Ignore it.
1470 continue;
1471 } else if (isa<ShuffleVectorInst>(I)) {
1472 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1473 // Ignore constant for mask of shuffle vector instruction.
1474 if (i == 2) {
1475 continue;
1476 }
1477
1478 if (isa<Constant>(I.getOperand(i)) &&
1479 !isa<GlobalValue>(I.getOperand(i))) {
1480 FindConstant(I.getOperand(i));
1481 }
1482 }
1483
1484 continue;
1485 } else if (isa<InsertElementInst>(I)) {
1486 // Handle InsertElement with <4 x i8> specially.
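        // A <4 x i8> vector may be represented as a single packed i32, so
        // inserting an element needs a 0xFF byte mask and a shift amount of
        // 8 * index; register those constants here.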
1487 Type *CompositeTy = I.getOperand(0)->getType();
1488 if (is4xi8vec(CompositeTy)) {
1489 LLVMContext &Context = CompositeTy->getContext();
1490 if (isa<Constant>(I.getOperand(0))) {
1491 FindConstant(I.getOperand(0));
1492 }
1493
1494 if (isa<Constant>(I.getOperand(1))) {
1495 FindConstant(I.getOperand(1));
1496 }
1497
1498 // Add mask constant 0xFF.
1499 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1500 FindConstant(CstFF);
1501
1502 // Add shift amount constant.
1503 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1504 uint64_t Idx = CI->getZExtValue();
1505 Constant *CstShiftAmount =
1506 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1507 FindConstant(CstShiftAmount);
1508 }
1509
1510 continue;
1511 }
1512
1513 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1514 // Ignore constant for index of InsertElement instruction.
1515 if (i == 2) {
1516 continue;
1517 }
1518
1519 if (isa<Constant>(I.getOperand(i)) &&
1520 !isa<GlobalValue>(I.getOperand(i))) {
1521 FindConstant(I.getOperand(i));
1522 }
1523 }
1524
1525 continue;
1526 } else if (isa<ExtractElementInst>(I)) {
1527 // Handle ExtractElement with <4 x i8> specially.
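        // Extraction from the packed i32 form likewise needs the 0xFF mask,
        // plus either a constant shift of 8 * index or the constant 8 when
        // the index is not a compile-time constant.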
1528 Type *CompositeTy = I.getOperand(0)->getType();
1529 if (is4xi8vec(CompositeTy)) {
1530 LLVMContext &Context = CompositeTy->getContext();
1531 if (isa<Constant>(I.getOperand(0))) {
1532 FindConstant(I.getOperand(0));
1533 }
1534
1535 // Add mask constant 0xFF.
1536 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1537 FindConstant(CstFF);
1538
1539 // Add shift amount constant.
1540 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1541 uint64_t Idx = CI->getZExtValue();
1542 Constant *CstShiftAmount =
1543 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1544 FindConstant(CstShiftAmount);
1545 } else {
1546 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1547 FindConstant(Cst8);
1548 }
1549
1550 continue;
1551 }
1552
1553 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1554 // Ignore constant for index of ExtractElement instruction.
1555 if (i == 1) {
1556 continue;
1557 }
1558
1559 if (isa<Constant>(I.getOperand(i)) &&
1560 !isa<GlobalValue>(I.getOperand(i))) {
1561 FindConstant(I.getOperand(i));
1562 }
1563 }
1564
1565 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001566 } else if ((Instruction::Xor == I.getOpcode()) &&
1567 I.getType()->isIntegerTy(1)) {
 1568 // Special case: an i1 Xor where one argument is the constant 1 (true)
 1569 // lowers to OpLogicalNot in SPIR-V, so we don't need that constant
 1570 // itself.
David Neto22f144c2017-06-12 14:26:21 -04001571 bool foundConstantTrue = false;
1572 for (Use &Op : I.operands()) {
1573 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1574 auto CI = cast<ConstantInt>(Op);
1575
1576 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001577 // If we already found the true constant, we might (probably only
 1578 // at -O0) have an OpLogicalNot that takes a constant argument, so
 1579 // record that constant anyway.
David Neto22f144c2017-06-12 14:26:21 -04001580 FindConstant(Op);
1581 } else {
1582 foundConstantTrue = true;
1583 }
1584 }
1585 }
1586
1587 continue;
David Netod2de94a2017-08-28 17:27:47 -04001588 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001589 // Special case if i8 is not generally handled.
1590 if (!clspv::Option::Int8Support()) {
1591 // For truncation to i8 we mask against 255.
1592 Type *ToTy = I.getType();
1593 if (8u == ToTy->getPrimitiveSizeInBits()) {
1594 LLVMContext &Context = ToTy->getContext();
1595 Constant *Cst255 =
1596 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1597 FindConstant(Cst255);
1598 }
David Netod2de94a2017-08-28 17:27:47 -04001599 }
Neil Henning39672102017-09-29 14:33:13 +01001600 } else if (isa<AtomicRMWInst>(I)) {
1601 LLVMContext &Context = I.getContext();
1602
1603 FindConstant(
1604 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1605 FindConstant(ConstantInt::get(
1606 Type::getInt32Ty(Context),
1607 spv::MemorySemanticsUniformMemoryMask |
1608 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001609 }
1610
1611 for (Use &Op : I.operands()) {
1612 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1613 FindConstant(Op);
1614 }
1615 }
1616 }
1617 }
1618}
1619
1620void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001621 ValueList &CstList = getConstantList();
1622
David Netofb9a7972017-08-25 17:08:24 -04001623 // If V is already tracked, ignore it.
1624 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001625 return;
1626 }
1627
David Neto862b7d82018-06-14 18:48:37 -04001628 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1629 return;
1630 }
1631
David Neto22f144c2017-06-12 14:26:21 -04001632 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001633 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001634
1635 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001636 if (is4xi8vec(CstTy)) {
1637 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001638 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001639 }
1640 }
1641
1642 if (Cst->getNumOperands()) {
1643 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1644 ++I) {
1645 FindConstant(*I);
1646 }
1647
David Netofb9a7972017-08-25 17:08:24 -04001648 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001649 return;
1650 } else if (const ConstantDataSequential *CDS =
1651 dyn_cast<ConstantDataSequential>(Cst)) {
1652 // Add constants for each element to constant list.
1653 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1654 Constant *EleCst = CDS->getElementAsConstant(i);
1655 FindConstant(EleCst);
1656 }
1657 }
1658
1659 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001660 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001661 }
1662}
1663
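// Map an OpenCL address space to a SPIR-V storage class. Note that the
// mapping for Constant depends on whether constant arguments are placed in
// uniform buffers (clspv::Option::ConstantArgsInUniformBuffer below).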
1664spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1665 switch (AddrSpace) {
1666 default:
1667 llvm_unreachable("Unsupported OpenCL address space");
1668 case AddressSpace::Private:
1669 return spv::StorageClassFunction;
1670 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001671 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001672 case AddressSpace::Constant:
1673 return clspv::Option::ConstantArgsInUniformBuffer()
1674 ? spv::StorageClassUniform
1675 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001676 case AddressSpace::Input:
1677 return spv::StorageClassInput;
1678 case AddressSpace::Local:
1679 return spv::StorageClassWorkgroup;
1680 case AddressSpace::UniformConstant:
1681 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001682 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001683 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001684 case AddressSpace::ModuleScopePrivate:
1685 return spv::StorageClassPrivate;
1686 }
1687}
1688
David Neto862b7d82018-06-14 18:48:37 -04001689spv::StorageClass
1690SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1691 switch (arg_kind) {
1692 case clspv::ArgKind::Buffer:
1693 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001694 case clspv::ArgKind::BufferUBO:
1695 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001696 case clspv::ArgKind::Pod:
1697 return clspv::Option::PodArgsInUniformBuffer()
1698 ? spv::StorageClassUniform
1699 : spv::StorageClassStorageBuffer;
1700 case clspv::ArgKind::Local:
1701 return spv::StorageClassWorkgroup;
1702 case clspv::ArgKind::ReadOnlyImage:
1703 case clspv::ArgKind::WriteOnlyImage:
1704 case clspv::ArgKind::Sampler:
1705 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001706 default:
1707 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001708 }
1709}
1710
David Neto22f144c2017-06-12 14:26:21 -04001711spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1712 return StringSwitch<spv::BuiltIn>(Name)
1713 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1714 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1715 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1716 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1717 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1718 .Default(spv::BuiltInMax);
1719}
1720
1721void SPIRVProducerPass::GenerateExtInstImport() {
1722 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1723 uint32_t &ExtInstImportID = getOpExtInstImportID();
1724
1725 //
1726 // Generate OpExtInstImport.
1727 //
1728 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001729 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001730 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1731 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001732}
1733
alan-bakerb6b09dc2018-11-08 16:59:28 -05001734void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1735 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001736 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1737 ValueMapType &VMap = getValueMap();
1738 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001739 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001740
 1741 // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
 1742 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1743 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1744
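  // Emit one SPIR-V type instruction for each entry collected by the Find*
  // passes. TypeMap records the assigned result id for later lookups.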
1745 for (Type *Ty : getTypeList()) {
1746 // Update TypeMap with nextID for reference later.
1747 TypeMap[Ty] = nextID;
1748
1749 switch (Ty->getTypeID()) {
1750 default: {
1751 Ty->print(errs());
1752 llvm_unreachable("Unsupported type???");
1753 break;
1754 }
1755 case Type::MetadataTyID:
1756 case Type::LabelTyID: {
1757 // Ignore these types.
1758 break;
1759 }
1760 case Type::PointerTyID: {
1761 PointerType *PTy = cast<PointerType>(Ty);
1762 unsigned AddrSpace = PTy->getAddressSpace();
1763
1764 // For the purposes of our Vulkan SPIR-V type system, constant and global
1765 // are conflated.
1766 bool UseExistingOpTypePointer = false;
1767 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001768 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1769 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001770 // Check to see if we already created this type (for instance, if we
 1771 // had a constant <type>* and a global <type>*, the SPIR-V type would
 1772 // be created for one of them and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001773 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1774 if (0 < TypeMap.count(GlobalTy)) {
1775 TypeMap[PTy] = TypeMap[GlobalTy];
1776 UseExistingOpTypePointer = true;
1777 break;
1778 }
David Neto22f144c2017-06-12 14:26:21 -04001779 }
1780 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001781 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1782 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001783
alan-bakerb6b09dc2018-11-08 16:59:28 -05001784 // Check to see if we already created this type (for instance, if we
 1785 // had a constant <type>* and a global <type>*, the SPIR-V type would
 1786 // be created for one of them and shared by both).
1787 auto ConstantTy =
1788 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001789 if (0 < TypeMap.count(ConstantTy)) {
1790 TypeMap[PTy] = TypeMap[ConstantTy];
1791 UseExistingOpTypePointer = true;
1792 }
David Neto22f144c2017-06-12 14:26:21 -04001793 }
1794 }
1795
David Neto862b7d82018-06-14 18:48:37 -04001796 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001797
David Neto862b7d82018-06-14 18:48:37 -04001798 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001799 //
1800 // Generate OpTypePointer.
1801 //
1802
1803 // OpTypePointer
1804 // Ops[0] = Storage Class
1805 // Ops[1] = Element Type ID
1806 SPIRVOperandList Ops;
1807
David Neto257c3892018-04-11 13:19:45 -04001808 Ops << MkNum(GetStorageClass(AddrSpace))
1809 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001810
David Neto87846742018-04-11 17:36:22 -04001811 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001812 SPIRVInstList.push_back(Inst);
1813 }
David Neto22f144c2017-06-12 14:26:21 -04001814 break;
1815 }
1816 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001817 StructType *STy = cast<StructType>(Ty);
1818
1819 // Handle sampler type.
1820 if (STy->isOpaque()) {
1821 if (STy->getName().equals("opencl.sampler_t")) {
1822 //
1823 // Generate OpTypeSampler
1824 //
1825 // Empty Ops.
1826 SPIRVOperandList Ops;
1827
David Neto87846742018-04-11 17:36:22 -04001828 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001829 SPIRVInstList.push_back(Inst);
1830 break;
alan-bakerf67468c2019-11-25 15:51:49 -05001831 } else if (STy->getName().startswith("opencl.image2d_ro_t") ||
1832 STy->getName().startswith("opencl.image2d_wo_t") ||
1833 STy->getName().startswith("opencl.image3d_ro_t") ||
1834 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001835 //
1836 // Generate OpTypeImage
1837 //
1838 // Ops[0] = Sampled Type ID
1839 // Ops[1] = Dim ID
1840 // Ops[2] = Depth (Literal Number)
1841 // Ops[3] = Arrayed (Literal Number)
1842 // Ops[4] = MS (Literal Number)
1843 // Ops[5] = Sampled (Literal Number)
1844 // Ops[6] = Image Format ID
1845 //
1846 SPIRVOperandList Ops;
1847
alan-bakerf67468c2019-11-25 15:51:49 -05001848 uint32_t ImageTyID = nextID++;
1849 uint32_t SampledTyID = 0;
1850 if (STy->getName().contains(".float")) {
1851 SampledTyID = lookupType(Type::getFloatTy(Context));
1852 } else if (STy->getName().contains(".uint")) {
1853 SampledTyID = lookupType(Type::getInt32Ty(Context));
1854 } else if (STy->getName().contains(".int")) {
1855 // Generate a signed 32-bit integer if necessary.
1856 if (int32ID == 0) {
1857 int32ID = nextID++;
1858 SPIRVOperandList intOps;
1859 intOps << MkNum(32);
1860 intOps << MkNum(1);
1861 auto signed_int =
1862 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1863 SPIRVInstList.push_back(signed_int);
1864 }
1865 SampledTyID = int32ID;
1866
1867 // Generate a vec4 of the signed int if necessary.
1868 if (v4int32ID == 0) {
1869 v4int32ID = nextID++;
1870 SPIRVOperandList vecOps;
1871 vecOps << MkId(int32ID);
1872 vecOps << MkNum(4);
1873 auto int_vec =
1874 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
1875 SPIRVInstList.push_back(int_vec);
1876 }
1877 } else {
1878 // This was likely an UndefValue.
1879 SampledTyID = lookupType(Type::getFloatTy(Context));
1880 }
David Neto257c3892018-04-11 13:19:45 -04001881 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001882
1883 spv::Dim DimID = spv::Dim2D;
alan-bakerf67468c2019-11-25 15:51:49 -05001884 if (STy->getName().startswith("opencl.image3d_ro_t") ||
1885 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001886 DimID = spv::Dim3D;
1887 }
David Neto257c3892018-04-11 13:19:45 -04001888 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001889
1890 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001891 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001892
1893 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001894 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001895
1896 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001897 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001898
1899 // TODO: Set up Sampled.
1900 //
1901 // From Spec
1902 //
1903 // 0 indicates this is only known at run time, not at compile time
1904 // 1 indicates will be used with sampler
1905 // 2 indicates will be used without a sampler (a storage image)
1906 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05001907 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04001908 Sampled = 2;
1909 }
David Neto257c3892018-04-11 13:19:45 -04001910 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04001911
1912 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04001913 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04001914
alan-bakerf67468c2019-11-25 15:51:49 -05001915 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001916 SPIRVInstList.push_back(Inst);
1917 break;
1918 }
1919 }
1920
1921 //
1922 // Generate OpTypeStruct
1923 //
1924 // Ops[0] ... Ops[n] = Member IDs
1925 SPIRVOperandList Ops;
1926
1927 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04001928 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04001929 }
1930
David Neto22f144c2017-06-12 14:26:21 -04001931 uint32_t STyID = nextID;
1932
alan-bakerb6b09dc2018-11-08 16:59:28 -05001933 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001934 SPIRVInstList.push_back(Inst);
1935
1936 // Generate OpMemberDecorate.
1937 auto DecoInsertPoint =
1938 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1939 [](SPIRVInstruction *Inst) -> bool {
1940 return Inst->getOpcode() != spv::OpDecorate &&
1941 Inst->getOpcode() != spv::OpMemberDecorate &&
1942 Inst->getOpcode() != spv::OpExtInstImport;
1943 });
1944
David Netoc463b372017-08-10 15:32:21 -04001945 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04001946 // Search for the correct offsets if this type was remapped.
1947 std::vector<uint32_t> *offsets = nullptr;
1948 auto iter = RemappedUBOTypeOffsets.find(STy);
1949 if (iter != RemappedUBOTypeOffsets.end()) {
1950 offsets = &iter->second;
1951 }
David Netoc463b372017-08-10 15:32:21 -04001952
David Neto862b7d82018-06-14 18:48:37 -04001953 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04001954 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1955 MemberIdx++) {
1956 // Ops[0] = Structure Type ID
1957 // Ops[1] = Member Index(Literal Number)
1958 // Ops[2] = Decoration (Offset)
1959 // Ops[3] = Byte Offset (Literal Number)
1960 Ops.clear();
1961
David Neto257c3892018-04-11 13:19:45 -04001962 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04001963
alan-bakerb6b09dc2018-11-08 16:59:28 -05001964 auto ByteOffset =
1965 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04001966 if (offsets) {
1967 ByteOffset = (*offsets)[MemberIdx];
1968 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05001969 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04001970 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04001971 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04001972
David Neto87846742018-04-11 17:36:22 -04001973 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001974 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001975 }
1976
1977 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04001978 if (StructTypesNeedingBlock.idFor(STy)) {
1979 Ops.clear();
1980 // Use Block decorations with StorageBuffer storage class.
1981 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04001982
David Neto862b7d82018-06-14 18:48:37 -04001983 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
1984 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04001985 }
1986 break;
1987 }
1988 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05001989 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04001990
1991 if (BitWidth == 1) {
David Neto87846742018-04-11 17:36:22 -04001992 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04001993 SPIRVInstList.push_back(Inst);
1994 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05001995 if (!clspv::Option::Int8Support()) {
1996 // i8 is added to TypeMap as i32.
1997 // No matter what LLVM type is requested first, always alias the
1998 // second one's SPIR-V type to be the same as the one we generated
1999 // first.
2000 unsigned aliasToWidth = 0;
2001 if (BitWidth == 8) {
2002 aliasToWidth = 32;
2003 BitWidth = 32;
2004 } else if (BitWidth == 32) {
2005 aliasToWidth = 8;
2006 }
2007 if (aliasToWidth) {
2008 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2009 auto where = TypeMap.find(otherType);
2010 if (where == TypeMap.end()) {
2011 // Go ahead and make it, but also map the other type to it.
2012 TypeMap[otherType] = nextID;
2013 } else {
2014 // Alias this SPIR-V type the existing type.
2015 TypeMap[Ty] = where->second;
2016 break;
2017 }
David Neto391aeb12017-08-26 15:51:58 -04002018 }
David Neto22f144c2017-06-12 14:26:21 -04002019 }
2020
David Neto257c3892018-04-11 13:19:45 -04002021 SPIRVOperandList Ops;
2022 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002023
2024 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002025 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002026 }
2027 break;
2028 }
2029 case Type::HalfTyID:
2030 case Type::FloatTyID:
2031 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002032 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2033 SPIRVOperand *WidthOp =
2034 new SPIRVOperand(SPIRVOperandType::LITERAL_INTEGER, BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002035
2036 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002037 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04002038 break;
2039 }
2040 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002041 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002042 const uint64_t Length = ArrTy->getArrayNumElements();
2043 if (Length == 0) {
2044 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002045
David Neto862b7d82018-06-14 18:48:37 -04002046 // Only generate the type once.
2047 // TODO(dneto): Can it ever be generated more than once?
2048 // Doesn't LLVM type uniqueness guarantee we'll only see this
2049 // once?
2050 Type *EleTy = ArrTy->getArrayElementType();
2051 if (OpRuntimeTyMap.count(EleTy) == 0) {
2052 uint32_t OpTypeRuntimeArrayID = nextID;
2053 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002054
David Neto862b7d82018-06-14 18:48:37 -04002055 //
2056 // Generate OpTypeRuntimeArray.
2057 //
David Neto22f144c2017-06-12 14:26:21 -04002058
David Neto862b7d82018-06-14 18:48:37 -04002059 // OpTypeRuntimeArray
2060 // Ops[0] = Element Type ID
2061 SPIRVOperandList Ops;
2062 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002063
David Neto862b7d82018-06-14 18:48:37 -04002064 SPIRVInstList.push_back(
2065 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002066
David Neto862b7d82018-06-14 18:48:37 -04002067 if (Hack_generate_runtime_array_stride_early) {
2068 // Generate OpDecorate.
2069 auto DecoInsertPoint = std::find_if(
2070 SPIRVInstList.begin(), SPIRVInstList.end(),
2071 [](SPIRVInstruction *Inst) -> bool {
2072 return Inst->getOpcode() != spv::OpDecorate &&
2073 Inst->getOpcode() != spv::OpMemberDecorate &&
2074 Inst->getOpcode() != spv::OpExtInstImport;
2075 });
David Neto22f144c2017-06-12 14:26:21 -04002076
David Neto862b7d82018-06-14 18:48:37 -04002077 // Ops[0] = Target ID
2078 // Ops[1] = Decoration (ArrayStride)
2079 // Ops[2] = Stride Number(Literal Number)
2080 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002081
David Neto862b7d82018-06-14 18:48:37 -04002082 Ops << MkId(OpTypeRuntimeArrayID)
2083 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002084 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002085
David Neto862b7d82018-06-14 18:48:37 -04002086 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2087 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2088 }
2089 }
David Neto22f144c2017-06-12 14:26:21 -04002090
David Neto862b7d82018-06-14 18:48:37 -04002091 } else {
David Neto22f144c2017-06-12 14:26:21 -04002092
David Neto862b7d82018-06-14 18:48:37 -04002093 //
2094 // Generate OpConstant and OpTypeArray.
2095 //
2096
2097 //
2098 // Generate OpConstant for array length.
2099 //
2100 // Ops[0] = Result Type ID
2101 // Ops[1] .. Ops[n] = Values LiteralNumber
2102 SPIRVOperandList Ops;
2103
2104 Type *LengthTy = Type::getInt32Ty(Context);
2105 uint32_t ResTyID = lookupType(LengthTy);
2106 Ops << MkId(ResTyID);
2107
2108 assert(Length < UINT32_MAX);
2109 Ops << MkNum(static_cast<uint32_t>(Length));
2110
2111 // Add constant for length to constant list.
2112 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2113 AllocatedVMap[CstLength] = nextID;
2114 VMap[CstLength] = nextID;
2115 uint32_t LengthID = nextID;
2116
2117 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2118 SPIRVInstList.push_back(CstInst);
2119
2120 // Remember to generate ArrayStride later
2121 getTypesNeedingArrayStride().insert(Ty);
2122
2123 //
2124 // Generate OpTypeArray.
2125 //
2126 // Ops[0] = Element Type ID
2127 // Ops[1] = Array Length Constant ID
2128 Ops.clear();
2129
2130 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2131 Ops << MkId(EleTyID) << MkId(LengthID);
2132
2133 // Update TypeMap with nextID.
2134 TypeMap[Ty] = nextID;
2135
2136 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2137 SPIRVInstList.push_back(ArrayInst);
2138 }
David Neto22f144c2017-06-12 14:26:21 -04002139 break;
2140 }
2141 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002142 // <4 x i8> is changed to i32 if i8 is not generally supported.
2143 if (!clspv::Option::Int8Support() &&
2144 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002145 if (Ty->getVectorNumElements() == 4) {
2146 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2147 break;
2148 } else {
2149 Ty->print(errs());
2150 llvm_unreachable("Support above i8 vector type");
2151 }
2152 }
2153
2154 // Ops[0] = Component Type ID
2155 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002156 SPIRVOperandList Ops;
2157 Ops << MkId(lookupType(Ty->getVectorElementType()))
2158 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002159
alan-bakerb6b09dc2018-11-08 16:59:28 -05002160 SPIRVInstruction *inst =
2161 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002162 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002163 break;
2164 }
2165 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002166 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002167 SPIRVInstList.push_back(Inst);
2168 break;
2169 }
2170 case Type::FunctionTyID: {
2171 // Generate SPIRV instruction for function type.
2172 FunctionType *FTy = cast<FunctionType>(Ty);
2173
2174 // Ops[0] = Return Type ID
2175 // Ops[1] ... Ops[n] = Parameter Type IDs
2176 SPIRVOperandList Ops;
2177
2178 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002179 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002180
2181 // Find SPIRV instructions for parameter types
2182 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2183 // Find SPIRV instruction for parameter type.
2184 auto ParamTy = FTy->getParamType(k);
2185 if (ParamTy->isPointerTy()) {
2186 auto PointeeTy = ParamTy->getPointerElementType();
2187 if (PointeeTy->isStructTy() &&
2188 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2189 ParamTy = PointeeTy;
2190 }
2191 }
2192
David Netoc6f3ab22018-04-06 18:02:31 -04002193 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002194 }
2195
David Neto87846742018-04-11 17:36:22 -04002196 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002197 SPIRVInstList.push_back(Inst);
2198 break;
2199 }
2200 }
2201 }
2202
2203 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002204 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002205 //
2206 // Generate OpTypeSampledImage.
2207 //
2208 // Ops[0] = Image Type ID
2209 //
2210 SPIRVOperandList Ops;
2211
David Netoc6f3ab22018-04-06 18:02:31 -04002212 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002213
alan-bakerabd82722019-12-03 17:14:51 -05002214 // Update the image type map.
2215 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002216
David Neto87846742018-04-11 17:36:22 -04002217 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002218 SPIRVInstList.push_back(Inst);
2219 }
David Netoc6f3ab22018-04-06 18:02:31 -04002220
2221 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002222 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2223 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002224 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
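    // The array length is an OpSpecConstant with a default value of 1, so the
    // size of each pointer-to-local argument can be specialized at pipeline
    // creation time.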
David Netoc6f3ab22018-04-06 18:02:31 -04002225
2226 // Generate the spec constant.
2227 SPIRVOperandList Ops;
2228 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002229 SPIRVInstList.push_back(
2230 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002231
2232 // Generate the array type.
2233 Ops.clear();
2234 // The element type must have been created.
2235 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2236 assert(elem_ty_id);
2237 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2238
2239 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002240 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002241
2242 Ops.clear();
2243 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002244 SPIRVInstList.push_back(new SPIRVInstruction(
2245 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002246 }
David Neto22f144c2017-06-12 14:26:21 -04002247}
2248
2249void SPIRVProducerPass::GenerateSPIRVConstants() {
2250 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2251 ValueMapType &VMap = getValueMap();
2252 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2253 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002254 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002255
2256 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002257 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002258 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002259
2260 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002261 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002262 continue;
2263 }
2264
David Netofb9a7972017-08-25 17:08:24 -04002265 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002266 VMap[Cst] = nextID;
2267
2268 //
2269 // Generate OpConstant.
2270 //
2271
2272 // Ops[0] = Result Type ID
2273 // Ops[1] .. Ops[n] = Values LiteralNumber
2274 SPIRVOperandList Ops;
2275
David Neto257c3892018-04-11 13:19:45 -04002276 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002277
2278 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002279 spv::Op Opcode = spv::OpNop;
2280
2281 if (isa<UndefValue>(Cst)) {
2282 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002283 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002284 if (hack_undef && IsTypeNullable(Cst->getType())) {
2285 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002286 }
David Neto22f144c2017-06-12 14:26:21 -04002287 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2288 unsigned BitWidth = CI->getBitWidth();
2289 if (BitWidth == 1) {
2290 // If the bitwidth of constant is 1, generate OpConstantTrue or
2291 // OpConstantFalse.
2292 if (CI->getZExtValue()) {
2293 // Ops[0] = Result Type ID
2294 Opcode = spv::OpConstantTrue;
2295 } else {
2296 // Ops[0] = Result Type ID
2297 Opcode = spv::OpConstantFalse;
2298 }
David Neto22f144c2017-06-12 14:26:21 -04002299 } else {
2300 auto V = CI->getZExtValue();
2301 LiteralNum.push_back(V & 0xFFFFFFFF);
2302
2303 if (BitWidth > 32) {
2304 LiteralNum.push_back(V >> 32);
2305 }
2306
2307 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002308
David Neto257c3892018-04-11 13:19:45 -04002309 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002310 }
2311 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2312 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2313 Type *CFPTy = CFP->getType();
2314 if (CFPTy->isFloatTy()) {
2315 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002316 } else if (CFPTy->isDoubleTy()) {
2317 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2318 LiteralNum.push_back(FPVal >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002319 } else {
2320 CFPTy->print(errs());
2321 llvm_unreachable("Implement this ConstantFP Type");
2322 }
2323
2324 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002325
David Neto257c3892018-04-11 13:19:45 -04002326 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002327 } else if (isa<ConstantDataSequential>(Cst) &&
2328 cast<ConstantDataSequential>(Cst)->isString()) {
2329 Cst->print(errs());
2330 llvm_unreachable("Implement this Constant");
2331
2332 } else if (const ConstantDataSequential *CDS =
2333 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002334 // Let's convert <4 x i8> constant to int constant specially.
2335 // This case occurs when all the values are specified as constant
2336 // ints.
2337 Type *CstTy = Cst->getType();
2338 if (is4xi8vec(CstTy)) {
2339 LLVMContext &Context = CstTy->getContext();
2340
2341 //
2342 // Generate OpConstant with OpTypeInt 32 0.
2343 //
Neil Henning39672102017-09-29 14:33:13 +01002344 uint32_t IntValue = 0;
2345 for (unsigned k = 0; k < 4; k++) {
2346 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002347 IntValue = (IntValue << 8) | (Val & 0xffu);
2348 }
2349
2350 Type *i32 = Type::getInt32Ty(Context);
2351 Constant *CstInt = ConstantInt::get(i32, IntValue);
2352 // If this constant is already registered on VMap, use it.
2353 if (VMap.count(CstInt)) {
2354 uint32_t CstID = VMap[CstInt];
2355 VMap[Cst] = CstID;
2356 continue;
2357 }
2358
David Neto257c3892018-04-11 13:19:45 -04002359 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002360
David Neto87846742018-04-11 17:36:22 -04002361 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002362 SPIRVInstList.push_back(CstInst);
2363
2364 continue;
2365 }
2366
2367 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002368 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2369 Constant *EleCst = CDS->getElementAsConstant(k);
2370 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002371 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002372 }
2373
2374 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002375 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2376 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002377 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002378 Type *CstTy = Cst->getType();
2379 if (is4xi8vec(CstTy)) {
2380 LLVMContext &Context = CstTy->getContext();
2381
2382 //
2383 // Generate OpConstant with OpTypeInt 32 0.
2384 //
Neil Henning39672102017-09-29 14:33:13 +01002385 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002386 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2387 I != E; ++I) {
2388 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002389 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002390 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2391 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002392 }
David Neto49351ac2017-08-26 17:32:20 -04002393 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002394 }
2395
David Neto49351ac2017-08-26 17:32:20 -04002396 Type *i32 = Type::getInt32Ty(Context);
2397 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002398 // If this constant is already registered on VMap, use it.
2399 if (VMap.count(CstInt)) {
2400 uint32_t CstID = VMap[CstInt];
2401 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002402 continue;
David Neto22f144c2017-06-12 14:26:21 -04002403 }
2404
David Neto257c3892018-04-11 13:19:45 -04002405 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002406
David Neto87846742018-04-11 17:36:22 -04002407 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002408 SPIRVInstList.push_back(CstInst);
2409
David Neto19a1bad2017-08-25 15:01:41 -04002410 continue;
David Neto22f144c2017-06-12 14:26:21 -04002411 }
2412
2413 // We use a constant composite in SPIR-V for our constant aggregate in
2414 // LLVM.
2415 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002416
2417 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2418 // Look up the ID of the element of this aggregate (which we will
2419 // previously have created a constant for).
2420 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2421
2422 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002423 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002424 }
2425 } else if (Cst->isNullValue()) {
2426 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002427 } else {
2428 Cst->print(errs());
2429 llvm_unreachable("Unsupported Constant???");
2430 }
2431
alan-baker5b86ed72019-02-15 08:26:50 -05002432 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2433 // Null pointer requires variable pointers.
2434 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2435 }
2436
David Neto87846742018-04-11 17:36:22 -04002437 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002438 SPIRVInstList.push_back(CstInst);
2439 }
2440}
2441
2442void SPIRVProducerPass::GenerateSamplers(Module &M) {
2443 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002444
alan-bakerb6b09dc2018-11-08 16:59:28 -05002445 auto &sampler_map = getSamplerMap();
David Neto862b7d82018-06-14 18:48:37 -04002446 SamplerMapIndexToIDMap.clear();
David Neto22f144c2017-06-12 14:26:21 -04002447 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Neto862b7d82018-06-14 18:48:37 -04002448 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2449 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002450
David Neto862b7d82018-06-14 18:48:37 -04002451 // We might have samplers in the sampler map that are not used
 2452 // in the translation unit. We still need to allocate variables and
 2453 // bindings for them.
2454 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002455
Kévin Petitdf71de32019-04-09 14:09:50 +01002456 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-bakerb6b09dc2018-11-08 16:59:28 -05002457 if (!var_fn)
2458 return;
David Neto862b7d82018-06-14 18:48:37 -04002459 for (auto user : var_fn->users()) {
2460 // Populate SamplerLiteralToDescriptorSetMap and
2461 // SamplerLiteralToBindingMap.
2462 //
2463 // Look for calls like
2464 // call %opencl.sampler_t addrspace(2)*
2465 // @clspv.sampler.var.literal(
2466 // i32 descriptor,
2467 // i32 binding,
2468 // i32 index-into-sampler-map)
alan-bakerb6b09dc2018-11-08 16:59:28 -05002469 if (auto *call = dyn_cast<CallInst>(user)) {
2470 const size_t index_into_sampler_map = static_cast<size_t>(
2471 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002472 if (index_into_sampler_map >= sampler_map.size()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002473 errs() << "Out of bounds index to sampler map: "
2474 << index_into_sampler_map;
David Neto862b7d82018-06-14 18:48:37 -04002475 llvm_unreachable("bad sampler init: out of bounds");
2476 }
2477
2478 auto sampler_value = sampler_map[index_into_sampler_map].first;
2479 const auto descriptor_set = static_cast<unsigned>(
2480 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2481 const auto binding = static_cast<unsigned>(
2482 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2483
2484 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2485 SamplerLiteralToBindingMap[sampler_value] = binding;
2486 used_bindings.insert(binding);
2487 }
2488 }
2489
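  // Emit one sampler variable per sampler-map entry. Entries never referenced
  // by the kernel still get descriptor set 0 and the next free binding.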
2490 unsigned index = 0;
2491 for (auto SamplerLiteral : sampler_map) {
David Neto22f144c2017-06-12 14:26:21 -04002492 // Generate OpVariable.
2493 //
2494 // GIDOps[0] : Result Type ID
2495 // GIDOps[1] : Storage Class
2496 SPIRVOperandList Ops;
2497
David Neto257c3892018-04-11 13:19:45 -04002498 Ops << MkId(lookupType(SamplerTy))
2499 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002500
David Neto862b7d82018-06-14 18:48:37 -04002501 auto sampler_var_id = nextID++;
2502 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002503 SPIRVInstList.push_back(Inst);
2504
David Neto862b7d82018-06-14 18:48:37 -04002505 SamplerMapIndexToIDMap[index] = sampler_var_id;
2506 SamplerLiteralToIDMap[SamplerLiteral.first] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002507
2508 // Find Insert Point for OpDecorate.
2509 auto DecoInsertPoint =
2510 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2511 [](SPIRVInstruction *Inst) -> bool {
2512 return Inst->getOpcode() != spv::OpDecorate &&
2513 Inst->getOpcode() != spv::OpMemberDecorate &&
2514 Inst->getOpcode() != spv::OpExtInstImport;
2515 });
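// SPIR-V requires all decoration instructions to sit together in the
// annotations section, so new OpDecorates are inserted at the end of the
// existing run of decoration (and OpExtInstImport) instructions.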
2516
2517 // Ops[0] = Target ID
2518 // Ops[1] = Decoration (DescriptorSet)
2519 // Ops[2] = LiteralNumber according to Decoration
2520 Ops.clear();
2521
David Neto862b7d82018-06-14 18:48:37 -04002522 unsigned descriptor_set;
2523 unsigned binding;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002524 if (SamplerLiteralToBindingMap.find(SamplerLiteral.first) ==
2525 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002526 // This sampler is not actually used. Find the next unused binding.
2527 for (binding = 0; used_bindings.count(binding); binding++)
2528 ;
2529 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2530 used_bindings.insert(binding);
2531 } else {
2532 descriptor_set = SamplerLiteralToDescriptorSetMap[SamplerLiteral.first];
2533 binding = SamplerLiteralToBindingMap[SamplerLiteral.first];
alan-bakercff80152019-06-15 00:38:00 -04002534
2535 version0::DescriptorMapEntry::SamplerData sampler_data = {
2536 SamplerLiteral.first};
2537 descriptorMapEntries->emplace_back(std::move(sampler_data),
2538 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002539 }
2540
2541 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2542 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002543
David Neto87846742018-04-11 17:36:22 -04002544 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002545 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2546
2547 // Ops[0] = Target ID
2548 // Ops[1] = Decoration (Binding)
2549 // Ops[2] = LiteralNumber according to Decoration
2550 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002551 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2552 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002553
David Neto87846742018-04-11 17:36:22 -04002554 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002555 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002556
2557 index++;
David Neto22f144c2017-06-12 14:26:21 -04002558 }
David Neto862b7d82018-06-14 18:48:37 -04002559}
David Neto22f144c2017-06-12 14:26:21 -04002560
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002561void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002562 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2563 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002564
David Neto862b7d82018-06-14 18:48:37 -04002565 // Generate variables. Make one for each resource var info object.
2566 for (auto *info : ModuleOrderedResourceVars) {
2567 Type *type = info->var_fn->getReturnType();
2568 // Remap the address space for opaque types.
2569 switch (info->arg_kind) {
2570 case clspv::ArgKind::Sampler:
2571 case clspv::ArgKind::ReadOnlyImage:
2572 case clspv::ArgKind::WriteOnlyImage:
2573 type = PointerType::get(type->getPointerElementType(),
2574 clspv::AddressSpace::UniformConstant);
2575 break;
2576 default:
2577 break;
2578 }
David Neto22f144c2017-06-12 14:26:21 -04002579
David Neto862b7d82018-06-14 18:48:37 -04002580 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002581
David Neto862b7d82018-06-14 18:48:37 -04002582 const auto type_id = lookupType(type);
2583 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2584 SPIRVOperandList Ops;
2585 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002586
David Neto862b7d82018-06-14 18:48:37 -04002587 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2588 SPIRVInstList.push_back(Inst);
2589
2590 // Map calls to the variable-builtin-function.
2591 for (auto &U : info->var_fn->uses()) {
2592 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2593 const auto set = unsigned(
2594 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2595 const auto binding = unsigned(
2596 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2597 if (set == info->descriptor_set && binding == info->binding) {
2598 switch (info->arg_kind) {
2599 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002600 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002601 case clspv::ArgKind::Pod:
2602 // The call maps to the variable directly.
2603 VMap[call] = info->var_id;
2604 break;
2605 case clspv::ArgKind::Sampler:
2606 case clspv::ArgKind::ReadOnlyImage:
2607 case clspv::ArgKind::WriteOnlyImage:
2608 // The call maps to a load we generate later.
2609 ResourceVarDeferredLoadCalls[call] = info->var_id;
2610 break;
2611 default:
2612 llvm_unreachable("Unhandled arg kind");
2613 }
2614 }
David Neto22f144c2017-06-12 14:26:21 -04002615 }
David Neto862b7d82018-06-14 18:48:37 -04002616 }
2617 }
David Neto22f144c2017-06-12 14:26:21 -04002618
David Neto862b7d82018-06-14 18:48:37 -04002619 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002620
David Neto862b7d82018-06-14 18:48:37 -04002621 // Find Insert Point for OpDecorate.
2622 auto DecoInsertPoint =
2623 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2624 [](SPIRVInstruction *Inst) -> bool {
2625 return Inst->getOpcode() != spv::OpDecorate &&
2626 Inst->getOpcode() != spv::OpMemberDecorate &&
2627 Inst->getOpcode() != spv::OpExtInstImport;
2628 });
2629
2630 SPIRVOperandList Ops;
2631 for (auto *info : ModuleOrderedResourceVars) {
2632 // Decorate with DescriptorSet and Binding.
2633 Ops.clear();
2634 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2635 << MkNum(info->descriptor_set);
2636 SPIRVInstList.insert(DecoInsertPoint,
2637 new SPIRVInstruction(spv::OpDecorate, Ops));
2638
2639 Ops.clear();
2640 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2641 << MkNum(info->binding);
2642 SPIRVInstList.insert(DecoInsertPoint,
2643 new SPIRVInstruction(spv::OpDecorate, Ops));
2644
alan-bakere9308012019-03-15 10:25:13 -04002645 if (info->coherent) {
2646 // Decorate with Coherent if required for the variable.
2647 Ops.clear();
2648 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2649 SPIRVInstList.insert(DecoInsertPoint,
2650 new SPIRVInstruction(spv::OpDecorate, Ops));
2651 }
2652
David Neto862b7d82018-06-14 18:48:37 -04002653 // Generate NonWritable and NonReadable
2654 switch (info->arg_kind) {
2655 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002656 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002657 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2658 clspv::AddressSpace::Constant) {
2659 Ops.clear();
2660 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2661 SPIRVInstList.insert(DecoInsertPoint,
2662 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002663 }
David Neto862b7d82018-06-14 18:48:37 -04002664 break;
David Neto862b7d82018-06-14 18:48:37 -04002665 case clspv::ArgKind::WriteOnlyImage:
2666 Ops.clear();
2667 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2668 SPIRVInstList.insert(DecoInsertPoint,
2669 new SPIRVInstruction(spv::OpDecorate, Ops));
2670 break;
2671 default:
2672 break;
David Neto22f144c2017-06-12 14:26:21 -04002673 }
2674 }
2675}
2676
2677void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002678 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002679 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2680 ValueMapType &VMap = getValueMap();
2681 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002682 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002683
2684 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2685 Type *Ty = GV.getType();
2686 PointerType *PTy = cast<PointerType>(Ty);
2687
2688 uint32_t InitializerID = 0;
2689
2690 // Workgroup size is handled differently (it goes into a constant)
2691 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2692 std::vector<bool> HasMDVec;
2693 uint32_t PrevXDimCst = 0xFFFFFFFF;
2694 uint32_t PrevYDimCst = 0xFFFFFFFF;
2695 uint32_t PrevZDimCst = 0xFFFFFFFF;
2696 for (Function &Func : *GV.getParent()) {
2697 if (Func.isDeclaration()) {
2698 continue;
2699 }
2700
2701 // We only need to check kernels.
2702 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2703 continue;
2704 }
2705
2706 if (const MDNode *MD =
2707 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2708 uint32_t CurXDimCst = static_cast<uint32_t>(
2709 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2710 uint32_t CurYDimCst = static_cast<uint32_t>(
2711 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2712 uint32_t CurZDimCst = static_cast<uint32_t>(
2713 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2714
2715 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2716 PrevZDimCst == 0xFFFFFFFF) {
2717 PrevXDimCst = CurXDimCst;
2718 PrevYDimCst = CurYDimCst;
2719 PrevZDimCst = CurZDimCst;
2720 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2721 CurZDimCst != PrevZDimCst) {
2722 llvm_unreachable(
2723 "reqd_work_group_size must be the same across all kernels");
2724 } else {
2725 continue;
2726 }
2727
2728 //
2729 // Generate OpConstantComposite.
2730 //
2731 // Ops[0] : Result Type ID
2732 // Ops[1] : Constant size for x dimension.
2733 // Ops[2] : Constant size for y dimension.
2734 // Ops[3] : Constant size for z dimension.
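// For example (hypothetical IDs), reqd_work_group_size(8, 8, 1) would yield:
//   %workgroup_size = OpConstantComposite %v3uint %uint_8 %uint_8 %uint_1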
2735 SPIRVOperandList Ops;
2736
2737 uint32_t XDimCstID =
2738 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2739 uint32_t YDimCstID =
2740 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2741 uint32_t ZDimCstID =
2742 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2743
2744 InitializerID = nextID;
2745
David Neto257c3892018-04-11 13:19:45 -04002746 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2747 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002748
David Neto87846742018-04-11 17:36:22 -04002749 auto *Inst =
2750 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002751 SPIRVInstList.push_back(Inst);
2752
2753 HasMDVec.push_back(true);
2754 } else {
2755 HasMDVec.push_back(false);
2756 }
2757 }
2758
2759 // Check that all kernels have the same reqd_work_group_size definition.
2760 bool HasMD = false;
2761 if (!HasMDVec.empty()) {
2762 HasMD = HasMDVec[0];
2763 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2764 if (HasMD != HasMDVec[i]) {
2765 llvm_unreachable(
2766 "Kernels should have consistent work group size definition");
2767 }
2768 }
2769 }
2770
2771 // If the kernels do not have reqd_work_group_size metadata, generate
2772 // OpSpecConstants for the x/y/z dimensions.
2773 if (!HasMD) {
2774 //
2775 // Generate OpSpecConstants for x/y/z dimension.
2776 //
2777 // Ops[0] : Result Type ID
2778 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2779 uint32_t XDimCstID = 0;
2780 uint32_t YDimCstID = 0;
2781 uint32_t ZDimCstID = 0;
2782
David Neto22f144c2017-06-12 14:26:21 -04002783 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002784 uint32_t result_type_id =
2785 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002786
David Neto257c3892018-04-11 13:19:45 -04002787 // X Dimension
2788 Ops << MkId(result_type_id) << MkNum(1);
2789 XDimCstID = nextID++;
2790 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002791 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002792
2793 // Y Dimension
2794 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002795 Ops << MkId(result_type_id) << MkNum(1);
2796 YDimCstID = nextID++;
2797 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002798 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002799
2800 // Z Dimension
2801 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002802 Ops << MkId(result_type_id) << MkNum(1);
2803 ZDimCstID = nextID++;
2804 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002805 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002806
David Neto257c3892018-04-11 13:19:45 -04002807 BuiltinDimVec.push_back(XDimCstID);
2808 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002809 BuiltinDimVec.push_back(ZDimCstID);
2810
David Neto22f144c2017-06-12 14:26:21 -04002811 //
2812 // Generate OpSpecConstantComposite.
2813 //
2814 // Ops[0] : Result Type ID
2815 // Ops[1] : Constant size for x dimension.
2816 // Ops[2] : Constant size for y dimension.
2817 // Ops[3] : Constant size for z dimension.
2818 InitializerID = nextID;
2819
2820 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002821 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2822 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002823
David Neto87846742018-04-11 17:36:22 -04002824 auto *Inst =
2825 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002826 SPIRVInstList.push_back(Inst);
2827 }
2828 }
2829
David Neto22f144c2017-06-12 14:26:21 -04002830 VMap[&GV] = nextID;
2831
2832 //
2833 // Generate OpVariable.
2834 //
2835 // GIDOps[0] : Result Type ID
2836 // GIDOps[1] : Storage Class
2837 SPIRVOperandList Ops;
2838
David Neto85082642018-03-24 06:55:20 -07002839 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002840 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002841
David Neto85082642018-03-24 06:55:20 -07002842 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002843 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002844 clspv::Option::ModuleConstantsInStorageBuffer();
2845
Kévin Petit23d5f182019-08-13 16:21:29 +01002846 if (GV.hasInitializer()) {
2847 auto GVInit = GV.getInitializer();
2848 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2849 assert(VMap.count(GVInit) == 1);
2850 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002851 }
2852 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002853
2854 if (0 != InitializerID) {
2855 // Emit the ID of the initializer as part of the variable definition.
2856 Ops << MkId(InitializerID);
2857 }
David Neto85082642018-03-24 06:55:20 -07002858 const uint32_t var_id = nextID++;
2859
David Neto87846742018-04-11 17:36:22 -04002860 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002861 SPIRVInstList.push_back(Inst);
2862
2863 // If we have a builtin.
2864 if (spv::BuiltInMax != BuiltinType) {
2865 // Find Insert Point for OpDecorate.
2866 auto DecoInsertPoint =
2867 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2868 [](SPIRVInstruction *Inst) -> bool {
2869 return Inst->getOpcode() != spv::OpDecorate &&
2870 Inst->getOpcode() != spv::OpMemberDecorate &&
2871 Inst->getOpcode() != spv::OpExtInstImport;
2872 });
2873 //
2874 // Generate OpDecorate.
2875 //
2876 // DOps[0] = Target ID
2877 // DOps[1] = Decoration (Builtin)
2878 // DOps[2] = BuiltIn ID
2879 uint32_t ResultID;
2880
2881 // WorkgroupSize is different: we decorate the constant composite that
2882 // holds its value, rather than the variable that we use to access the value.
2883 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2884 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002885 // Save both the value and variable IDs for later.
2886 WorkgroupSizeValueID = InitializerID;
2887 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002888 } else {
2889 ResultID = VMap[&GV];
2890 }
2891
2892 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002893 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2894 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002895
David Neto87846742018-04-11 17:36:22 -04002896 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002897 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002898 } else if (module_scope_constant_external_init) {
2899 // This module scope constant is initialized from a storage buffer with data
2900 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002901 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002902
David Neto862b7d82018-06-14 18:48:37 -04002903 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002904 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2905 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002906 std::string hexbytes;
2907 llvm::raw_string_ostream str(hexbytes);
2908 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002909 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2910 str.str()};
2911 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2912 0);
David Neto85082642018-03-24 06:55:20 -07002913
2914 // Find Insert Point for OpDecorate.
2915 auto DecoInsertPoint =
2916 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2917 [](SPIRVInstruction *Inst) -> bool {
2918 return Inst->getOpcode() != spv::OpDecorate &&
2919 Inst->getOpcode() != spv::OpMemberDecorate &&
2920 Inst->getOpcode() != spv::OpExtInstImport;
2921 });
2922
David Neto257c3892018-04-11 13:19:45 -04002923 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002924 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002925 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2926 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002927 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002928
2929 // OpDecorate %var DescriptorSet <descriptor_set>
2930 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002931 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2932 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002933 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04002934 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04002935 }
2936}
2937
David Netoc6f3ab22018-04-06 18:02:31 -04002938void SPIRVProducerPass::GenerateWorkgroupVars() {
2939 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04002940 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2941 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002942 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002943
2944 // Generate OpVariable.
2945 //
2946 // GIDOps[0] : Result Type ID
2947 // GIDOps[1] : Storage Class
2948 SPIRVOperandList Ops;
2949 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
2950
2951 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002952 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002953 }
2954}
2955
David Neto862b7d82018-06-14 18:48:37 -04002956void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
2957 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04002958 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
2959 return;
2960 }
David Neto862b7d82018-06-14 18:48:37 -04002961 // Gather the list of resources that are used by this function's arguments.
2962 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
2963
alan-bakerf5e5f692018-11-27 08:33:24 -05002964 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
2965 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04002966 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05002967 std::string kind =
2968 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
2969 ? "pod_ubo"
2970 : argKind;
2971 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04002972 };
2973
2974 auto *fty = F.getType()->getPointerElementType();
2975 auto *func_ty = dyn_cast<FunctionType>(fty);
2976
alan-baker038e9242019-04-19 22:14:41 -04002977 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04002978 // If an argument maps to a resource variable, then get descriptor set and
2979 // binding from the resource variable. Other info comes from the metadata.
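// As an illustrative sketch only (hypothetical operands, in the order read
// below: name, old index, new index, offset, size, kind, spec id):
//   !{!"dst", i32 0, i32 0, i32 0, i32 0, !"buffer", i32 -1}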
2980 const auto *arg_map = F.getMetadata("kernel_arg_map");
2981 if (arg_map) {
2982 for (const auto &arg : arg_map->operands()) {
2983 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00002984 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04002985 const auto name =
2986 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
2987 const auto old_index =
2988 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
2989 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05002990 const size_t new_index = static_cast<size_t>(
2991 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002992 const auto offset =
2993 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00002994 const auto arg_size =
2995 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04002996 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00002997 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04002998 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00002999 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003000
3001 uint32_t descriptor_set = 0;
3002 uint32_t binding = 0;
3003 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003004 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003005 static_cast<uint32_t>(spec_id),
3006 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003007 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003008 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003009 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3010 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3011 DL));
David Neto862b7d82018-06-14 18:48:37 -04003012 } else {
3013 auto *info = resource_var_at_index[new_index];
3014 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003015 descriptor_set = info->descriptor_set;
3016 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003017 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003018 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3019 binding);
David Neto862b7d82018-06-14 18:48:37 -04003020 }
3021 } else {
3022 // There is no argument map.
3023 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003024 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003025
3026 SmallVector<Argument *, 4> arguments;
3027 for (auto &arg : F.args()) {
3028 arguments.push_back(&arg);
3029 }
3030
3031 unsigned arg_index = 0;
3032 for (auto *info : resource_var_at_index) {
3033 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003034 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003035 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003036 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003037 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003038 }
3039
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003040 // Local pointer arguments are unused in this case. Offset is always
3041 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003042 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3043 F.getName(), arg->getName(),
3044 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3045 0, 0,
3046 0, arg_size};
3047 descriptorMapEntries->emplace_back(std::move(kernel_data),
3048 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003049 }
3050 arg_index++;
3051 }
3052 // Generate mappings for pointer-to-local arguments.
3053 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3054 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003055 auto where = LocalArgSpecIds.find(arg);
3056 if (where != LocalArgSpecIds.end()) {
3057 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003058 // Pod argument members are unused in this case.
3059 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3060 F.getName(),
3061 arg->getName(),
3062 arg_index,
3063 ArgKind::Local,
3064 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003065 static_cast<uint32_t>(
3066 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003067 0,
3068 0};
3069 // Pointer-to-local arguments do not utilize descriptor set and binding.
3070 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003071 }
3072 }
3073 }
3074}
3075
David Neto22f144c2017-06-12 14:26:21 -04003076void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3077 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3078 ValueMapType &VMap = getValueMap();
3079 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003080 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3081 auto &GlobalConstArgSet = getGlobalConstArgSet();
3082
3083 FunctionType *FTy = F.getFunctionType();
3084
3085 //
David Neto22f144c2017-06-12 14:26:21 -04003086 // Generate OpFunction.
3087 //
3088
3089 // FOps[0] : Result Type ID
3090 // FOps[1] : Function Control
3091 // FOps[2] : Function Type ID
3092 SPIRVOperandList FOps;
3093
3094 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003095 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003096
3097 // Check function attributes for SPIRV Function Control.
3098 uint32_t FuncControl = spv::FunctionControlMaskNone;
3099 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3100 FuncControl |= spv::FunctionControlInlineMask;
3101 }
3102 if (F.hasFnAttribute(Attribute::NoInline)) {
3103 FuncControl |= spv::FunctionControlDontInlineMask;
3104 }
3105 // TODO: Check llvm attribute for Function Control Pure.
3106 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3107 FuncControl |= spv::FunctionControlPureMask;
3108 }
3109 // TODO: Check llvm attribute for Function Control Const.
3110 if (F.hasFnAttribute(Attribute::ReadNone)) {
3111 FuncControl |= spv::FunctionControlConstMask;
3112 }
3113
David Neto257c3892018-04-11 13:19:45 -04003114 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003115
3116 uint32_t FTyID;
3117 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3118 SmallVector<Type *, 4> NewFuncParamTys;
3119 FunctionType *NewFTy =
3120 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3121 FTyID = lookupType(NewFTy);
3122 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003123 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003124 if (GlobalConstFuncTyMap.count(FTy)) {
3125 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3126 } else {
3127 FTyID = lookupType(FTy);
3128 }
3129 }
3130
David Neto257c3892018-04-11 13:19:45 -04003131 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003132
3133 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3134 EntryPoints.push_back(std::make_pair(&F, nextID));
3135 }
3136
3137 VMap[&F] = nextID;
3138
David Neto482550a2018-03-24 05:21:07 -07003139 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003140 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3141 }
David Neto22f144c2017-06-12 14:26:21 -04003142 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003143 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003144 SPIRVInstList.push_back(FuncInst);
3145
3146 //
3147 // Generate OpFunctionParameter for Normal function.
3148 //
3149
3150 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003151
3152 // Find Insert Point for OpDecorate.
3153 auto DecoInsertPoint =
3154 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3155 [](SPIRVInstruction *Inst) -> bool {
3156 return Inst->getOpcode() != spv::OpDecorate &&
3157 Inst->getOpcode() != spv::OpMemberDecorate &&
3158 Inst->getOpcode() != spv::OpExtInstImport;
3159 });
3160
David Neto22f144c2017-06-12 14:26:21 -04003161 // Iterate over the arguments for names, rather than over the param types from the function type.
3162 unsigned ArgIdx = 0;
3163 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003164 uint32_t param_id = nextID++;
3165 VMap[&Arg] = param_id;
3166
3167 if (CalledWithCoherentResource(Arg)) {
3168 // If the arg is passed a coherent resource ever, then decorate this
3169 // parameter with Coherent too.
3170 SPIRVOperandList decoration_ops;
3171 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003172 SPIRVInstList.insert(
3173 DecoInsertPoint,
3174 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003175 }
David Neto22f144c2017-06-12 14:26:21 -04003176
3177 // ParamOps[0] : Result Type ID
3178 SPIRVOperandList ParamOps;
3179
3180 // Find SPIRV instruction for parameter type.
3181 uint32_t ParamTyID = lookupType(Arg.getType());
3182 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3183 if (GlobalConstFuncTyMap.count(FTy)) {
3184 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3185 Type *EleTy = PTy->getPointerElementType();
3186 Type *ArgTy =
3187 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3188 ParamTyID = lookupType(ArgTy);
3189 GlobalConstArgSet.insert(&Arg);
3190 }
3191 }
3192 }
David Neto257c3892018-04-11 13:19:45 -04003193 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003194
3195 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003196 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003197 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003198 SPIRVInstList.push_back(ParamInst);
3199
3200 ArgIdx++;
3201 }
3202 }
3203}
3204
alan-bakerb6b09dc2018-11-08 16:59:28 -05003205void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003206 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3207 EntryPointVecType &EntryPoints = getEntryPointVec();
3208 ValueMapType &VMap = getValueMap();
3209 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3210 uint32_t &ExtInstImportID = getOpExtInstImportID();
3211 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3212
3213 // Set up insert point.
3214 auto InsertPoint = SPIRVInstList.begin();
3215
3216 //
3217 // Generate OpCapability
3218 //
3219 // TODO: Which llvm information is mapped to SPIRV Capability?
3220
3221 // Ops[0] = Capability
3222 SPIRVOperandList Ops;
3223
David Neto87846742018-04-11 17:36:22 -04003224 auto *CapInst =
3225 new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
David Neto22f144c2017-06-12 14:26:21 -04003226 SPIRVInstList.insert(InsertPoint, CapInst);
3227
3228 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003229 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3230 // Generate OpCapability for i8 type.
3231 SPIRVInstList.insert(InsertPoint,
3232 new SPIRVInstruction(spv::OpCapability,
3233 {MkNum(spv::CapabilityInt8)}));
3234 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003235 // Generate OpCapability for i16 type.
David Neto87846742018-04-11 17:36:22 -04003236 SPIRVInstList.insert(InsertPoint,
3237 new SPIRVInstruction(spv::OpCapability,
3238 {MkNum(spv::CapabilityInt16)}));
David Neto22f144c2017-06-12 14:26:21 -04003239 } else if (Ty->isIntegerTy(64)) {
3240 // Generate OpCapability for i64 type.
David Neto87846742018-04-11 17:36:22 -04003241 SPIRVInstList.insert(InsertPoint,
3242 new SPIRVInstruction(spv::OpCapability,
3243 {MkNum(spv::CapabilityInt64)}));
David Neto22f144c2017-06-12 14:26:21 -04003244 } else if (Ty->isHalfTy()) {
3245 // Generate OpCapability for half type.
3246 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003247 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3248 {MkNum(spv::CapabilityFloat16)}));
David Neto22f144c2017-06-12 14:26:21 -04003249 } else if (Ty->isDoubleTy()) {
3250 // Generate OpCapability for double type.
3251 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003252 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3253 {MkNum(spv::CapabilityFloat64)}));
David Neto22f144c2017-06-12 14:26:21 -04003254 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3255 if (STy->isOpaque()) {
alan-bakerf67468c2019-11-25 15:51:49 -05003256 if (STy->getName().startswith("opencl.image2d_wo_t") ||
3257 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04003258 // Generate OpCapability for write only image type.
3259 SPIRVInstList.insert(
3260 InsertPoint,
3261 new SPIRVInstruction(
David Neto87846742018-04-11 17:36:22 -04003262 spv::OpCapability,
3263 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
David Neto22f144c2017-06-12 14:26:21 -04003264 }
3265 }
3266 }
3267 }
3268
David Neto5c22a252018-03-15 16:07:41 -04003269 { // OpCapability ImageQuery
3270 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003271 for (const auto &SymVal : module.getValueSymbolTable()) {
3272 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
3273 if (clspv::IsGetImageHeight(F) || clspv::IsGetImageWidth(F)) {
3274 hasImageQuery = true;
3275 break;
3276 }
David Neto5c22a252018-03-15 16:07:41 -04003277 }
3278 }
alan-bakerf67468c2019-11-25 15:51:49 -05003279
David Neto5c22a252018-03-15 16:07:41 -04003280 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003281 auto *ImageQueryCapInst = new SPIRVInstruction(
3282 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003283 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3284 }
3285 }
3286
David Neto22f144c2017-06-12 14:26:21 -04003287 if (hasVariablePointers()) {
3288 //
David Neto22f144c2017-06-12 14:26:21 -04003289 // Generate OpCapability.
3290 //
3291 // Ops[0] = Capability
3292 //
3293 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003294 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003295
David Neto87846742018-04-11 17:36:22 -04003296 SPIRVInstList.insert(InsertPoint,
3297 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003298 } else if (hasVariablePointersStorageBuffer()) {
3299 //
3300 // Generate OpCapability.
3301 //
3302 // Ops[0] = Capability
3303 //
3304 Ops.clear();
3305 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003306
alan-baker5b86ed72019-02-15 08:26:50 -05003307 SPIRVInstList.insert(InsertPoint,
3308 new SPIRVInstruction(spv::OpCapability, Ops));
3309 }
3310
3311 // Always add the storage buffer extension
3312 {
David Neto22f144c2017-06-12 14:26:21 -04003313 //
3314 // Generate OpExtension.
3315 //
3316 // Ops[0] = Name (Literal String)
3317 //
alan-baker5b86ed72019-02-15 08:26:50 -05003318 auto *ExtensionInst = new SPIRVInstruction(
3319 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3320 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3321 }
David Neto22f144c2017-06-12 14:26:21 -04003322
alan-baker5b86ed72019-02-15 08:26:50 -05003323 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3324 //
3325 // Generate OpExtension.
3326 //
3327 // Ops[0] = Name (Literal String)
3328 //
3329 auto *ExtensionInst = new SPIRVInstruction(
3330 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3331 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003332 }
3333
3334 if (ExtInstImportID) {
3335 ++InsertPoint;
3336 }
3337
3338 //
3339 // Generate OpMemoryModel
3340 //
3341 // Memory model for Vulkan will always be GLSL450.
3342
3343 // Ops[0] = Addressing Model
3344 // Ops[1] = Memory Model
3345 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003346 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003347
David Neto87846742018-04-11 17:36:22 -04003348 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003349 SPIRVInstList.insert(InsertPoint, MemModelInst);
3350
3351 //
3352 // Generate OpEntryPoint
3353 //
3354 for (auto EntryPoint : EntryPoints) {
3355 // Ops[0] = Execution Model
3356 // Ops[1] = EntryPoint ID
3357 // Ops[2] = Name (Literal String)
3358 // ...
3359 //
3360 // TODO: Do we need to consider Interface ID for forward references???
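// For example (hypothetical IDs and interface list):
//   OpEntryPoint GLCompute %foo_id "foo" %interface_var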
3361 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003362 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003363 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3364 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003365
David Neto22f144c2017-06-12 14:26:21 -04003366 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003367 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003368 }
3369
David Neto87846742018-04-11 17:36:22 -04003370 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003371 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3372 }
3373
3374 for (auto EntryPoint : EntryPoints) {
3375 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3376 ->getMetadata("reqd_work_group_size")) {
3377
3378 if (!BuiltinDimVec.empty()) {
3379 llvm_unreachable(
3380 "Kernels should have consistent work group size definition");
3381 }
3382
3383 //
3384 // Generate OpExecutionMode
3385 //
3386
3387 // Ops[0] = Entry Point ID
3388 // Ops[1] = Execution Mode
3389 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
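// For example (hypothetical ID), reqd_work_group_size(8, 8, 1) becomes:
//   OpExecutionMode %kernel_id LocalSize 8 8 1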
3390 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003391 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003392
3393 uint32_t XDim = static_cast<uint32_t>(
3394 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3395 uint32_t YDim = static_cast<uint32_t>(
3396 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3397 uint32_t ZDim = static_cast<uint32_t>(
3398 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3399
David Neto257c3892018-04-11 13:19:45 -04003400 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003401
David Neto87846742018-04-11 17:36:22 -04003402 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003403 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3404 }
3405 }
3406
3407 //
3408 // Generate OpSource.
3409 //
3410 // Ops[0] = SourceLanguage ID
3411 // Ops[1] = Version (LiteralNum)
3412 //
3413 Ops.clear();
Kévin Petit0fc88042019-04-09 23:25:02 +01003414 if (clspv::Option::CPlusPlus()) {
3415 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3416 } else {
3417 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
3418 }
David Neto22f144c2017-06-12 14:26:21 -04003419
David Neto87846742018-04-11 17:36:22 -04003420 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003421 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3422
3423 if (!BuiltinDimVec.empty()) {
3424 //
3425 // Generate OpDecorates for x/y/z dimension.
3426 //
3427 // Ops[0] = Target ID
3428 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003429 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003430
3431 // X Dimension
3432 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003433 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003434 SPIRVInstList.insert(InsertPoint,
3435 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003436
3437 // Y Dimension
3438 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003439 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003440 SPIRVInstList.insert(InsertPoint,
3441 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003442
3443 // Z Dimension
3444 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003445 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003446 SPIRVInstList.insert(InsertPoint,
3447 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003448 }
3449}
3450
David Netob6e2e062018-04-25 10:32:06 -04003451void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3452 // Work around a driver bug. Initializers on Private variables might not
3453 // work. So the start of the kernel should store the initializer value to the
3454 // variables. Yes, *every* entry point pays this cost if *any* entry point
3455 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3456 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003457 // TODO(dneto): Remove this at some point once fixed drivers are widely
3458 // available.
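// When this workaround applies, each kernel entry block starts with a store
// like this hypothetical sketch:
//   OpStore %workgroup_size_var %workgroup_size_const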
David Netob6e2e062018-04-25 10:32:06 -04003459 if (WorkgroupSizeVarID) {
3460 assert(WorkgroupSizeValueID);
3461
3462 SPIRVOperandList Ops;
3463 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3464
3465 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3466 getSPIRVInstList().push_back(Inst);
3467 }
3468}
3469
David Neto22f144c2017-06-12 14:26:21 -04003470void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3471 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3472 ValueMapType &VMap = getValueMap();
3473
David Netob6e2e062018-04-25 10:32:06 -04003474 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003475
3476 for (BasicBlock &BB : F) {
3477 // Register BasicBlock to ValueMap.
3478 VMap[&BB] = nextID;
3479
3480 //
3481 // Generate OpLabel for Basic Block.
3482 //
3483 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003484 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003485 SPIRVInstList.push_back(Inst);
3486
David Neto6dcd4712017-06-23 11:06:47 -04003487 // OpVariable instructions must come first.
3488 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003489 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3490 // Allocating a pointer requires variable pointers.
3491 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003492 setVariablePointersCapabilities(
3493 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003494 }
David Neto6dcd4712017-06-23 11:06:47 -04003495 GenerateInstruction(I);
3496 }
3497 }
3498
David Neto22f144c2017-06-12 14:26:21 -04003499 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003500 if (clspv::Option::HackInitializers()) {
3501 GenerateEntryPointInitialStores();
3502 }
David Neto22f144c2017-06-12 14:26:21 -04003503 }
3504
3505 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003506 if (!isa<AllocaInst>(I)) {
3507 GenerateInstruction(I);
3508 }
David Neto22f144c2017-06-12 14:26:21 -04003509 }
3510 }
3511}
3512
3513spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3514 const std::map<CmpInst::Predicate, spv::Op> Map = {
3515 {CmpInst::ICMP_EQ, spv::OpIEqual},
3516 {CmpInst::ICMP_NE, spv::OpINotEqual},
3517 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3518 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3519 {CmpInst::ICMP_ULT, spv::OpULessThan},
3520 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3521 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3522 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3523 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3524 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3525 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3526 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3527 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3528 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3529 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3530 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3531 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3532 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3533 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3534 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3535 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3536 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3537
3538 assert(0 != Map.count(I->getPredicate()));
3539
3540 return Map.at(I->getPredicate());
3541}
3542
3543spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3544 const std::map<unsigned, spv::Op> Map{
3545 {Instruction::Trunc, spv::OpUConvert},
3546 {Instruction::ZExt, spv::OpUConvert},
3547 {Instruction::SExt, spv::OpSConvert},
3548 {Instruction::FPToUI, spv::OpConvertFToU},
3549 {Instruction::FPToSI, spv::OpConvertFToS},
3550 {Instruction::UIToFP, spv::OpConvertUToF},
3551 {Instruction::SIToFP, spv::OpConvertSToF},
3552 {Instruction::FPTrunc, spv::OpFConvert},
3553 {Instruction::FPExt, spv::OpFConvert},
3554 {Instruction::BitCast, spv::OpBitcast}};
3555
3556 assert(0 != Map.count(I.getOpcode()));
3557
3558 return Map.at(I.getOpcode());
3559}
3560
3561spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003562 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003563 switch (I.getOpcode()) {
3564 default:
3565 break;
3566 case Instruction::Or:
3567 return spv::OpLogicalOr;
3568 case Instruction::And:
3569 return spv::OpLogicalAnd;
3570 case Instruction::Xor:
3571 return spv::OpLogicalNotEqual;
3572 }
3573 }
3574
alan-bakerb6b09dc2018-11-08 16:59:28 -05003575 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003576 {Instruction::Add, spv::OpIAdd},
3577 {Instruction::FAdd, spv::OpFAdd},
3578 {Instruction::Sub, spv::OpISub},
3579 {Instruction::FSub, spv::OpFSub},
3580 {Instruction::Mul, spv::OpIMul},
3581 {Instruction::FMul, spv::OpFMul},
3582 {Instruction::UDiv, spv::OpUDiv},
3583 {Instruction::SDiv, spv::OpSDiv},
3584 {Instruction::FDiv, spv::OpFDiv},
3585 {Instruction::URem, spv::OpUMod},
3586 {Instruction::SRem, spv::OpSRem},
3587 {Instruction::FRem, spv::OpFRem},
3588 {Instruction::Or, spv::OpBitwiseOr},
3589 {Instruction::Xor, spv::OpBitwiseXor},
3590 {Instruction::And, spv::OpBitwiseAnd},
3591 {Instruction::Shl, spv::OpShiftLeftLogical},
3592 {Instruction::LShr, spv::OpShiftRightLogical},
3593 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3594
3595 assert(0 != Map.count(I.getOpcode()));
3596
3597 return Map.at(I.getOpcode());
3598}
3599
3600void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3601 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3602 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003603 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3604 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3605
3606 // Register Instruction to ValueMap.
3607 if (0 == VMap[&I]) {
3608 VMap[&I] = nextID;
3609 }
3610
3611 switch (I.getOpcode()) {
3612 default: {
3613 if (Instruction::isCast(I.getOpcode())) {
3614 //
3615 // Generate SPIRV instructions for cast operators.
3616 //
3617
David Netod2de94a2017-08-28 17:27:47 -04003618 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003619 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003620 auto toI8 = Ty == Type::getInt8Ty(Context);
3621 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003622 // Handle zext, sext and uitofp with i1 type specially.
3623 if ((I.getOpcode() == Instruction::ZExt ||
3624 I.getOpcode() == Instruction::SExt ||
3625 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003626 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003627 //
3628 // Generate OpSelect.
3629 //
3630
3631 // Ops[0] = Result Type ID
3632 // Ops[1] = Condition ID
3633 // Ops[2] = True Constant ID
3634 // Ops[3] = False Constant ID
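// For example (hypothetical IDs):
//   %r = zext i1 %c to i32
// becomes roughly:
//   %r = OpSelect %uint %c %uint_1 %uint_0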
3635 SPIRVOperandList Ops;
3636
David Neto257c3892018-04-11 13:19:45 -04003637 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003638
David Neto22f144c2017-06-12 14:26:21 -04003639 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003640 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003641
3642 uint32_t TrueID = 0;
3643 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003644 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003645 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003646 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003647 } else {
3648 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3649 }
David Neto257c3892018-04-11 13:19:45 -04003650 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003651
3652 uint32_t FalseID = 0;
3653 if (I.getOpcode() == Instruction::ZExt) {
3654 FalseID = VMap[Constant::getNullValue(I.getType())];
3655 } else if (I.getOpcode() == Instruction::SExt) {
3656 FalseID = VMap[Constant::getNullValue(I.getType())];
3657 } else {
3658 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3659 }
David Neto257c3892018-04-11 13:19:45 -04003660 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003661
David Neto87846742018-04-11 17:36:22 -04003662 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003663 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003664 } else if (!clspv::Option::Int8Support() &&
3665 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003666 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3667 // 8 bits.
3668 // Before:
3669 // %result = trunc i32 %a to i8
3670 // After:
3671 // %result = OpBitwiseAnd %uint %a %uint_255
3672
3673 SPIRVOperandList Ops;
3674
David Neto257c3892018-04-11 13:19:45 -04003675 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003676
3677 Type *UintTy = Type::getInt32Ty(Context);
3678 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003679 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003680
David Neto87846742018-04-11 17:36:22 -04003681 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003682 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003683 } else {
3684 // Ops[0] = Result Type ID
3685 // Ops[1] = Source Value ID
3686 SPIRVOperandList Ops;
3687
David Neto257c3892018-04-11 13:19:45 -04003688 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003689
David Neto87846742018-04-11 17:36:22 -04003690 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003691 SPIRVInstList.push_back(Inst);
3692 }
3693 } else if (isa<BinaryOperator>(I)) {
3694 //
3695 // Generate SPIRV instructions for binary operators.
3696 //
3697
3698 // Handle xor with i1 type specially.
3699 if (I.getOpcode() == Instruction::Xor &&
3700 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003701 ((isa<ConstantInt>(I.getOperand(0)) &&
3702 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3703 (isa<ConstantInt>(I.getOperand(1)) &&
3704 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003705 //
3706 // Generate OpLogicalNot.
3707 //
3708 // Ops[0] = Result Type ID
3709 // Ops[1] = Operand
3710 SPIRVOperandList Ops;
3711
David Neto257c3892018-04-11 13:19:45 -04003712 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003713
3714 Value *CondV = I.getOperand(0);
3715 if (isa<Constant>(I.getOperand(0))) {
3716 CondV = I.getOperand(1);
3717 }
David Neto257c3892018-04-11 13:19:45 -04003718 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003719
David Neto87846742018-04-11 17:36:22 -04003720 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003721 SPIRVInstList.push_back(Inst);
3722 } else {
3723 // Ops[0] = Result Type ID
3724 // Ops[1] = Operand 0
3725 // Ops[2] = Operand 1
3726 SPIRVOperandList Ops;
3727
David Neto257c3892018-04-11 13:19:45 -04003728 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3729 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003730
David Neto87846742018-04-11 17:36:22 -04003731 auto *Inst =
3732 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003733 SPIRVInstList.push_back(Inst);
3734 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003735 } else if (I.getOpcode() == Instruction::FNeg) {
3736 // FNeg is the only unary operator in LLVM IR.
3737 //
3738 // Ops[0] = Result Type ID
3739 // Ops[1] = Operand 0
3740 SPIRVOperandList ops;
3741
3742 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3743 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3744 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003745 } else {
3746 I.print(errs());
3747 llvm_unreachable("Unsupported instruction???");
3748 }
3749 break;
3750 }
3751 case Instruction::GetElementPtr: {
3752 auto &GlobalConstArgSet = getGlobalConstArgSet();
3753
3757 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3758
3759 //
3760 // Generate OpAccessChain.
3761 //
3762
3763 // Ops[0] = Result Type ID
3764 // Ops[1] = Base ID
3765 // Ops[2] ... Ops[n] = Indexes ID
3766 SPIRVOperandList Ops;
3767
alan-bakerb6b09dc2018-11-08 16:59:28 -05003768 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003769 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3770 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3771 // Use pointer type with private address space for global constant.
3772 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003773 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003774 }
David Neto257c3892018-04-11 13:19:45 -04003775
3776 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003777
David Neto862b7d82018-06-14 18:48:37 -04003778 // Generate the base pointer.
3779 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003780
David Neto862b7d82018-06-14 18:48:37 -04003781 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003782
3783 //
3784 // Follow the rules below for GEP (an illustrative sketch follows the list).
3785 //
David Neto862b7d82018-06-14 18:48:37 -04003786 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
3787 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003788 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3789 // first index.
3790 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3791 // use gep's first index.
3792 // 4. Otherwise (none of cases 1, 2 or 3 applies), generate OpAccessChain
3793 // and use gep's first index.
3794 //
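// Illustrative sketch (ids and type names below are made up): for
//   %p = getelementptr %struct.S, %struct.S addrspace(1)* %base, i32 0, i32 2
// case 1 applies, the leading zero index is dropped, and we emit roughly
//   %p = OpAccessChain %ptr_to_member %base %uint_2
// With a non-zero or non-constant first index (cases 2 and 3) the first index
// is kept and OpPtrAccessChain is emitted instead.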
3795 spv::Op Opcode = spv::OpAccessChain;
3796 unsigned offset = 0;
3797 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003798 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003799 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003800 } else {
David Neto22f144c2017-06-12 14:26:21 -04003801 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003802 }
David Neto862b7d82018-06-14 18:48:37 -04003803 } else {
David Neto22f144c2017-06-12 14:26:21 -04003804 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003805 }
3806
3807 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003808 // Do we need to generate ArrayStride? Check against the GEP result type
3809 // rather than the pointer type of the base because when indexing into
3810 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3811 // for something else in the SPIR-V.
3812 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003813 auto address_space = ResultType->getAddressSpace();
3814 setVariablePointersCapabilities(address_space);
3815 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003816 case spv::StorageClassStorageBuffer:
3817 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003818 // Save the need to generate an ArrayStride decoration. But defer
3819 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003820 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003821 break;
3822 default:
3823 break;
David Neto1a1a0582017-07-07 12:01:44 -04003824 }
David Neto22f144c2017-06-12 14:26:21 -04003825 }
3826
3827 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003828 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003829 }
3830
David Neto87846742018-04-11 17:36:22 -04003831 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003832 SPIRVInstList.push_back(Inst);
3833 break;
3834 }
3835 case Instruction::ExtractValue: {
3836 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3837 // Ops[0] = Result Type ID
3838 // Ops[1] = Composite ID
3839 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3840 SPIRVOperandList Ops;
3841
David Neto257c3892018-04-11 13:19:45 -04003842 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003843
3844 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003845 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003846
3847 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003848 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003849 }
3850
David Neto87846742018-04-11 17:36:22 -04003851 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003852 SPIRVInstList.push_back(Inst);
3853 break;
3854 }
3855 case Instruction::InsertValue: {
3856 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3857 // Ops[0] = Result Type ID
3858 // Ops[1] = Object ID
3859 // Ops[2] = Composite ID
3860 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3861 SPIRVOperandList Ops;
3862
3863 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003864 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003865
3866 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003867 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003868
3869 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003870 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003871
3872 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003873 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003874 }
3875
David Neto87846742018-04-11 17:36:22 -04003876 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003877 SPIRVInstList.push_back(Inst);
3878 break;
3879 }
3880 case Instruction::Select: {
3881 //
3882 // Generate OpSelect.
3883 //
3884
3885 // Ops[0] = Result Type ID
3886 // Ops[1] = Condition ID
3887 // Ops[2] = True Constant ID
3888 // Ops[3] = False Constant ID
3889 SPIRVOperandList Ops;
3890
3891 // Find SPIRV instruction for parameter type.
3892 auto Ty = I.getType();
3893 if (Ty->isPointerTy()) {
3894 auto PointeeTy = Ty->getPointerElementType();
3895 if (PointeeTy->isStructTy() &&
3896 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3897 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003898 } else {
3899 // Selecting between pointers requires variable pointers.
3900 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3901 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3902 setVariablePointers(true);
3903 }
David Neto22f144c2017-06-12 14:26:21 -04003904 }
3905 }
3906
David Neto257c3892018-04-11 13:19:45 -04003907 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3908 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003909
David Neto87846742018-04-11 17:36:22 -04003910 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003911 SPIRVInstList.push_back(Inst);
3912 break;
3913 }
3914 case Instruction::ExtractElement: {
3915 // Handle <4 x i8> type manually.
3916 Type *CompositeTy = I.getOperand(0)->getType();
3917 if (is4xi8vec(CompositeTy)) {
3918 //
3919 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3920 // <4 x i8>.
3921 //
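// Illustrative sketch: when i8 is not natively supported, <4 x i8> is packed
// into a single 32-bit word, so extracting element i roughly lowers to
//   %shifted = OpShiftRightLogical %uint %vec %shift   ; %shift holds i * 8
//   %byte    = OpBitwiseAnd %uint %shifted %uint_255
// with the shift amount produced by an OpIMul when the index is not constant.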
3922
3923 //
3924 // Generate OpShiftRightLogical
3925 //
3926 // Ops[0] = Result Type ID
3927 // Ops[1] = Operand 0
3928 // Ops[2] = Operand 1
3929 //
3930 SPIRVOperandList Ops;
3931
David Neto257c3892018-04-11 13:19:45 -04003932 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04003933
3934 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003935 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04003936
3937 uint32_t Op1ID = 0;
3938 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3939 // Handle constant index.
3940 uint64_t Idx = CI->getZExtValue();
3941 Value *ShiftAmount =
3942 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
3943 Op1ID = VMap[ShiftAmount];
3944 } else {
3945 // Handle variable index.
3946 SPIRVOperandList TmpOps;
3947
David Neto257c3892018-04-11 13:19:45 -04003948 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
3949 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003950
3951 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04003952 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04003953
3954 Op1ID = nextID;
3955
David Neto87846742018-04-11 17:36:22 -04003956 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003957 SPIRVInstList.push_back(TmpInst);
3958 }
David Neto257c3892018-04-11 13:19:45 -04003959 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04003960
3961 uint32_t ShiftID = nextID;
3962
David Neto87846742018-04-11 17:36:22 -04003963 auto *Inst =
3964 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003965 SPIRVInstList.push_back(Inst);
3966
3967 //
3968 // Generate OpBitwiseAnd
3969 //
3970 // Ops[0] = Result Type ID
3971 // Ops[1] = Operand 0
3972 // Ops[2] = Operand 1
3973 //
3974 Ops.clear();
3975
David Neto257c3892018-04-11 13:19:45 -04003976 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04003977
3978 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04003979 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04003980
David Neto9b2d6252017-09-06 15:47:37 -04003981 // Reset mapping for this value to the result of the bitwise and.
3982 VMap[&I] = nextID;
3983
David Neto87846742018-04-11 17:36:22 -04003984 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003985 SPIRVInstList.push_back(Inst);
3986 break;
3987 }
3988
3989 // Ops[0] = Result Type ID
3990 // Ops[1] = Composite ID
3991 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3992 SPIRVOperandList Ops;
3993
David Neto257c3892018-04-11 13:19:45 -04003994 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003995
3996 spv::Op Opcode = spv::OpCompositeExtract;
3997 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04003998 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04003999 } else {
David Neto257c3892018-04-11 13:19:45 -04004000 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004001 Opcode = spv::OpVectorExtractDynamic;
4002 }
4003
David Neto87846742018-04-11 17:36:22 -04004004 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004005 SPIRVInstList.push_back(Inst);
4006 break;
4007 }
4008 case Instruction::InsertElement: {
4009 // Handle <4 x i8> type manually.
4010 Type *CompositeTy = I.getOperand(0)->getType();
4011 if (is4xi8vec(CompositeTy)) {
4012 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4013 uint32_t CstFFID = VMap[CstFF];
4014
4015 uint32_t ShiftAmountID = 0;
4016 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4017 // Handle constant index.
4018 uint64_t Idx = CI->getZExtValue();
4019 Value *ShiftAmount =
4020 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4021 ShiftAmountID = VMap[ShiftAmount];
4022 } else {
4023 // Handle variable index.
4024 SPIRVOperandList TmpOps;
4025
David Neto257c3892018-04-11 13:19:45 -04004026 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4027 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004028
4029 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004030 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004031
4032 ShiftAmountID = nextID;
4033
David Neto87846742018-04-11 17:36:22 -04004034 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004035 SPIRVInstList.push_back(TmpInst);
4036 }
4037
4038 //
4039 // Generate mask operations.
4040 //
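// Illustrative sketch of the sequence generated below, for inserting byte
// value %v at index i into the packed word %w (names are made up):
//   %mask    = OpShiftLeftLogical %uint %uint_255 %shift   ; %shift holds i * 8
//   %invmask = OpNot %uint %mask
//   %cleared = OpBitwiseAnd %uint %w %invmask
//   %shifted = OpShiftLeftLogical %uint %v %shift
//   %result  = OpBitwiseOr %uint %cleared %shifted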
4041
4042 // Shift the 0xFF mask left according to the insertelement index.
4043 SPIRVOperandList Ops;
4044
David Neto257c3892018-04-11 13:19:45 -04004045 const uint32_t ResTyID = lookupType(CompositeTy);
4046 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004047
4048 uint32_t MaskID = nextID;
4049
David Neto87846742018-04-11 17:36:22 -04004050 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004051 SPIRVInstList.push_back(Inst);
4052
4053 // Inverse mask.
4054 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004055 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004056
4057 uint32_t InvMaskID = nextID;
4058
David Neto87846742018-04-11 17:36:22 -04004059 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004060 SPIRVInstList.push_back(Inst);
4061
4062 // Apply mask.
4063 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004064 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004065
4066 uint32_t OrgValID = nextID;
4067
David Neto87846742018-04-11 17:36:22 -04004068 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004069 SPIRVInstList.push_back(Inst);
4070
4071 // Shift the new value into position according to the insertelement index.
4072 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004073 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4074 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004075
4076 uint32_t InsertValID = nextID;
4077
David Neto87846742018-04-11 17:36:22 -04004078 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004079 SPIRVInstList.push_back(Inst);
4080
4081 // OR the shifted value into the masked original value.
4082 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004083 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004084
David Netoa394f392017-08-26 20:45:29 -04004085 VMap[&I] = nextID;
4086
David Neto87846742018-04-11 17:36:22 -04004087 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004088 SPIRVInstList.push_back(Inst);
4089
4090 break;
4091 }
4092
David Neto22f144c2017-06-12 14:26:21 -04004093 SPIRVOperandList Ops;
4094
James Priced26efea2018-06-09 23:28:32 +01004095 // Ops[0] = Result Type ID
4096 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004097
4098 spv::Op Opcode = spv::OpCompositeInsert;
4099 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004100 const auto value = CI->getZExtValue();
4101 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004102 // Ops[1] = Object ID
4103 // Ops[2] = Composite ID
4104 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004105 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004106 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004107 } else {
James Priced26efea2018-06-09 23:28:32 +01004108 // Ops[1] = Composite ID
4109 // Ops[2] = Object ID
4110 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004111 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004112 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004113 Opcode = spv::OpVectorInsertDynamic;
4114 }
4115
David Neto87846742018-04-11 17:36:22 -04004116 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004117 SPIRVInstList.push_back(Inst);
4118 break;
4119 }
4120 case Instruction::ShuffleVector: {
4121 // Ops[0] = Result Type ID
4122 // Ops[1] = Vector 1 ID
4123 // Ops[2] = Vector 2 ID
4124 // Ops[3] ... Ops[n] = Components (Literal Number)
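// Illustrative sketch: for a constant mask <i32 0, i32 2, i32 undef, i32 3>
// the components become the literals 0, 2, 0xFFFFFFFF (undef) and 3, giving
// roughly
//   %s = OpVectorShuffle %v4float %a %b 0 2 4294967295 3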
4125 SPIRVOperandList Ops;
4126
David Neto257c3892018-04-11 13:19:45 -04004127 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4128 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004129
4130 uint64_t NumElements = 0;
4131 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4132 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4133
4134 if (Cst->isNullValue()) {
4135 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004136 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004137 }
4138 } else if (const ConstantDataSequential *CDS =
4139 dyn_cast<ConstantDataSequential>(Cst)) {
4140 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4141 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004142 const auto value = CDS->getElementAsInteger(i);
4143 assert(value <= UINT32_MAX);
4144 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004145 }
4146 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4147 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4148 auto Op = CV->getOperand(i);
4149
4150 uint32_t literal = 0;
4151
4152 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4153 literal = static_cast<uint32_t>(CI->getZExtValue());
4154 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4155 literal = 0xFFFFFFFFu;
4156 } else {
4157 Op->print(errs());
4158 llvm_unreachable("Unsupported element in ConstantVector!");
4159 }
4160
David Neto257c3892018-04-11 13:19:45 -04004161 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004162 }
4163 } else {
4164 Cst->print(errs());
4165 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4166 }
4167 }
4168
David Neto87846742018-04-11 17:36:22 -04004169 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004170 SPIRVInstList.push_back(Inst);
4171 break;
4172 }
4173 case Instruction::ICmp:
4174 case Instruction::FCmp: {
4175 CmpInst *CmpI = cast<CmpInst>(&I);
4176
David Netod4ca2e62017-07-06 18:47:35 -04004177 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004178 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004179 if (isa<PointerType>(ArgTy)) {
4180 CmpI->print(errs());
4181 std::string name = I.getParent()->getParent()->getName();
4182 errs()
4183 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4184 << "in function " << name << "\n";
4185 llvm_unreachable("Pointer equality check is invalid");
4186 break;
4187 }
4188
David Neto257c3892018-04-11 13:19:45 -04004189 // Ops[0] = Result Type ID
4190 // Ops[1] = Operand 1 ID
4191 // Ops[2] = Operand 2 ID
4192 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004193
David Neto257c3892018-04-11 13:19:45 -04004194 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4195 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004196
4197 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004198 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004199 SPIRVInstList.push_back(Inst);
4200 break;
4201 }
4202 case Instruction::Br: {
4203 // The branch instruction is deferred because it needs the label's ID. Record
4204 // the slot's location in SPIRVInstructionList.
4205 DeferredInsts.push_back(
4206 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4207 break;
4208 }
4209 case Instruction::Switch: {
4210 I.print(errs());
4211 llvm_unreachable("Unsupported instruction???");
4212 break;
4213 }
4214 case Instruction::IndirectBr: {
4215 I.print(errs());
4216 llvm_unreachable("Unsupported instruction???");
4217 break;
4218 }
4219 case Instruction::PHI: {
4220 // The PHI instruction is deferred because it needs the IDs of its incoming
4221 // values and blocks. Record the slot's location in SPIRVInstructionList.
4222 DeferredInsts.push_back(
4223 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4224 break;
4225 }
4226 case Instruction::Alloca: {
4227 //
4228 // Generate OpVariable.
4229 //
4230 // Ops[0] : Result Type ID
4231 // Ops[1] : Storage Class
4232 SPIRVOperandList Ops;
4233
David Neto257c3892018-04-11 13:19:45 -04004234 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004235
David Neto87846742018-04-11 17:36:22 -04004236 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004237 SPIRVInstList.push_back(Inst);
4238 break;
4239 }
4240 case Instruction::Load: {
4241 LoadInst *LD = cast<LoadInst>(&I);
4242 //
4243 // Generate OpLoad.
4244 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004245
alan-baker5b86ed72019-02-15 08:26:50 -05004246 if (LD->getType()->isPointerTy()) {
4247 // Loading a pointer requires variable pointers.
4248 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4249 }
David Neto22f144c2017-06-12 14:26:21 -04004250
David Neto0a2f98d2017-09-15 19:38:40 -04004251 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004252 uint32_t PointerID = VMap[LD->getPointerOperand()];
4253
4254 // This is a hack to work around what looks like a driver bug.
4255 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004256 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4257 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004258 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004259 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004260 // Generate a bitwise-and of the original value with itself.
4261 // We should have been able to get away with just an OpCopyObject,
4262 // but we need something more complex to get past certain driver bugs.
4263 // This is ridiculous, but necessary.
4264 // TODO(dneto): Revisit this once drivers fix their bugs.
4265
4266 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004267 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4268 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004269
David Neto87846742018-04-11 17:36:22 -04004270 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004271 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004272 break;
4273 }
4274
4275 // This is the normal path. Generate a load.
4276
David Neto22f144c2017-06-12 14:26:21 -04004277 // Ops[0] = Result Type ID
4278 // Ops[1] = Pointer ID
4279 // Ops[2] ... Ops[n] = Optional Memory Access
4280 //
4281 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004282
David Neto22f144c2017-06-12 14:26:21 -04004283 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004284 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004285
David Neto87846742018-04-11 17:36:22 -04004286 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004287 SPIRVInstList.push_back(Inst);
4288 break;
4289 }
4290 case Instruction::Store: {
4291 StoreInst *ST = cast<StoreInst>(&I);
4292 //
4293 // Generate OpStore.
4294 //
4295
alan-baker5b86ed72019-02-15 08:26:50 -05004296 if (ST->getValueOperand()->getType()->isPointerTy()) {
4297 // Storing a pointer requires variable pointers.
4298 setVariablePointersCapabilities(
4299 ST->getValueOperand()->getType()->getPointerAddressSpace());
4300 }
4301
David Neto22f144c2017-06-12 14:26:21 -04004302 // Ops[0] = Pointer ID
4303 // Ops[1] = Object ID
4304 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4305 //
4306 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004307 SPIRVOperandList Ops;
4308 Ops << MkId(VMap[ST->getPointerOperand()])
4309 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004310
David Neto87846742018-04-11 17:36:22 -04004311 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004312 SPIRVInstList.push_back(Inst);
4313 break;
4314 }
4315 case Instruction::AtomicCmpXchg: {
4316 I.print(errs());
4317 llvm_unreachable("Unsupported instruction???");
4318 break;
4319 }
4320 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004321 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4322
4323 spv::Op opcode;
4324
4325 switch (AtomicRMW->getOperation()) {
4326 default:
4327 I.print(errs());
4328 llvm_unreachable("Unsupported instruction???");
4329 case llvm::AtomicRMWInst::Add:
4330 opcode = spv::OpAtomicIAdd;
4331 break;
4332 case llvm::AtomicRMWInst::Sub:
4333 opcode = spv::OpAtomicISub;
4334 break;
4335 case llvm::AtomicRMWInst::Xchg:
4336 opcode = spv::OpAtomicExchange;
4337 break;
4338 case llvm::AtomicRMWInst::Min:
4339 opcode = spv::OpAtomicSMin;
4340 break;
4341 case llvm::AtomicRMWInst::Max:
4342 opcode = spv::OpAtomicSMax;
4343 break;
4344 case llvm::AtomicRMWInst::UMin:
4345 opcode = spv::OpAtomicUMin;
4346 break;
4347 case llvm::AtomicRMWInst::UMax:
4348 opcode = spv::OpAtomicUMax;
4349 break;
4350 case llvm::AtomicRMWInst::And:
4351 opcode = spv::OpAtomicAnd;
4352 break;
4353 case llvm::AtomicRMWInst::Or:
4354 opcode = spv::OpAtomicOr;
4355 break;
4356 case llvm::AtomicRMWInst::Xor:
4357 opcode = spv::OpAtomicXor;
4358 break;
4359 }
4360
4361 //
4362 // Generate OpAtomic*.
4363 //
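// Illustrative sketch: "atomicrmw add i32* %p, i32 %v seq_cst" roughly becomes
//   %old = OpAtomicIAdd %uint %p %scope_device %semantics %v
// where %scope_device and %semantics are the Device scope and
// UniformMemory|SequentiallyConsistent constants added below.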
4364 SPIRVOperandList Ops;
4365
David Neto257c3892018-04-11 13:19:45 -04004366 Ops << MkId(lookupType(I.getType()))
4367 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004368
4369 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004370 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004371 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004372
4373 const auto ConstantMemorySemantics = ConstantInt::get(
4374 IntTy, spv::MemorySemanticsUniformMemoryMask |
4375 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004376 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004377
David Neto257c3892018-04-11 13:19:45 -04004378 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004379
4380 VMap[&I] = nextID;
4381
David Neto87846742018-04-11 17:36:22 -04004382 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004383 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004384 break;
4385 }
4386 case Instruction::Fence: {
4387 I.print(errs());
4388 llvm_unreachable("Unsupported instruction???");
4389 break;
4390 }
4391 case Instruction::Call: {
4392 CallInst *Call = dyn_cast<CallInst>(&I);
4393 Function *Callee = Call->getCalledFunction();
4394
Alan Baker202c8c72018-08-13 13:47:44 -04004395 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004396 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4397 // Generate an OpLoad
4398 SPIRVOperandList Ops;
4399 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004400
David Neto862b7d82018-06-14 18:48:37 -04004401 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4402 << MkId(ResourceVarDeferredLoadCalls[Call]);
4403
4404 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4405 SPIRVInstList.push_back(Inst);
4406 VMap[Call] = load_id;
4407 break;
4408
4409 } else {
4410 // This maps to an OpVariable we've already generated.
4411 // No code is generated for the call.
4412 }
4413 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004414 } else if (Callee->getName().startswith(
4415 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004416 // Don't codegen an instruction here, but instead map this call directly
4417 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004418 int spec_id = static_cast<int>(
4419 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004420 const auto &info = LocalSpecIdInfoMap[spec_id];
4421 VMap[Call] = info.variable_id;
4422 break;
David Neto862b7d82018-06-14 18:48:37 -04004423 }
4424
4425 // Sampler initializers become a load of the corresponding sampler.
4426
Kévin Petitdf71de32019-04-09 14:09:50 +01004427 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004428 // Map this to a load from the variable.
4429 const auto index_into_sampler_map =
4430 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4431
4432 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004433 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004434 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004435
David Neto257c3892018-04-11 13:19:45 -04004436 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004437 << MkId(SamplerMapIndexToIDMap[static_cast<unsigned>(
4438 index_into_sampler_map)]);
David Neto22f144c2017-06-12 14:26:21 -04004439
David Neto862b7d82018-06-14 18:48:37 -04004440 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004441 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004442 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004443 break;
4444 }
4445
Kévin Petit349c9502019-03-28 17:24:14 +00004446 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004447 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4448 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4449 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004450
Kévin Petit617a76d2019-04-04 13:54:16 +01004451 // If the switch above didn't find an entry, the intrinsic may instead be
4452 // using the name-mangling logic (see the sketch below).
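// Illustrative sketch (made-up IR): a call such as
//   call i32 @<mangled-spirv-op-helper>(i32 <value of spv::OpAtomicXor>, ...)
// is emitted as an OpAtomicXor whose operands are the remaining call
// arguments, with a result id only when the call returns a value.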
4453 bool usesMangler = false;
4454 if (opcode == spv::OpNop) {
4455 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4456 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4457 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4458 usesMangler = true;
4459 }
4460 }
4461
Kévin Petit349c9502019-03-28 17:24:14 +00004462 if (opcode != spv::OpNop) {
4463
David Neto22f144c2017-06-12 14:26:21 -04004464 SPIRVOperandList Ops;
4465
Kévin Petit349c9502019-03-28 17:24:14 +00004466 if (!I.getType()->isVoidTy()) {
4467 Ops << MkId(lookupType(I.getType()));
4468 }
David Neto22f144c2017-06-12 14:26:21 -04004469
Kévin Petit617a76d2019-04-04 13:54:16 +01004470 unsigned firstOperand = usesMangler ? 1 : 0;
4471 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004472 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004473 }
4474
Kévin Petit349c9502019-03-28 17:24:14 +00004475 if (!I.getType()->isVoidTy()) {
4476 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004477 }
4478
Kévin Petit349c9502019-03-28 17:24:14 +00004479 SPIRVInstruction *Inst;
4480 if (!I.getType()->isVoidTy()) {
4481 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4482 } else {
4483 Inst = new SPIRVInstruction(opcode, Ops);
4484 }
Kévin Petit8a560882019-03-21 15:24:34 +00004485 SPIRVInstList.push_back(Inst);
4486 break;
4487 }
4488
David Neto22f144c2017-06-12 14:26:21 -04004489 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4490 if (Callee->getName().startswith("spirv.copy_memory")) {
4491 //
4492 // Generate OpCopyMemory.
4493 //
4494
4495 // Ops[0] = Dst ID
4496 // Ops[1] = Src ID
4497 // Ops[2] = Memory Access
4498 // Ops[3] = Alignment
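// Illustrative sketch: a spirv.copy_memory.* call with (dst, src, alignment,
// is_volatile) arguments roughly becomes
//   OpCopyMemory %dst %src Aligned <alignment>
// with the Volatile flag OR'd into the memory-access mask when is_volatile is
// non-zero.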
4499
4500 auto IsVolatile =
4501 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4502
4503 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4504 : spv::MemoryAccessMaskNone;
4505
4506 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4507
4508 auto Alignment =
4509 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4510
David Neto257c3892018-04-11 13:19:45 -04004511 SPIRVOperandList Ops;
4512 Ops << MkId(VMap[Call->getArgOperand(0)])
4513 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4514 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004515
David Neto87846742018-04-11 17:36:22 -04004516 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004517
4518 SPIRVInstList.push_back(Inst);
4519
4520 break;
4521 }
4522
David Neto22f144c2017-06-12 14:26:21 -04004523 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4524 // Additionally, OpTypeSampledImage is generated.
alan-bakerf67468c2019-11-25 15:51:49 -05004525 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004526 //
4527 // Generate OpSampledImage.
4528 //
4529 // Ops[0] = Result Type ID
4530 // Ops[1] = Image ID
4531 // Ops[2] = Sampler ID
4532 //
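// Illustrative sketch: a sampled read such as read_imagef(img, smp, coord)
// roughly lowers to
//   %si  = OpSampledImage %sampled_image_ty %img %smp
//   %val = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
// plus, for integer images, an OpBitcast of %val to the call's result type.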
4533 SPIRVOperandList Ops;
4534
4535 Value *Image = Call->getArgOperand(0);
4536 Value *Sampler = Call->getArgOperand(1);
4537 Value *Coordinate = Call->getArgOperand(2);
4538
4539 TypeMapType &OpImageTypeMap = getImageTypeMap();
4540 Type *ImageTy = Image->getType()->getPointerElementType();
4541 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004542 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004543 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004544
4545 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004546
4547 uint32_t SampledImageID = nextID;
4548
David Neto87846742018-04-11 17:36:22 -04004549 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004550 SPIRVInstList.push_back(Inst);
4551
4552 //
4553 // Generate OpImageSampleExplicitLod.
4554 //
4555 // Ops[0] = Result Type ID
4556 // Ops[1] = Sampled Image ID
4557 // Ops[2] = Coordinate ID
4558 // Ops[3] = Image Operands Type ID
4559 // Ops[4] ... Ops[n] = Operands ID
4560 //
4561 Ops.clear();
4562
alan-bakerf67468c2019-11-25 15:51:49 -05004563 const bool is_int_image = IsIntImageType(Image->getType());
4564 uint32_t result_type = 0;
4565 if (is_int_image) {
4566 result_type = v4int32ID;
4567 } else {
4568 result_type = lookupType(Call->getType());
4569 }
4570
4571 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4572 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004573
4574 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004575 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004576
alan-bakerf67468c2019-11-25 15:51:49 -05004577 uint32_t final_id = nextID++;
4578 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004579
alan-bakerf67468c2019-11-25 15:51:49 -05004580 uint32_t image_id = final_id;
4581 if (is_int_image) {
4582 // Int image requires a bitcast from v4int to v4uint.
4583 image_id = nextID++;
4584 }
4585
4586 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004587 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004588
4589 if (is_int_image) {
4590 // Generate the bitcast.
4591 Ops.clear();
4592 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4593 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4594 SPIRVInstList.push_back(Inst);
4595 }
David Neto22f144c2017-06-12 14:26:21 -04004596 break;
4597 }
4598
alan-bakerf67468c2019-11-25 15:51:49 -05004599 // write_image is mapped to OpImageWrite.
4600 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004601 //
4602 // Generate OpImageWrite.
4603 //
4604 // Ops[0] = Image ID
4605 // Ops[1] = Coordinate ID
4606 // Ops[2] = Texel ID
4607 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4608 // Ops[4] ... Ops[n] = (Optional) Operands ID
4609 //
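// Illustrative sketch: write_imagef(img, coord, texel) roughly becomes
//   OpImageWrite %img %coord %texel
// and for integer images the texel is first converted with
//   %cast = OpBitcast %v4int %texel
// so the written value matches the image's component type.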
4610 SPIRVOperandList Ops;
4611
4612 Value *Image = Call->getArgOperand(0);
4613 Value *Coordinate = Call->getArgOperand(1);
4614 Value *Texel = Call->getArgOperand(2);
4615
4616 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004617 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004618 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004619
4620 const bool is_int_image = IsIntImageType(Image->getType());
4621 if (is_int_image) {
4622 // Generate a bitcast to v4int and use it as the texel value.
4623 uint32_t castID = nextID++;
4624 Ops << MkId(v4int32ID) << MkId(TexelID);
4625 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4626 SPIRVInstList.push_back(cast);
4627 Ops.clear();
4628 TexelID = castID;
4629 }
David Neto257c3892018-04-11 13:19:45 -04004630 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004631
David Neto87846742018-04-11 17:36:22 -04004632 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004633 SPIRVInstList.push_back(Inst);
4634 break;
4635 }
4636
alan-bakerf67468c2019-11-25 15:51:49 -05004637 // get_image_* is mapped to OpImageQuerySize
4638 if (clspv::IsGetImageHeight(Callee) || clspv::IsGetImageWidth(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004639 //
4640 // Generate OpImageQuerySize, then pull out the right component.
4641 // Assume 2D image for now.
4642 //
4643 // Ops[0] = Image ID
4644 //
4645 // %sizes = OpImageQuerySize %uint2 %im
4646 // %result = OpCompositeExtract %uint %sizes 0-or-1
4647 SPIRVOperandList Ops;
4648
4649 // Implement:
4650 // %sizes = OpImageQuerySize %uint2 %im
4651 uint32_t SizesTypeID =
4652 TypeMap[VectorType::get(Type::getInt32Ty(Context), 2)];
David Neto5c22a252018-03-15 16:07:41 -04004653 Value *Image = Call->getArgOperand(0);
4654 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004655 Ops << MkId(SizesTypeID) << MkId(ImageID);
David Neto5c22a252018-03-15 16:07:41 -04004656
4657 uint32_t SizesID = nextID++;
David Neto87846742018-04-11 17:36:22 -04004658 auto *QueryInst =
4659 new SPIRVInstruction(spv::OpImageQuerySize, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004660 SPIRVInstList.push_back(QueryInst);
4661
4662 // Reset value map entry since we generated an intermediate instruction.
4663 VMap[&I] = nextID;
4664
4665 // Implement:
4666 // %result = OpCompositeExtract %uint %sizes 0-or-1
4667 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004668 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004669
4670 uint32_t component = Callee->getName().contains("height") ? 1 : 0;
David Neto257c3892018-04-11 13:19:45 -04004671 Ops << MkNum(component);
David Neto5c22a252018-03-15 16:07:41 -04004672
David Neto87846742018-04-11 17:36:22 -04004673 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004674 SPIRVInstList.push_back(Inst);
4675 break;
4676 }
4677
David Neto22f144c2017-06-12 14:26:21 -04004678 // The call instruction is deferred because it needs the function's ID. Record
4679 // the slot's location in SPIRVInstructionList.
4680 DeferredInsts.push_back(
4681 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4682
David Neto3fbb4072017-10-16 11:28:14 -04004683 // Check whether the implementation of this call uses an extended
4684 // instruction plus one more value-producing instruction. If so, then
4685 // reserve the id for the extra value-producing slot.
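// Illustrative sketch: clz() maps to the FindUMsb extended instruction plus a
// fix-up, so two result ids are needed:
//   %msb = OpExtInst %uint %glsl_ext FindUMsb %x
//   %clz = OpISub %uint %uint_31 %msb
// Similarly, acospi/asinpi/atanpi/atan2pi append an OpFMul by 1/pi.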
4686 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4687 if (EInst != kGlslExtInstBad) {
4688 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004689 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004690 VMap[&I] = nextID;
4691 nextID++;
4692 }
4693 break;
4694 }
4695 case Instruction::Ret: {
4696 unsigned NumOps = I.getNumOperands();
4697 if (NumOps == 0) {
4698 //
4699 // Generate OpReturn.
4700 //
David Neto87846742018-04-11 17:36:22 -04004701 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004702 } else {
4703 //
4704 // Generate OpReturnValue.
4705 //
4706
4707 // Ops[0] = Return Value ID
4708 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004709
4710 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004711
David Neto87846742018-04-11 17:36:22 -04004712 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004713 SPIRVInstList.push_back(Inst);
4714 break;
4715 }
4716 break;
4717 }
4718 }
4719}
4720
4721void SPIRVProducerPass::GenerateFuncEpilogue() {
4722 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4723
4724 //
4725 // Generate OpFunctionEnd
4726 //
4727
David Neto87846742018-04-11 17:36:22 -04004728 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004729 SPIRVInstList.push_back(Inst);
4730}
4731
4732bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004733 // Don't specialize <4 x i8> if i8 is generally supported.
4734 if (clspv::Option::Int8Support())
4735 return false;
4736
David Neto22f144c2017-06-12 14:26:21 -04004737 LLVMContext &Context = Ty->getContext();
4738 if (Ty->isVectorTy()) {
4739 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4740 Ty->getVectorNumElements() == 4) {
4741 return true;
4742 }
4743 }
4744
4745 return false;
4746}
4747
4748void SPIRVProducerPass::HandleDeferredInstruction() {
4749 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4750 ValueMapType &VMap = getValueMap();
4751 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4752
4753 for (auto DeferredInst = DeferredInsts.rbegin();
4754 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4755 Value *Inst = std::get<0>(*DeferredInst);
4756 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4757 if (InsertPoint != SPIRVInstList.end()) {
4758 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4759 ++InsertPoint;
4760 }
4761 }
4762
4763 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
4764 // Check whether the basic block containing this branch instruction is a
4765 // loop header. If it is, generate OpLoopMerge and
4766 // OpBranchConditional.
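// Illustrative sketch: for a loop header that conditionally branches to %body
// or exits to %merge, the deferred branch roughly expands to
//   OpLoopMerge %merge %continue None
//   OpBranchConditional %cond %body %merge
// where %continue is the latch (or another block dominating the back-edge).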
4767 Function *Func = Br->getParent()->getParent();
4768 DominatorTree &DT =
4769 getAnalysis<DominatorTreeWrapperPass>(*Func).getDomTree();
4770 const LoopInfo &LI =
4771 getAnalysis<LoopInfoWrapperPass>(*Func).getLoopInfo();
4772
4773 BasicBlock *BrBB = Br->getParent();
alan-baker49531082019-06-05 17:30:56 -04004774 Loop *L = LI.getLoopFor(BrBB);
David Neto22f144c2017-06-12 14:26:21 -04004775 if (LI.isLoopHeader(BrBB)) {
4776 Value *ContinueBB = nullptr;
4777 Value *MergeBB = nullptr;
4778
David Neto22f144c2017-06-12 14:26:21 -04004779 MergeBB = L->getExitBlock();
4780 if (!MergeBB) {
4781 // The StructurizeCFG pass converts the CFG into a triangle shape, so the
4782 // CFG has regions with a single entry/exit. As a result, a loop should
4783 // not have multiple exits.
4784 llvm_unreachable("Loop has multiple exits???");
4785 }
4786
4787 if (L->isLoopLatch(BrBB)) {
4788 ContinueBB = BrBB;
4789 } else {
4790 // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
4791 // block.
4792 BasicBlock *Header = L->getHeader();
4793 BasicBlock *Latch = L->getLoopLatch();
4794 for (BasicBlock *BB : L->blocks()) {
4795 if (BB == Header) {
4796 continue;
4797 }
4798
4799 // Check whether block dominates block with back-edge.
4800 if (DT.dominates(BB, Latch)) {
4801 ContinueBB = BB;
4802 }
4803 }
4804
4805 if (!ContinueBB) {
4806 llvm_unreachable("Wrong continue block from loop");
4807 }
4808 }
4809
4810 //
4811 // Generate OpLoopMerge.
4812 //
4813 // Ops[0] = Merge Block ID
4814 // Ops[1] = Continue Target ID
4815 // Ops[2] = Selection Control
4816 SPIRVOperandList Ops;
4817
4818 // The StructurizeCFG pass has already manipulated the CFG. Just use the
4819 // false block of the branch instruction as the merge block.
4820 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004821 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004822 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
4823 << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004824
David Neto87846742018-04-11 17:36:22 -04004825 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004826 SPIRVInstList.insert(InsertPoint, MergeInst);
4827
4828 } else if (Br->isConditional()) {
alan-baker49531082019-06-05 17:30:56 -04004829 // Generate a selection merge unless this is a back-edge block.
4830 bool HasBackedge = false;
4831 while (L && !HasBackedge) {
4832 if (L->isLoopLatch(BrBB)) {
4833 HasBackedge = true;
David Neto22f144c2017-06-12 14:26:21 -04004834 }
alan-baker49531082019-06-05 17:30:56 -04004835 L = L->getParentLoop();
David Neto22f144c2017-06-12 14:26:21 -04004836 }
alan-baker49531082019-06-05 17:30:56 -04004837 if (!HasBackedge) {
David Neto22f144c2017-06-12 14:26:21 -04004838 //
4839 // Generate OpSelectionMerge.
4840 //
4841 // Ops[0] = Merge Block ID
4842 // Ops[1] = Selection Control
4843 SPIRVOperandList Ops;
4844
4845 // The StructurizeCFG pass has already manipulated the CFG. Just use the
4846 // false block of the branch instruction as the merge block.
4847 uint32_t MergeBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004848 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004849
David Neto87846742018-04-11 17:36:22 -04004850 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004851 SPIRVInstList.insert(InsertPoint, MergeInst);
4852 }
4853 }
4854
4855 if (Br->isConditional()) {
4856 //
4857 // Generate OpBranchConditional.
4858 //
4859 // Ops[0] = Condition ID
4860 // Ops[1] = True Label ID
4861 // Ops[2] = False Label ID
4862 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4863 SPIRVOperandList Ops;
4864
4865 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004866 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004867 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004868
4869 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004870
David Neto87846742018-04-11 17:36:22 -04004871 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004872 SPIRVInstList.insert(InsertPoint, BrInst);
4873 } else {
4874 //
4875 // Generate OpBranch.
4876 //
4877 // Ops[0] = Target Label ID
4878 SPIRVOperandList Ops;
4879
4880 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004881 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004882
David Neto87846742018-04-11 17:36:22 -04004883 SPIRVInstList.insert(InsertPoint,
4884 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004885 }
4886 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004887 if (PHI->getType()->isPointerTy()) {
4888 // OpPhi on pointers requires variable pointers.
4889 setVariablePointersCapabilities(
4890 PHI->getType()->getPointerAddressSpace());
4891 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4892 setVariablePointers(true);
4893 }
4894 }
4895
David Neto22f144c2017-06-12 14:26:21 -04004896 //
4897 // Generate OpPhi.
4898 //
4899 // Ops[0] = Result Type ID
4900 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
4901 SPIRVOperandList Ops;
4902
David Neto257c3892018-04-11 13:19:45 -04004903 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004904
David Neto22f144c2017-06-12 14:26:21 -04004905 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4906 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04004907 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04004908 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004909 }
4910
4911 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004912 InsertPoint,
4913 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004914 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4915 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004916 auto callee_name = Callee->getName();
4917 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004918
4919 if (EInst) {
4920 uint32_t &ExtInstImportID = getOpExtInstImportID();
4921
4922 //
4923 // Generate OpExtInst.
4924 //
4925
4926 // Ops[0] = Result Type ID
4927 // Ops[1] = Set ID (OpExtInstImport ID)
4928 // Ops[2] = Instruction Number (Literal Number)
4929 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
4930 SPIRVOperandList Ops;
4931
David Neto862b7d82018-06-14 18:48:37 -04004932 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
4933 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004934
David Neto22f144c2017-06-12 14:26:21 -04004935 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4936 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004937 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004938 }
4939
David Neto87846742018-04-11 17:36:22 -04004940 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
4941 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004942 SPIRVInstList.insert(InsertPoint, ExtInst);
4943
David Neto3fbb4072017-10-16 11:28:14 -04004944 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
4945 if (IndirectExtInst != kGlslExtInstBad) {
4946 // Generate one more instruction that uses the result of the extended
4947 // instruction. Its result id is one more than the id of the
4948 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04004949 LLVMContext &Context =
4950 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04004951
David Neto3fbb4072017-10-16 11:28:14 -04004952 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
4953 &VMap, &SPIRVInstList, &InsertPoint](
4954 spv::Op opcode, Constant *constant) {
4955 //
4956 // Generate instruction like:
4957 // result = opcode constant <extinst-result>
4958 //
4959 // Ops[0] = Result Type ID
4960 // Ops[1] = Operand 0 ;; the constant, suitably splatted
4961 // Ops[2] = Operand 1 ;; the result of the extended instruction
4962 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004963
David Neto3fbb4072017-10-16 11:28:14 -04004964 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04004965 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04004966
4967 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
4968 constant = ConstantVector::getSplat(
4969 static_cast<unsigned>(vectorTy->getNumElements()), constant);
4970 }
David Neto257c3892018-04-11 13:19:45 -04004971 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04004972
4973 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004974 InsertPoint, new SPIRVInstruction(
4975 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04004976 };
4977
4978 switch (IndirectExtInst) {
4979 case glsl::ExtInstFindUMsb: // Implementing clz
4980 generate_extra_inst(
4981 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
4982 break;
4983 case glsl::ExtInstAcos: // Implementing acospi
4984 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01004985 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04004986 case glsl::ExtInstAtan2: // Implementing atan2pi
4987 generate_extra_inst(
4988 spv::OpFMul,
4989 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
4990 break;
4991
4992 default:
4993 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04004994 }
David Neto22f144c2017-06-12 14:26:21 -04004995 }
David Neto3fbb4072017-10-16 11:28:14 -04004996
alan-bakerb39c8262019-03-08 14:03:37 -05004997 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04004998 //
4999 // Generate OpBitCount
5000 //
5001 // Ops[0] = Result Type ID
5002 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005003 SPIRVOperandList Ops;
5004 Ops << MkId(lookupType(Call->getType()))
5005 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005006
5007 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005008 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005009 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005010
David Neto862b7d82018-06-14 18:48:37 -04005011 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005012
5013 // Generate an OpCompositeConstruct
5014 SPIRVOperandList Ops;
5015
5016 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005017 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005018
5019 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005020 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005021 }
5022
5023 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005024 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5025 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005026
Alan Baker202c8c72018-08-13 13:47:44 -04005027 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5028
5029 // We have already mapped the call's result value to an ID.
5030 // Don't generate any code now.
5031
5032 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005033
5034 // We have already mapped the call's result value to an ID.
5035 // Don't generate any code now.
5036
David Neto22f144c2017-06-12 14:26:21 -04005037 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005038 if (Call->getType()->isPointerTy()) {
5039 // Functions returning pointers require variable pointers.
5040 setVariablePointersCapabilities(
5041 Call->getType()->getPointerAddressSpace());
5042 }
5043
David Neto22f144c2017-06-12 14:26:21 -04005044 //
5045 // Generate OpFunctionCall.
5046 //
5047
5048 // Ops[0] = Result Type ID
5049 // Ops[1] = Callee Function ID
5050 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5051 SPIRVOperandList Ops;
5052
David Neto862b7d82018-06-14 18:48:37 -04005053 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005054
5055 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005056 if (CalleeID == 0) {
5057 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005058 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005059 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5060 // causes an infinite loop. Instead, go ahead and generate
5061 // the bad function call. A validator will catch the 0-Id.
5062 // llvm_unreachable("Can't translate function call");
5063 }
David Neto22f144c2017-06-12 14:26:21 -04005064
David Neto257c3892018-04-11 13:19:45 -04005065 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005066
David Neto22f144c2017-06-12 14:26:21 -04005067 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5068 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005069 auto *operand = Call->getOperand(i);
5070 if (operand->getType()->isPointerTy()) {
5071 auto sc =
5072 GetStorageClass(operand->getType()->getPointerAddressSpace());
5073 if (sc == spv::StorageClassStorageBuffer) {
5074 // Passing SSBO by reference requires variable pointers storage
5075 // buffer.
5076 setVariablePointersStorageBuffer(true);
5077 } else if (sc == spv::StorageClassWorkgroup) {
5078 // Workgroup references require variable pointers if they are not
5079 // memory object declarations.
5080 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5081 // Workgroup accessor represents a variable reference.
5082 if (!operand_call->getCalledFunction()->getName().startswith(
5083 clspv::WorkgroupAccessorFunction()))
5084 setVariablePointers(true);
5085 } else {
5086 // Arguments are function parameters.
5087 if (!isa<Argument>(operand))
5088 setVariablePointers(true);
5089 }
5090 }
5091 }
5092 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005093 }
5094
David Neto87846742018-04-11 17:36:22 -04005095 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5096 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005097 SPIRVInstList.insert(InsertPoint, CallInst);
5098 }
5099 }
5100 }
5101}
5102
David Neto1a1a0582017-07-07 12:01:44 -04005103void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005104 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005105 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005106 }
David Neto1a1a0582017-07-07 12:01:44 -04005107
5108 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005109
5110 // Find an iterator pointing just past the last decoration.
5111 bool seen_decorations = false;
5112 auto DecoInsertPoint =
5113 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5114 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5115 const bool is_decoration =
5116 Inst->getOpcode() == spv::OpDecorate ||
5117 Inst->getOpcode() == spv::OpMemberDecorate;
5118 if (is_decoration) {
5119 seen_decorations = true;
5120 return false;
5121 } else {
5122 return seen_decorations;
5123 }
5124 });
5125
David Netoc6f3ab22018-04-06 18:02:31 -04005126 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5127 // instructions we generated earlier.
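  // Illustrative example: for a strided pointer-to-float type this emits
  //   OpDecorate %ptr ArrayStride 4
  // where 4 is the element's alloc size as computed below.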
David Neto85082642018-03-24 06:55:20 -07005128 for (auto *type : getTypesNeedingArrayStride()) {
5129 Type *elemTy = nullptr;
5130 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5131 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005132 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005133 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005134 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005135 elemTy = seqTy->getSequentialElementType();
5136 } else {
5137 errs() << "Unhandled strided type " << *type << "\n";
5138 llvm_unreachable("Unhandled strided type");
5139 }
David Neto1a1a0582017-07-07 12:01:44 -04005140
5141 // Ops[0] = Target ID
5142 // Ops[1] = Decoration (ArrayStride)
5143 // Ops[2] = Stride number (Literal Number)
5144 SPIRVOperandList Ops;
5145
David Neto85082642018-03-24 06:55:20 -07005146 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005147 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005148
5149 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5150 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005151
David Neto87846742018-04-11 17:36:22 -04005152 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005153 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5154 }
David Netoc6f3ab22018-04-06 18:02:31 -04005155
5156 // Emit SpecId decorations targeting the array size value.
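  // Illustrative example: if a local (workgroup) argument's array size was
  // assigned spec id 3, this emits
  //   OpDecorate %array_size_id SpecId 3
  // so the array length can be supplied as a specialization constant.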
Alan Baker202c8c72018-08-13 13:47:44 -04005157 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5158 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005159 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005160 SPIRVOperandList Ops;
5161 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5162 << MkNum(arg_info.spec_id);
5163 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005164 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005165 }
David Neto1a1a0582017-07-07 12:01:44 -04005166}
5167
David Neto22f144c2017-06-12 14:26:21 -04005168glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
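  // The keys below are Itanium-mangled OpenCL builtin names. As a reading
  // aid: 'c'/'s'/'i'/'l' are char/short/int/long, 'h'/'t'/'j'/'m' their
  // unsigned counterparts, 'f' is float, Dv2_/Dv3_/Dv4_ are 2/3/4-element
  // vectors, and S_ repeats an earlier parameter type.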
5169 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005170 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5171 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5172 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5173 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005174 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5175 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5176 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5177 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005178 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5179 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5180 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5181 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005182 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5183 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5184 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5185 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005186 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5187 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5188 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5189 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5190 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5191 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5192 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5193 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005194 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5195 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5196 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5197 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5198 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5199 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5200 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5201 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005202 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5203 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5204 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5205 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5206 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5207 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5208 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5209 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005210 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5211 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5212 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5213 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5214 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5215 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5216 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5217 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005218 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5219 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5220 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5221 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005222 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5223 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5224 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5225 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5226 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5227 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5228 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5229 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005230 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5231 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5232 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5233 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5234 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5235 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5236 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5237 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005238 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5239 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5240 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5241 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5242 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5243 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5244 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5245 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005246 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5247 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5248 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5249 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5250 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5251 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5252 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5253 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005254 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5255 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5256 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5257 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5258 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005259 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5260 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5261 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5262 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5263 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5264 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5265 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5266 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005267 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5268 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5269 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5270 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5271 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5272 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5273 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5274 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005275 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5276 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5277 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5278 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5279 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5280 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5281 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5282 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005283 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5284 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5285 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5286 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5287 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5288 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5289 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5290 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005291 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5292 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5293 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5294 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5295 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5296 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5297 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5298 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5299 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5300 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5301 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5302 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5303 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5304 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5305 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5306 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5307 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5308 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5309 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5310 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5311 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5312 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5313 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5314 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5315 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5316 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5317 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5318 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5319 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5320 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5321 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5322 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5323 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5324 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5325 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5326 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5327 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5328 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5329 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5330 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5331 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005332 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005333 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5334 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5335 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5336 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5337 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5338 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5339 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5340 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5341 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5342 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5343 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5344 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5345 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5346 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5347 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5348 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5349 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005350 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005351 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005352 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005353 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005354 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005355 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5356 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005357 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005358 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5359 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5360 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005361 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5362 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5363 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5364 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005365 .Default(kGlslExtInstBad);
5366}
5367
5368glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5369 // Check indirect cases.
5370 return StringSwitch<glsl::ExtInst>(Name)
5371 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5372 // Use exact match on float arg because these need a multiply
 5373 // by a constant of the right floating point type.
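      // For example, acospi(x) has no direct GLSL.std.450 equivalent; it is
      // mapped to Acos here and the generation code scales the result
      // (acospi(x) == acos(x) / pi), which is why an exact float-type match
      // is required.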
5374 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5375 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5376 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5377 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5378 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5379 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5380 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5381 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005382 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5383 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5384 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5385 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005386 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5387 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5388 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5389 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5390 .Default(kGlslExtInstBad);
5391}
5392
alan-bakerb6b09dc2018-11-08 16:59:28 -05005393glsl::ExtInst
5394SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005395 auto direct = getExtInstEnum(Name);
5396 if (direct != kGlslExtInstBad)
5397 return direct;
5398 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005399}
5400
David Neto22f144c2017-06-12 14:26:21 -04005401void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005402 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005403}
5404
5405void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5406 WriteOneWord(Inst->getResultID());
5407}
5408
5409void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5410 // High 16 bit : Word Count
5411 // Low 16 bit : Opcode
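  // e.g. "%float = OpTypeFloat 32" is 3 words with opcode 22, so its first
  // word is (3 << 16) | 22 == 0x00030016.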
5412 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005413 const uint32_t count = Inst->getWordCount();
5414 if (count > 65535) {
5415 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5416 llvm_unreachable("Word count too high");
5417 }
David Neto22f144c2017-06-12 14:26:21 -04005418 Word |= Inst->getWordCount() << 16;
5419 WriteOneWord(Word);
5420}
5421
5422void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
5423 SPIRVOperandType OpTy = Op->getType();
5424 switch (OpTy) {
5425 default: {
5426 llvm_unreachable("Unsupported SPIRV Operand Type???");
5427 break;
5428 }
5429 case SPIRVOperandType::NUMBERID: {
5430 WriteOneWord(Op->getNumID());
5431 break;
5432 }
5433 case SPIRVOperandType::LITERAL_STRING: {
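    // SPIR-V packs a literal string four bytes per word, low byte first
    // (as the reinterpret_cast below assumes on a little-endian host), with
    // a null terminator padded out to a word boundary. e.g. "main" becomes
    // the data word 0x6E69616D followed by an all-zero word, which is why
    // LastWord is written even when Remainder is 0.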
5434 std::string Str = Op->getLiteralStr();
5435 const char *Data = Str.c_str();
5436 size_t WordSize = Str.size() / 4;
5437 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5438 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5439 }
5440
5441 uint32_t Remainder = Str.size() % 4;
5442 uint32_t LastWord = 0;
5443 if (Remainder) {
5444 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5445 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5446 }
5447 }
5448
5449 WriteOneWord(LastWord);
5450 break;
5451 }
5452 case SPIRVOperandType::LITERAL_INTEGER:
5453 case SPIRVOperandType::LITERAL_FLOAT: {
5454 auto LiteralNum = Op->getLiteralNum();
 5455 // TODO: Handle LiteralNum carefully.
5456 for (auto Word : LiteralNum) {
5457 WriteOneWord(Word);
5458 }
5459 break;
5460 }
5461 }
5462}
5463
5464void SPIRVProducerPass::WriteSPIRVBinary() {
5465 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5466
5467 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005468 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005469 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
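    // The cases below fall into three encoding shapes: instructions with no
    // result ID (operands only), type/label-style instructions whose result
    // ID immediately follows the opcode word, and value-producing
    // instructions where Ops[0] is the result type, then the result ID,
    // then the remaining operands.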
5470
5471 switch (Opcode) {
5472 default: {
David Neto5c22a252018-03-15 16:07:41 -04005473 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005474 llvm_unreachable("Unsupported SPIRV instruction");
5475 break;
5476 }
5477 case spv::OpCapability:
5478 case spv::OpExtension:
5479 case spv::OpMemoryModel:
5480 case spv::OpEntryPoint:
5481 case spv::OpExecutionMode:
5482 case spv::OpSource:
5483 case spv::OpDecorate:
5484 case spv::OpMemberDecorate:
5485 case spv::OpBranch:
5486 case spv::OpBranchConditional:
5487 case spv::OpSelectionMerge:
5488 case spv::OpLoopMerge:
5489 case spv::OpStore:
5490 case spv::OpImageWrite:
5491 case spv::OpReturnValue:
5492 case spv::OpControlBarrier:
5493 case spv::OpMemoryBarrier:
5494 case spv::OpReturn:
5495 case spv::OpFunctionEnd:
5496 case spv::OpCopyMemory: {
5497 WriteWordCountAndOpcode(Inst);
5498 for (uint32_t i = 0; i < Ops.size(); i++) {
5499 WriteOperand(Ops[i]);
5500 }
5501 break;
5502 }
5503 case spv::OpTypeBool:
5504 case spv::OpTypeVoid:
5505 case spv::OpTypeSampler:
5506 case spv::OpLabel:
5507 case spv::OpExtInstImport:
5508 case spv::OpTypePointer:
5509 case spv::OpTypeRuntimeArray:
5510 case spv::OpTypeStruct:
5511 case spv::OpTypeImage:
5512 case spv::OpTypeSampledImage:
5513 case spv::OpTypeInt:
5514 case spv::OpTypeFloat:
5515 case spv::OpTypeArray:
5516 case spv::OpTypeVector:
5517 case spv::OpTypeFunction: {
5518 WriteWordCountAndOpcode(Inst);
5519 WriteResultID(Inst);
5520 for (uint32_t i = 0; i < Ops.size(); i++) {
5521 WriteOperand(Ops[i]);
5522 }
5523 break;
5524 }
5525 case spv::OpFunction:
5526 case spv::OpFunctionParameter:
5527 case spv::OpAccessChain:
5528 case spv::OpPtrAccessChain:
5529 case spv::OpInBoundsAccessChain:
5530 case spv::OpUConvert:
5531 case spv::OpSConvert:
5532 case spv::OpConvertFToU:
5533 case spv::OpConvertFToS:
5534 case spv::OpConvertUToF:
5535 case spv::OpConvertSToF:
5536 case spv::OpFConvert:
5537 case spv::OpConvertPtrToU:
5538 case spv::OpConvertUToPtr:
5539 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005540 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005541 case spv::OpIAdd:
5542 case spv::OpFAdd:
5543 case spv::OpISub:
5544 case spv::OpFSub:
5545 case spv::OpIMul:
5546 case spv::OpFMul:
5547 case spv::OpUDiv:
5548 case spv::OpSDiv:
5549 case spv::OpFDiv:
5550 case spv::OpUMod:
5551 case spv::OpSRem:
5552 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005553 case spv::OpUMulExtended:
5554 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005555 case spv::OpBitwiseOr:
5556 case spv::OpBitwiseXor:
5557 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005558 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005559 case spv::OpShiftLeftLogical:
5560 case spv::OpShiftRightLogical:
5561 case spv::OpShiftRightArithmetic:
5562 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005563 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005564 case spv::OpCompositeExtract:
5565 case spv::OpVectorExtractDynamic:
5566 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005567 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005568 case spv::OpVectorInsertDynamic:
5569 case spv::OpVectorShuffle:
5570 case spv::OpIEqual:
5571 case spv::OpINotEqual:
5572 case spv::OpUGreaterThan:
5573 case spv::OpUGreaterThanEqual:
5574 case spv::OpULessThan:
5575 case spv::OpULessThanEqual:
5576 case spv::OpSGreaterThan:
5577 case spv::OpSGreaterThanEqual:
5578 case spv::OpSLessThan:
5579 case spv::OpSLessThanEqual:
5580 case spv::OpFOrdEqual:
5581 case spv::OpFOrdGreaterThan:
5582 case spv::OpFOrdGreaterThanEqual:
5583 case spv::OpFOrdLessThan:
5584 case spv::OpFOrdLessThanEqual:
5585 case spv::OpFOrdNotEqual:
5586 case spv::OpFUnordEqual:
5587 case spv::OpFUnordGreaterThan:
5588 case spv::OpFUnordGreaterThanEqual:
5589 case spv::OpFUnordLessThan:
5590 case spv::OpFUnordLessThanEqual:
5591 case spv::OpFUnordNotEqual:
5592 case spv::OpExtInst:
5593 case spv::OpIsInf:
5594 case spv::OpIsNan:
5595 case spv::OpAny:
5596 case spv::OpAll:
5597 case spv::OpUndef:
5598 case spv::OpConstantNull:
5599 case spv::OpLogicalOr:
5600 case spv::OpLogicalAnd:
5601 case spv::OpLogicalNot:
5602 case spv::OpLogicalNotEqual:
5603 case spv::OpConstantComposite:
5604 case spv::OpSpecConstantComposite:
5605 case spv::OpConstantTrue:
5606 case spv::OpConstantFalse:
5607 case spv::OpConstant:
5608 case spv::OpSpecConstant:
5609 case spv::OpVariable:
5610 case spv::OpFunctionCall:
5611 case spv::OpSampledImage:
5612 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005613 case spv::OpImageQuerySize:
David Neto22f144c2017-06-12 14:26:21 -04005614 case spv::OpSelect:
5615 case spv::OpPhi:
5616 case spv::OpLoad:
5617 case spv::OpAtomicIAdd:
5618 case spv::OpAtomicISub:
5619 case spv::OpAtomicExchange:
5620 case spv::OpAtomicIIncrement:
5621 case spv::OpAtomicIDecrement:
5622 case spv::OpAtomicCompareExchange:
5623 case spv::OpAtomicUMin:
5624 case spv::OpAtomicSMin:
5625 case spv::OpAtomicUMax:
5626 case spv::OpAtomicSMax:
5627 case spv::OpAtomicAnd:
5628 case spv::OpAtomicOr:
5629 case spv::OpAtomicXor:
5630 case spv::OpDot: {
5631 WriteWordCountAndOpcode(Inst);
5632 WriteOperand(Ops[0]);
5633 WriteResultID(Inst);
5634 for (uint32_t i = 1; i < Ops.size(); i++) {
5635 WriteOperand(Ops[i]);
5636 }
5637 break;
5638 }
5639 }
5640 }
5641}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005642
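// Returns true if a value of |type| has a meaningful null representation
// (scalars, vectors, nullable pointers, and composites of those). Opaque
// image and sampler types do not, so they are excluded.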
alan-bakerb6b09dc2018-11-08 16:59:28 -05005643bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005644 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005645 case Type::HalfTyID:
5646 case Type::FloatTyID:
5647 case Type::DoubleTyID:
5648 case Type::IntegerTyID:
5649 case Type::VectorTyID:
5650 return true;
5651 case Type::PointerTyID: {
5652 const PointerType *pointer_type = cast<PointerType>(type);
5653 if (pointer_type->getPointerAddressSpace() !=
5654 AddressSpace::UniformConstant) {
5655 auto pointee_type = pointer_type->getPointerElementType();
5656 if (pointee_type->isStructTy() &&
5657 cast<StructType>(pointee_type)->isOpaque()) {
5658 // Images and samplers are not nullable.
5659 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005660 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005661 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005662 return true;
5663 }
5664 case Type::ArrayTyID:
5665 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5666 case Type::StructTyID: {
5667 const StructType *struct_type = cast<StructType>(type);
5668 // Images and samplers are not nullable.
5669 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005670 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005671 for (const auto element : struct_type->elements()) {
5672 if (!IsTypeNullable(element))
5673 return false;
5674 }
5675 return true;
5676 }
5677 default:
5678 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005679 }
5680}
Alan Bakerfcda9482018-10-02 17:09:59 -04005681
5682void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5683 if (auto *offsets_md =
5684 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
 5685 // Metadata is stored as key-value pair operands. The first element of each
5686 // operand is the type and the second is a vector of offsets.
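    // Illustrative operand shape (values hypothetical): a constant whose
    // type is the remapped struct, paired with an offset list such as
    // !{i32 0, i32 16}, records that member 0 starts at byte offset 0 and
    // member 1 at byte offset 16.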
5687 for (const auto *operand : offsets_md->operands()) {
5688 const auto *pair = cast<MDTuple>(operand);
5689 auto *type =
5690 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5691 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5692 std::vector<uint32_t> offsets;
5693 for (const Metadata *offset_md : offset_vector->operands()) {
5694 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005695 offsets.push_back(static_cast<uint32_t>(
5696 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005697 }
5698 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5699 }
5700 }
5701
5702 if (auto *sizes_md =
5703 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5704 // Metadata is stored as key-value pair operands. The first element of each
5705 // operand is the type and the second is a triple of sizes: type size in
5706 // bits, store size and alloc size.
5707 for (const auto *operand : sizes_md->operands()) {
5708 const auto *pair = cast<MDTuple>(operand);
5709 auto *type =
5710 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5711 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5712 uint64_t type_size_in_bits =
5713 cast<ConstantInt>(
5714 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5715 ->getZExtValue();
5716 uint64_t type_store_size =
5717 cast<ConstantInt>(
5718 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5719 ->getZExtValue();
5720 uint64_t type_alloc_size =
5721 cast<ConstantInt>(
5722 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5723 ->getZExtValue();
5724 RemappedUBOTypeSizes.insert(std::make_pair(
5725 type, std::make_tuple(type_size_in_bits, type_store_size,
5726 type_alloc_size)));
5727 }
5728 }
5729}
5730
5731uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5732 const DataLayout &DL) {
5733 auto iter = RemappedUBOTypeSizes.find(type);
5734 if (iter != RemappedUBOTypeSizes.end()) {
5735 return std::get<0>(iter->second);
5736 }
5737
5738 return DL.getTypeSizeInBits(type);
5739}
5740
5741uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5742 auto iter = RemappedUBOTypeSizes.find(type);
5743 if (iter != RemappedUBOTypeSizes.end()) {
5744 return std::get<1>(iter->second);
5745 }
5746
5747 return DL.getTypeStoreSize(type);
5748}
5749
5750uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5751 auto iter = RemappedUBOTypeSizes.find(type);
5752 if (iter != RemappedUBOTypeSizes.end()) {
5753 return std::get<2>(iter->second);
5754 }
5755
5756 return DL.getTypeAllocSize(type);
5757}
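// Note: the three size queries above consult the maps filled in by
// PopulateUBOTypeMaps first, so a type that was remapped for UBO layout
// (e.g. padded to satisfy 16-byte alignment rules) reports its remapped
// sizes rather than the DataLayout's.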
alan-baker5b86ed72019-02-15 08:26:50 -05005758
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005759void SPIRVProducerPass::setVariablePointersCapabilities(
5760 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005761 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5762 setVariablePointersStorageBuffer(true);
5763 } else {
5764 setVariablePointers(true);
5765 }
5766}
5767
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005768Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005769 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5770 return GetBasePointer(gep->getPointerOperand());
5771 }
5772
5773 // Conservatively return |v|.
5774 return v;
5775}
5776
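// Returns true when |lhs| and |rhs| are accessor calls that demonstrably
// name the same resource: matching descriptor set and binding for resource
// accessors, or matching spec id for workgroup accessors. Used below when
// checking the "same object" requirement for pointer selects and phis.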
5777bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5778 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5779 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5780 if (lhs_call->getCalledFunction()->getName().startswith(
5781 clspv::ResourceAccessorFunction()) &&
5782 rhs_call->getCalledFunction()->getName().startswith(
5783 clspv::ResourceAccessorFunction())) {
5784 // For resource accessors, match descriptor set and binding.
5785 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5786 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5787 return true;
5788 } else if (lhs_call->getCalledFunction()->getName().startswith(
5789 clspv::WorkgroupAccessorFunction()) &&
5790 rhs_call->getCalledFunction()->getName().startswith(
5791 clspv::WorkgroupAccessorFunction())) {
5792 // For workgroup resources, match spec id.
5793 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5794 return true;
5795 }
5796 }
5797 }
5798
5799 return false;
5800}
5801
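// Checks whether a pointer-valued select or phi only ever chooses between
// pointers into one underlying object (or null/undef stand-ins when the
// HackUndef option is on). This is used when deciding how the
// variable-pointers "same object" requirement applies to the instruction.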
5802bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5803 assert(inst->getType()->isPointerTy());
5804 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5805 spv::StorageClassStorageBuffer);
5806 const bool hack_undef = clspv::Option::HackUndef();
5807 if (auto *select = dyn_cast<SelectInst>(inst)) {
5808 auto *true_base = GetBasePointer(select->getTrueValue());
5809 auto *false_base = GetBasePointer(select->getFalseValue());
5810
5811 if (true_base == false_base)
5812 return true;
5813
5814 // If either the true or false operand is a null, then we satisfy the same
5815 // object constraint.
5816 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5817 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5818 return true;
5819 }
5820
5821 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5822 if (false_cst->isNullValue() ||
5823 (hack_undef && isa<UndefValue>(false_base)))
5824 return true;
5825 }
5826
5827 if (sameResource(true_base, false_base))
5828 return true;
5829 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5830 Value *value = nullptr;
5831 bool ok = true;
5832 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5833 auto *base = GetBasePointer(phi->getIncomingValue(i));
 5834 // Null values satisfy the constraint of selecting from the
5835 // same object.
5836 if (!value) {
5837 if (auto *cst = dyn_cast<Constant>(base)) {
5838 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5839 value = base;
5840 } else {
5841 value = base;
5842 }
5843 } else if (base != value) {
5844 if (auto *base_cst = dyn_cast<Constant>(base)) {
5845 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5846 continue;
5847 }
5848
5849 if (sameResource(value, base))
5850 continue;
5851
5852 // Values don't represent the same base.
5853 ok = false;
5854 }
5855 }
5856
5857 return ok;
5858 }
5859
5860 // Conservatively return false.
5861 return false;
5862}
alan-bakere9308012019-03-15 10:25:13 -04005863
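// Returns true if some call to |Arg|'s function passes a coherent storage
// buffer for this parameter: each call site's actual argument is traced
// back through pointer-typed operands (and further callers) until a
// resource accessor call is found, and its coherent operand (operand 5) is
// checked.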
5864bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5865 if (!Arg.getType()->isPointerTy() ||
5866 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5867 // Only SSBOs need to be annotated as coherent.
5868 return false;
5869 }
5870
5871 DenseSet<Value *> visited;
5872 std::vector<Value *> stack;
5873 for (auto *U : Arg.getParent()->users()) {
5874 if (auto *call = dyn_cast<CallInst>(U)) {
5875 stack.push_back(call->getOperand(Arg.getArgNo()));
5876 }
5877 }
5878
5879 while (!stack.empty()) {
5880 Value *v = stack.back();
5881 stack.pop_back();
5882
5883 if (!visited.insert(v).second)
5884 continue;
5885
5886 auto *resource_call = dyn_cast<CallInst>(v);
5887 if (resource_call &&
5888 resource_call->getCalledFunction()->getName().startswith(
5889 clspv::ResourceAccessorFunction())) {
5890 // If this is a resource accessor function, check if the coherent operand
5891 // is set.
5892 const auto coherent =
5893 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5894 ->getZExtValue());
5895 if (coherent == 1)
5896 return true;
5897 } else if (auto *arg = dyn_cast<Argument>(v)) {
5898 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005899 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005900 if (auto *call = dyn_cast<CallInst>(U)) {
5901 stack.push_back(call->getOperand(arg->getArgNo()));
5902 }
5903 }
5904 } else if (auto *user = dyn_cast<User>(v)) {
5905 // If this is a user, traverse all operands that could lead to resource
5906 // variables.
5907 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5908 Value *operand = user->getOperand(i);
5909 if (operand->getType()->isPointerTy() &&
5910 operand->getType()->getPointerAddressSpace() ==
5911 clspv::AddressSpace::Global) {
5912 stack.push_back(operand);
5913 }
5914 }
5915 }
5916 }
5917
5918 // No coherent resource variables encountered.
5919 return false;
5920}