// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() { return Type; };
  uint32_t getNumID() { return LiteralNum[0]; };
  std::string getLiteralStr() { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

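// Informal examples of the word counts above (added for clarity, not used by
// any code): a NUMBERID operand is always one word; a LITERAL_STRING of "abc"
// is (3 + 4) / 4 = 1 word, while "abcd" needs (4 + 4) / 4 = 2 words because
// the terminating null character must also be encoded.
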
class SPIRVOperandList {
public:
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
      : contents_(init.begin(), init.end()) {}
  operator ArrayRef<SPIRVOperand *>() { return contents_; }
  void push_back(SPIRVOperand *op) { contents_.push_back(op); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  SPIRVOperand *&operator[](size_t i) { return contents_[i]; }

  const SmallVector<SPIRVOperand *, 8> &getOperands() const {
    return contents_;
  }

private:
  SmallVector<SPIRVOperand *, 8> contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
  list.push_back(elem);
  return list;
}

SPIRVOperand *MkNum(uint32_t num) {
  return new SPIRVOperand(LITERAL_INTEGER, num);
}
SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_INTEGER, num_vec);
}
SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_FLOAT, num_vec);
}
SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
SPIRVOperand *MkString(StringRef str) {
  return new SPIRVOperand(LITERAL_STRING, str);
}

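// Typical usage sketch for the helpers above (illustrative only; it follows
// the pattern used later in this file rather than adding new behaviour):
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkNum(32) << MkNum(1);
//   // ...then wrapped in a SPIRVInstruction (defined below) with an opcode
//   // and, when applicable, a fresh result ID.
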
struct SPIRVInstruction {
  // Create an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
        Operands(Ops.begin(), Ops.end()) {
    for (auto *operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
  }
  // Create an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                            ArrayRef<SPIRVOperand *> Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
        Operands(Ops.begin(), Ops.end()) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    for (auto *operand : Ops) {
      WordCount += operand->GetNumWords();
    }
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<SPIRVOperand *, 4> Operands;
};

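// Informal word-count example for the constructors above: an OpTypeInt with a
// result ID and two literal integer operands (width 32, signedness 1) starts
// at WordCount = 2 and gains one word per operand, giving 4 words in total,
// which matches the SPIR-V binary encoding of that instruction.
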
struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is a phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, uint32_t> SamplerMapIndexToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to a copy of F's
  // type in which the pointer-to-constant parameter is replaced with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers to all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // If we have a sampler map, we might have literal samplers to generate.
  if (0 < getSamplerMap().size()) {
    GenerateSamplers(module);
  }

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

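  // Informal sketch of the C initializer list emitted below: the SPIR-V words
  // are printed in decimal, one per line after the first, e.g.
  //   {119734787,
  //   65536,
  //   ...}
  // where 119734787 is the SPIR-V magic number 0x07230203.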
  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

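// For reference, the five-word SPIR-V module header written above is:
//   word 0: magic number (0x07230203)
//   word 1: version
//   word 2: generator ID (Google's vendor ID in the upper 16 bits)
//   word 3: bound (a placeholder here; patched by patchHeader below)
//   word 4: schema (reserved, must be 0)
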
void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates extra LLVM IR, such as global variables for
  // arguments, constants, and pointer types for argument access. These
  // artifacts exist only because we need to produce Vulkan SPIR-V output.
  // This function is executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constants 0 and 1, so the constants are added
          // here.
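          // Informally: (i32)zext i1 %b becomes OpSelect %b, 1, 0; sext i1 %b
          // becomes OpSelect %b, -1, 0; and uitofp from i1 selects between the
          // 1.0f and 0.0f constants registered below.
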
          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components = dim;
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

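// Shape of a resource accessor call as consumed by FindResourceVars below
// (informal; only the operands the code actually inspects are listed):
// operand 0 is the descriptor set, 1 the binding, 2 the clspv::ArgKind,
// 3 the kernel argument index, and 5 the coherent flag. Other operands are
// not examined here.
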
void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact that the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

bool SPIRVProducerPass::FindExtInst(Module &M) {
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
          // Check whether this call is for extended instructions.
          auto callee_name = Callee->getName();
          const glsl::ExtInst EInst = getExtInstEnum(callee_name);
          const glsl::ExtInst IndirectEInst =
              getIndirectExtInstEnum(callee_name);

          HasExtInst |=
              (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);

          if (IndirectEInst) {
            // Register extra constants if needed.

            // Registers a type and constant for computing the result of the
            // given instruction. If the result of the instruction is a vector,
            // then make a splat vector constant with the same number of
            // elements.
            auto register_constant = [this, &I](Constant *constant) {
              FindType(constant->getType());
              FindConstant(constant);
              if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
                // Register the splat vector of the value with the same
                // width as the result of the instruction.
                auto *vec_constant = ConstantVector::getSplat(
                    static_cast<unsigned>(vectorTy->getNumElements()),
                    constant);
                FindConstant(vec_constant);
                FindType(vec_constant->getType());
              }
            };
            switch (IndirectEInst) {
            case glsl::ExtInstFindUMsb:
              // clz needs OpExtInst and OpISub with constant 31 (informally,
              // clz(x) = 31 - FindUMsb(x)), or a splat vector of 31. Add it
              // to the constant list here.
              register_constant(
                  ConstantInt::get(Type::getInt32Ty(Context), 31));
              break;
            case glsl::ExtInstAcos:
            case glsl::ExtInstAsin:
            case glsl::ExtInstAtan:
            case glsl::ExtInstAtan2:
              // We need 1/pi for acospi, asinpi, atanpi, atan2pi.
              register_constant(
                  ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
              break;
            default:
              assert(false && "internally inconsistent");
            }
          }
        }
      }
    }
  }

  return HasExtInst;
}

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

1132void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1133 // Investigate function's type.
1134 FunctionType *FTy = F.getFunctionType();
1135
1136 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1137 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001138 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001139 if (GlobalConstFuncTyMap.count(FTy)) {
1140 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1141 SmallVector<Type *, 4> NewFuncParamTys;
1142 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1143 Type *ParamTy = FTy->getParamType(i);
1144 if (i == GVCstArgIdx) {
1145 Type *EleTy = ParamTy->getPointerElementType();
1146 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1147 }
1148
1149 NewFuncParamTys.push_back(ParamTy);
1150 }
1151
1152 FunctionType *NewFTy =
1153 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1154 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1155 FTy = NewFTy;
1156 }
1157
1158 FindType(FTy);
1159 } else {
1160 // As kernel functions do not have parameters, create a new parameterless
1161 // function type and add it to the type map.
1162 SmallVector<Type *, 4> NewFuncParamTys;
1163 FunctionType *NewFTy =
1164 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1165 FindType(NewFTy);
1166 }
1167
1168 // Investigate instructions' type in function body.
1169 for (BasicBlock &BB : F) {
1170 for (Instruction &I : BB) {
1171 if (isa<ShuffleVectorInst>(I)) {
1172 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1173 // Ignore type for mask of shuffle vector instruction.
1174 if (i == 2) {
1175 continue;
1176 }
1177
1178 Value *Op = I.getOperand(i);
1179 if (!isa<MetadataAsValue>(Op)) {
1180 FindType(Op->getType());
1181 }
1182 }
1183
1184 FindType(I.getType());
1185 continue;
1186 }
1187
David Neto862b7d82018-06-14 18:48:37 -04001188 CallInst *Call = dyn_cast<CallInst>(&I);
1189
1190 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001191 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001192 // This is a fake call representing access to a resource variable.
1193 // We handle that elsewhere.
1194 continue;
1195 }
1196
Alan Baker202c8c72018-08-13 13:47:44 -04001197 if (Call && Call->getCalledFunction()->getName().startswith(
1198 clspv::WorkgroupAccessorFunction())) {
1199 // This is a fake call representing access to a workgroup variable.
1200 // We handle that elsewhere.
1201 continue;
1202 }
1203
David Neto22f144c2017-06-12 14:26:21 -04001204 // Work through the operands of the instruction.
1205 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1206 Value *const Op = I.getOperand(i);
1207 // If any of the operands is a constant, find the type!
1208 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1209 FindType(Op->getType());
1210 }
1211 }
1212
1213 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001214 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001215 // Avoid checking the call instruction's type.
1216 break;
1217 }
Alan Baker202c8c72018-08-13 13:47:44 -04001218 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1219 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1220 clspv::WorkgroupAccessorFunction())) {
1221 // This is a fake call representing access to a workgroup variable.
1222 // We handle that elsewhere.
1223 continue;
1224 }
1225 }
David Neto22f144c2017-06-12 14:26:21 -04001226 if (!isa<MetadataAsValue>(&Op)) {
1227 FindType(Op->getType());
1228 continue;
1229 }
1230 }
1231
David Neto22f144c2017-06-12 14:26:21 -04001232 // We don't want to track the type of this call as we are going to replace
1233 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001234 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001235 Call->getCalledFunction()->getName())) {
1236 continue;
1237 }
1238
1239 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1240 // If gep's base operand has ModuleScopePrivate address space, make gep
1241 // return ModuleScopePrivate address space.
1242 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1243 // Add pointer type with private address space for global constant to
1244 // type list.
1245 Type *EleTy = I.getType()->getPointerElementType();
1246 Type *NewPTy =
1247 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1248
1249 FindType(NewPTy);
1250 continue;
1251 }
1252 }
1253
1254 FindType(I.getType());
1255 }
1256 }
1257}
1258
David Neto862b7d82018-06-14 18:48:37 -04001259void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1260 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001261 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001262 0 < getSamplerMap().size()) {
1263 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1264 if (!SamplerStructTy) {
1265 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1266 }
1267
1268 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1269
1270 FindType(SamplerTy);
1271 }
1272}
1273
1274void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1275 // Record types so they are generated.
1276 TypesNeedingLayout.reset();
1277 StructTypesNeedingBlock.reset();
1278
1279 // To match older clspv codegen, generate the float type first if required
1280 // for images.
1281 for (const auto *info : ModuleOrderedResourceVars) {
1282 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1283 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001284 if (IsIntImageType(info->var_fn->getReturnType())) {
1285 // Nothing for now...
1286 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1287 FindType(Type::getInt32Ty(M.getContext()));
1288 }
1289
1290 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001291 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001292 }
1293 }
1294
1295 for (const auto *info : ModuleOrderedResourceVars) {
1296 Type *type = info->var_fn->getReturnType();
1297
1298 switch (info->arg_kind) {
1299 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001300 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001301 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1302 StructTypesNeedingBlock.insert(sty);
1303 } else {
1304 errs() << *type << "\n";
1305 llvm_unreachable("Buffer arguments must map to structures!");
1306 }
1307 break;
1308 case clspv::ArgKind::Pod:
1309 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1310 StructTypesNeedingBlock.insert(sty);
1311 } else {
1312 errs() << *type << "\n";
1313 llvm_unreachable("POD arguments must map to structures!");
1314 }
1315 break;
1316 case clspv::ArgKind::ReadOnlyImage:
1317 case clspv::ArgKind::WriteOnlyImage:
1318 case clspv::ArgKind::Sampler:
1319 // Sampler and image types map to the pointee type but
1320 // in the uniform constant address space.
1321 type = PointerType::get(type->getPointerElementType(),
1322 clspv::AddressSpace::UniformConstant);
1323 break;
1324 default:
1325 break;
1326 }
1327
1328 // The converted type is the type of the OpVariable we will generate.
1329 // If the pointee type is an array of size zero, FindType will convert it
1330 // to a runtime array.
1331 FindType(type);
1332 }
1333
alan-bakerdcd97412019-09-16 15:32:30 -04001334 // If module constants are clustered in a storage buffer then that struct
1335 // needs layout decorations.
1336 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1337 for (GlobalVariable &GV : M.globals()) {
1338 PointerType *PTy = cast<PointerType>(GV.getType());
1339 const auto AS = PTy->getAddressSpace();
1340 const bool module_scope_constant_external_init =
1341 (AS == AddressSpace::Constant) && GV.hasInitializer();
1342 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1343 if (module_scope_constant_external_init &&
1344 spv::BuiltInMax == BuiltinType) {
1345 StructTypesNeedingBlock.insert(
1346 cast<StructType>(PTy->getPointerElementType()));
1347 }
1348 }
1349 }
1350
David Neto862b7d82018-06-14 18:48:37 -04001351 // Traverse the arrays and structures underneath each Block, and
1352 // mark them as needing layout.
1353 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1354 StructTypesNeedingBlock.end());
1355 while (!work_list.empty()) {
1356 Type *type = work_list.back();
1357 work_list.pop_back();
1358 TypesNeedingLayout.insert(type);
1359 switch (type->getTypeID()) {
1360 case Type::ArrayTyID:
1361 work_list.push_back(type->getArrayElementType());
1362 if (!Hack_generate_runtime_array_stride_early) {
1363 // Remember this array type for deferred decoration.
1364 TypesNeedingArrayStride.insert(type);
1365 }
1366 break;
1367 case Type::StructTyID:
1368 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1369 work_list.push_back(elem_ty);
1370 }
1371 default:
1372 // This type and its contained types don't get layout.
1373 break;
1374 }
1375 }
1376}
1377
Alan Baker202c8c72018-08-13 13:47:44 -04001378void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1379 // The SpecId assignment for pointer-to-local arguments is recorded in
1380 // module-level metadata. Translate that information into local argument
1381 // information.
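// Each operand of that named metadata is a tuple of the form
// (kernel function, argument index, SpecId), unpacked below.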
1382 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001383 if (!nmd)
1384 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001385 for (auto operand : nmd->operands()) {
1386 MDTuple *tuple = cast<MDTuple>(operand);
1387 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1388 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001389 ConstantAsMetadata *arg_index_md =
1390 cast<ConstantAsMetadata>(tuple->getOperand(1));
1391 int arg_index = static_cast<int>(
1392 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1393 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001394
1395 ConstantAsMetadata *spec_id_md =
1396 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001397 int spec_id = static_cast<int>(
1398 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001399
1400 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1401 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001402 if (LocalSpecIdInfoMap.count(spec_id))
1403 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001404
1405 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
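// Reserve four consecutive IDs for this SpecId; they cover the workgroup
// variable, its array-size spec constant, the array type, and the
// pointer-to-array type used later when generating types.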
1406 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1407 nextID + 1, nextID + 2,
1408 nextID + 3, spec_id};
1409 LocalSpecIdInfoMap[spec_id] = info;
1410 nextID += 4;
1411
1412 // Ensure the types necessary for this argument get generated.
1413 Type *IdxTy = Type::getInt32Ty(M.getContext());
1414 FindConstant(ConstantInt::get(IdxTy, 0));
1415 FindType(IdxTy);
1416 FindType(arg->getType());
1417 }
1418}
1419
David Neto22f144c2017-06-12 14:26:21 -04001420void SPIRVProducerPass::FindType(Type *Ty) {
1421 TypeList &TyList = getTypeList();
1422
1423 if (0 != TyList.idFor(Ty)) {
1424 return;
1425 }
1426
1427 if (Ty->isPointerTy()) {
1428 auto AddrSpace = Ty->getPointerAddressSpace();
1429 if ((AddressSpace::Constant == AddrSpace) ||
1430 (AddressSpace::Global == AddrSpace)) {
1431 auto PointeeTy = Ty->getPointerElementType();
1432
1433 if (PointeeTy->isStructTy() &&
1434 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1435 FindType(PointeeTy);
1436 auto ActualPointerTy =
1437 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1438 FindType(ActualPointerTy);
1439 return;
1440 }
1441 }
1442 }
1443
David Neto862b7d82018-06-14 18:48:37 -04001444 // By convention, LLVM array type with 0 elements will map to
1445 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1446 // has a constant number of elements. We need to support the type of that
1447 // length constant.
1448 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1449 if (arrayTy->getNumElements() > 0) {
1450 LLVMContext &Context = Ty->getContext();
1451 FindType(Type::getInt32Ty(Context));
1452 }
David Neto22f144c2017-06-12 14:26:21 -04001453 }
1454
1455 for (Type *SubTy : Ty->subtypes()) {
1456 FindType(SubTy);
1457 }
1458
1459 TyList.insert(Ty);
1460}
1461
1462void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1463 // If the global variable has a (non-undef) initializer.
1464 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001465 // Generate the constant if it's not the initializer to a module scope
1466 // constant that we will expect in a storage buffer.
1467 const bool module_scope_constant_external_init =
1468 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1469 clspv::Option::ModuleConstantsInStorageBuffer();
1470 if (!module_scope_constant_external_init) {
1471 FindConstant(GV.getInitializer());
1472 }
David Neto22f144c2017-06-12 14:26:21 -04001473 }
1474}
1475
1476void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1477 // Investigate constants in function body.
1478 for (BasicBlock &BB : F) {
1479 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001480 if (auto *call = dyn_cast<CallInst>(&I)) {
1481 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001482 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001483 // We've handled these constants elsewhere, so skip it.
1484 continue;
1485 }
Alan Baker202c8c72018-08-13 13:47:44 -04001486 if (name.startswith(clspv::ResourceAccessorFunction())) {
1487 continue;
1488 }
1489 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001490 continue;
1491 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001492 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1493 // Skip the first operand that has the SPIR-V Opcode
1494 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1495 if (isa<Constant>(I.getOperand(i)) &&
1496 !isa<GlobalValue>(I.getOperand(i))) {
1497 FindConstant(I.getOperand(i));
1498 }
1499 }
1500 continue;
1501 }
David Neto22f144c2017-06-12 14:26:21 -04001502 }
1503
1504 if (isa<AllocaInst>(I)) {
1505 // An alloca instruction has a constant for the number of elements. Ignore it.
1506 continue;
1507 } else if (isa<ShuffleVectorInst>(I)) {
1508 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1509 // Ignore constant for mask of shuffle vector instruction.
1510 if (i == 2) {
1511 continue;
1512 }
1513
1514 if (isa<Constant>(I.getOperand(i)) &&
1515 !isa<GlobalValue>(I.getOperand(i))) {
1516 FindConstant(I.getOperand(i));
1517 }
1518 }
1519
1520 continue;
1521 } else if (isa<InsertElementInst>(I)) {
1522 // Handle InsertElement with <4 x i8> specially.
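// A <4 x i8> value is modeled as a packed 32-bit integer, so the insert
// becomes a mask (0xFF) and shift (element index * 8) sequence; record the
// constants those operations need.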
1523 Type *CompositeTy = I.getOperand(0)->getType();
1524 if (is4xi8vec(CompositeTy)) {
1525 LLVMContext &Context = CompositeTy->getContext();
1526 if (isa<Constant>(I.getOperand(0))) {
1527 FindConstant(I.getOperand(0));
1528 }
1529
1530 if (isa<Constant>(I.getOperand(1))) {
1531 FindConstant(I.getOperand(1));
1532 }
1533
1534 // Add mask constant 0xFF.
1535 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1536 FindConstant(CstFF);
1537
1538 // Add shift amount constant.
1539 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1540 uint64_t Idx = CI->getZExtValue();
1541 Constant *CstShiftAmount =
1542 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1543 FindConstant(CstShiftAmount);
1544 }
1545
1546 continue;
1547 }
1548
1549 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1550 // Ignore constant for index of InsertElement instruction.
1551 if (i == 2) {
1552 continue;
1553 }
1554
1555 if (isa<Constant>(I.getOperand(i)) &&
1556 !isa<GlobalValue>(I.getOperand(i))) {
1557 FindConstant(I.getOperand(i));
1558 }
1559 }
1560
1561 continue;
1562 } else if (isa<ExtractElementInst>(I)) {
1563 // Handle ExtractElement with <4 x i8> specially.
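// Extraction from the packed 32-bit form likewise needs the 0xFF mask and
// a shift amount: index * 8 for a constant index, or the constant 8 to
// scale a runtime index.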
1564 Type *CompositeTy = I.getOperand(0)->getType();
1565 if (is4xi8vec(CompositeTy)) {
1566 LLVMContext &Context = CompositeTy->getContext();
1567 if (isa<Constant>(I.getOperand(0))) {
1568 FindConstant(I.getOperand(0));
1569 }
1570
1571 // Add mask constant 0xFF.
1572 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1573 FindConstant(CstFF);
1574
1575 // Add shift amount constant.
1576 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1577 uint64_t Idx = CI->getZExtValue();
1578 Constant *CstShiftAmount =
1579 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1580 FindConstant(CstShiftAmount);
1581 } else {
1582 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1583 FindConstant(Cst8);
1584 }
1585
1586 continue;
1587 }
1588
1589 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1590 // Ignore constant for index of ExtractElement instruction.
1591 if (i == 1) {
1592 continue;
1593 }
1594
1595 if (isa<Constant>(I.getOperand(i)) &&
1596 !isa<GlobalValue>(I.getOperand(i))) {
1597 FindConstant(I.getOperand(i));
1598 }
1599 }
1600
1601 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001602 } else if ((Instruction::Xor == I.getOpcode()) &&
1603 I.getType()->isIntegerTy(1)) {
1604 // We special-case Xor where the type is i1 and one of the arguments is the
1605 // constant 1 (true); this becomes an OpLogicalNot in SPIR-V, so we don't
1606 // need the constant.
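// For example, 'xor i1 %p, true' is emitted as OpLogicalNot %p.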
David Neto22f144c2017-06-12 14:26:21 -04001607 bool foundConstantTrue = false;
1608 for (Use &Op : I.operands()) {
1609 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1610 auto CI = cast<ConstantInt>(Op);
1611
1612 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001613 // If we already found the true constant, we might (probably only
1614 // at -O0) have an OpLogicalNot that takes a constant
1615 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001616 FindConstant(Op);
1617 } else {
1618 foundConstantTrue = true;
1619 }
1620 }
1621 }
1622
1623 continue;
David Netod2de94a2017-08-28 17:27:47 -04001624 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001625 // Special case if i8 is not generally handled.
1626 if (!clspv::Option::Int8Support()) {
1627 // For truncation to i8 we mask against 255.
1628 Type *ToTy = I.getType();
1629 if (8u == ToTy->getPrimitiveSizeInBits()) {
1630 LLVMContext &Context = ToTy->getContext();
1631 Constant *Cst255 =
1632 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1633 FindConstant(Cst255);
1634 }
David Netod2de94a2017-08-28 17:27:47 -04001635 }
Neil Henning39672102017-09-29 14:33:13 +01001636 } else if (isa<AtomicRMWInst>(I)) {
1637 LLVMContext &Context = I.getContext();
1638
1639 FindConstant(
1640 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1641 FindConstant(ConstantInt::get(
1642 Type::getInt32Ty(Context),
1643 spv::MemorySemanticsUniformMemoryMask |
1644 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001645 }
1646
1647 for (Use &Op : I.operands()) {
1648 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1649 FindConstant(Op);
1650 }
1651 }
1652 }
1653 }
1654}
1655
1656void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001657 ValueList &CstList = getConstantList();
1658
David Netofb9a7972017-08-25 17:08:24 -04001659 // If V is already tracked, ignore it.
1660 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001661 return;
1662 }
1663
David Neto862b7d82018-06-14 18:48:37 -04001664 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1665 return;
1666 }
1667
David Neto22f144c2017-06-12 14:26:21 -04001668 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001669 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001670
1671 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001672 if (is4xi8vec(CstTy)) {
1673 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001674 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001675 }
1676 }
1677
1678 if (Cst->getNumOperands()) {
1679 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1680 ++I) {
1681 FindConstant(*I);
1682 }
1683
David Netofb9a7972017-08-25 17:08:24 -04001684 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001685 return;
1686 } else if (const ConstantDataSequential *CDS =
1687 dyn_cast<ConstantDataSequential>(Cst)) {
1688 // Add constants for each element to constant list.
1689 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1690 Constant *EleCst = CDS->getElementAsConstant(i);
1691 FindConstant(EleCst);
1692 }
1693 }
1694
1695 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001696 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001697 }
1698}
1699
1700spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1701 switch (AddrSpace) {
1702 default:
1703 llvm_unreachable("Unsupported OpenCL address space");
1704 case AddressSpace::Private:
1705 return spv::StorageClassFunction;
1706 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001707 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001708 case AddressSpace::Constant:
1709 return clspv::Option::ConstantArgsInUniformBuffer()
1710 ? spv::StorageClassUniform
1711 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001712 case AddressSpace::Input:
1713 return spv::StorageClassInput;
1714 case AddressSpace::Local:
1715 return spv::StorageClassWorkgroup;
1716 case AddressSpace::UniformConstant:
1717 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001718 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001719 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001720 case AddressSpace::ModuleScopePrivate:
1721 return spv::StorageClassPrivate;
1722 }
1723}
1724
David Neto862b7d82018-06-14 18:48:37 -04001725spv::StorageClass
1726SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1727 switch (arg_kind) {
1728 case clspv::ArgKind::Buffer:
1729 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001730 case clspv::ArgKind::BufferUBO:
1731 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001732 case clspv::ArgKind::Pod:
1733 return clspv::Option::PodArgsInUniformBuffer()
1734 ? spv::StorageClassUniform
1735 : spv::StorageClassStorageBuffer;
1736 case clspv::ArgKind::Local:
1737 return spv::StorageClassWorkgroup;
1738 case clspv::ArgKind::ReadOnlyImage:
1739 case clspv::ArgKind::WriteOnlyImage:
1740 case clspv::ArgKind::Sampler:
1741 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001742 default:
1743 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001744 }
1745}
1746
David Neto22f144c2017-06-12 14:26:21 -04001747spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1748 return StringSwitch<spv::BuiltIn>(Name)
1749 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1750 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1751 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1752 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1753 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1754 .Default(spv::BuiltInMax);
1755}
1756
1757void SPIRVProducerPass::GenerateExtInstImport() {
1758 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1759 uint32_t &ExtInstImportID = getOpExtInstImportID();
1760
1761 //
1762 // Generate OpExtInstImport.
1763 //
1764 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001765 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001766 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1767 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001768}
1769
alan-bakerb6b09dc2018-11-08 16:59:28 -05001770void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1771 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001772 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1773 ValueMapType &VMap = getValueMap();
1774 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001775 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001776
1777 // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
1778 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1779 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1780
1781 for (Type *Ty : getTypeList()) {
1782 // Update TypeMap with nextID for reference later.
1783 TypeMap[Ty] = nextID;
1784
1785 switch (Ty->getTypeID()) {
1786 default: {
1787 Ty->print(errs());
1788 llvm_unreachable("Unsupported type???");
1789 break;
1790 }
1791 case Type::MetadataTyID:
1792 case Type::LabelTyID: {
1793 // Ignore these types.
1794 break;
1795 }
1796 case Type::PointerTyID: {
1797 PointerType *PTy = cast<PointerType>(Ty);
1798 unsigned AddrSpace = PTy->getAddressSpace();
1799
1800 // For the purposes of our Vulkan SPIR-V type system, constant and global
1801 // are conflated.
1802 bool UseExistingOpTypePointer = false;
1803 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001804 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1805 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001806 // Check to see if we already created this type (for instance, if we
1807 // had a constant <type>* and a global <type>*, the type would be
1808 // created for whichever one was seen first, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001809 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1810 if (0 < TypeMap.count(GlobalTy)) {
1811 TypeMap[PTy] = TypeMap[GlobalTy];
1812 UseExistingOpTypePointer = true;
1813 break;
1814 }
David Neto22f144c2017-06-12 14:26:21 -04001815 }
1816 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001817 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1818 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001819
alan-bakerb6b09dc2018-11-08 16:59:28 -05001820 // Check to see if we already created this type (for instance, if we
1821 // had a constant <type>* and a global <type>*, the type would be
1822 // created for whichever one was seen first, and shared by both).
1823 auto ConstantTy =
1824 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001825 if (0 < TypeMap.count(ConstantTy)) {
1826 TypeMap[PTy] = TypeMap[ConstantTy];
1827 UseExistingOpTypePointer = true;
1828 }
David Neto22f144c2017-06-12 14:26:21 -04001829 }
1830 }
1831
David Neto862b7d82018-06-14 18:48:37 -04001832 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001833
David Neto862b7d82018-06-14 18:48:37 -04001834 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001835 //
1836 // Generate OpTypePointer.
1837 //
1838
1839 // OpTypePointer
1840 // Ops[0] = Storage Class
1841 // Ops[1] = Element Type ID
1842 SPIRVOperandList Ops;
1843
David Neto257c3892018-04-11 13:19:45 -04001844 Ops << MkNum(GetStorageClass(AddrSpace))
1845 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001846
David Neto87846742018-04-11 17:36:22 -04001847 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001848 SPIRVInstList.push_back(Inst);
1849 }
David Neto22f144c2017-06-12 14:26:21 -04001850 break;
1851 }
1852 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001853 StructType *STy = cast<StructType>(Ty);
1854
1855 // Handle sampler type.
1856 if (STy->isOpaque()) {
1857 if (STy->getName().equals("opencl.sampler_t")) {
1858 //
1859 // Generate OpTypeSampler
1860 //
1861 // Empty Ops.
1862 SPIRVOperandList Ops;
1863
David Neto87846742018-04-11 17:36:22 -04001864 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001865 SPIRVInstList.push_back(Inst);
1866 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001867 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1868 STy->getName().startswith("opencl.image1d_wo_t") ||
1869 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001870 STy->getName().startswith("opencl.image2d_wo_t") ||
1871 STy->getName().startswith("opencl.image3d_ro_t") ||
1872 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001873 //
1874 // Generate OpTypeImage
1875 //
1876 // Ops[0] = Sampled Type ID
1877 // Ops[1] = Dim ID
1878 // Ops[2] = Depth (Literal Number)
1879 // Ops[3] = Arrayed (Literal Number)
1880 // Ops[4] = MS (Literal Number)
1881 // Ops[5] = Sampled (Literal Number)
1882 // Ops[6] = Image Format ID
1883 //
1884 SPIRVOperandList Ops;
1885
alan-bakerf67468c2019-11-25 15:51:49 -05001886 uint32_t ImageTyID = nextID++;
1887 uint32_t SampledTyID = 0;
1888 if (STy->getName().contains(".float")) {
1889 SampledTyID = lookupType(Type::getFloatTy(Context));
1890 } else if (STy->getName().contains(".uint")) {
1891 SampledTyID = lookupType(Type::getInt32Ty(Context));
1892 } else if (STy->getName().contains(".int")) {
1893 // Generate a signed 32-bit integer if necessary.
1894 if (int32ID == 0) {
1895 int32ID = nextID++;
1896 SPIRVOperandList intOps;
1897 intOps << MkNum(32);
1898 intOps << MkNum(1);
1899 auto signed_int =
1900 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1901 SPIRVInstList.push_back(signed_int);
1902 }
1903 SampledTyID = int32ID;
1904
1905 // Generate a vec4 of the signed int if necessary.
1906 if (v4int32ID == 0) {
1907 v4int32ID = nextID++;
1908 SPIRVOperandList vecOps;
1909 vecOps << MkId(int32ID);
1910 vecOps << MkNum(4);
1911 auto int_vec =
1912 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
1913 SPIRVInstList.push_back(int_vec);
1914 }
1915 } else {
1916 // This was likely an UndefValue.
1917 SampledTyID = lookupType(Type::getFloatTy(Context));
1918 }
David Neto257c3892018-04-11 13:19:45 -04001919 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001920
1921 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05001922 if (STy->getName().startswith("opencl.image1d_ro_t") ||
1923 STy->getName().startswith("opencl.image1d_wo_t")) {
1924 DimID = spv::Dim1D;
1925 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
1926 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001927 DimID = spv::Dim3D;
1928 }
David Neto257c3892018-04-11 13:19:45 -04001929 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001930
1931 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001932 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001933
1934 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001935 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001936
1937 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001938 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001939
1940 // TODO: Set up Sampled.
1941 //
1942 // From Spec
1943 //
1944 // 0 indicates this is only known at run time, not at compile time
1945 // 1 indicates will be used with sampler
1946 // 2 indicates will be used without a sampler (a storage image)
1947 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05001948 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04001949 Sampled = 2;
1950 }
David Neto257c3892018-04-11 13:19:45 -04001951 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04001952
1953 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04001954 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04001955
alan-bakerf67468c2019-11-25 15:51:49 -05001956 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001957 SPIRVInstList.push_back(Inst);
1958 break;
1959 }
1960 }
1961
1962 //
1963 // Generate OpTypeStruct
1964 //
1965 // Ops[0] ... Ops[n] = Member IDs
1966 SPIRVOperandList Ops;
1967
1968 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04001969 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04001970 }
1971
David Neto22f144c2017-06-12 14:26:21 -04001972 uint32_t STyID = nextID;
1973
alan-bakerb6b09dc2018-11-08 16:59:28 -05001974 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001975 SPIRVInstList.push_back(Inst);
1976
1977 // Generate OpMemberDecorate.
1978 auto DecoInsertPoint =
1979 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1980 [](SPIRVInstruction *Inst) -> bool {
1981 return Inst->getOpcode() != spv::OpDecorate &&
1982 Inst->getOpcode() != spv::OpMemberDecorate &&
1983 Inst->getOpcode() != spv::OpExtInstImport;
1984 });
1985
David Netoc463b372017-08-10 15:32:21 -04001986 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04001987 // Search for the correct offsets if this type was remapped.
1988 std::vector<uint32_t> *offsets = nullptr;
1989 auto iter = RemappedUBOTypeOffsets.find(STy);
1990 if (iter != RemappedUBOTypeOffsets.end()) {
1991 offsets = &iter->second;
1992 }
David Netoc463b372017-08-10 15:32:21 -04001993
David Neto862b7d82018-06-14 18:48:37 -04001994 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04001995 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1996 MemberIdx++) {
1997 // Ops[0] = Structure Type ID
1998 // Ops[1] = Member Index(Literal Number)
1999 // Ops[2] = Decoration (Offset)
2000 // Ops[3] = Byte Offset (Literal Number)
2001 Ops.clear();
2002
David Neto257c3892018-04-11 13:19:45 -04002003 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002004
alan-bakerb6b09dc2018-11-08 16:59:28 -05002005 auto ByteOffset =
2006 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04002007 if (offsets) {
2008 ByteOffset = (*offsets)[MemberIdx];
2009 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05002010 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04002011 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04002012 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04002013
David Neto87846742018-04-11 17:36:22 -04002014 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002015 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002016 }
2017
2018 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002019 if (StructTypesNeedingBlock.idFor(STy)) {
2020 Ops.clear();
2021 // Use Block decorations with StorageBuffer storage class.
2022 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002023
David Neto862b7d82018-06-14 18:48:37 -04002024 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2025 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002026 }
2027 break;
2028 }
2029 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002030 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002031
2032 if (BitWidth == 1) {
David Neto87846742018-04-11 17:36:22 -04002033 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002034 SPIRVInstList.push_back(Inst);
2035 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002036 if (!clspv::Option::Int8Support()) {
2037 // i8 is added to TypeMap as i32.
2038 // No matter what LLVM type is requested first, always alias the
2039 // second one's SPIR-V type to be the same as the one we generated
2040 // first.
2041 unsigned aliasToWidth = 0;
2042 if (BitWidth == 8) {
2043 aliasToWidth = 32;
2044 BitWidth = 32;
2045 } else if (BitWidth == 32) {
2046 aliasToWidth = 8;
2047 }
2048 if (aliasToWidth) {
2049 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2050 auto where = TypeMap.find(otherType);
2051 if (where == TypeMap.end()) {
2052 // Go ahead and make it, but also map the other type to it.
2053 TypeMap[otherType] = nextID;
2054 } else {
2055 // Alias this SPIR-V type to the existing type.
2056 TypeMap[Ty] = where->second;
2057 break;
2058 }
David Neto391aeb12017-08-26 15:51:58 -04002059 }
David Neto22f144c2017-06-12 14:26:21 -04002060 }
2061
David Neto257c3892018-04-11 13:19:45 -04002062 SPIRVOperandList Ops;
2063 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002064
2065 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002066 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002067 }
2068 break;
2069 }
2070 case Type::HalfTyID:
2071 case Type::FloatTyID:
2072 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002073 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2074 SPIRVOperand *WidthOp =
2075 new SPIRVOperand(SPIRVOperandType::LITERAL_INTEGER, BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002076
2077 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002078 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04002079 break;
2080 }
2081 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002082 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002083 const uint64_t Length = ArrTy->getArrayNumElements();
2084 if (Length == 0) {
2085 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002086
David Neto862b7d82018-06-14 18:48:37 -04002087 // Only generate the type once.
2088 // TODO(dneto): Can it ever be generated more than once?
2089 // Doesn't LLVM type uniqueness guarantee we'll only see this
2090 // once?
2091 Type *EleTy = ArrTy->getArrayElementType();
2092 if (OpRuntimeTyMap.count(EleTy) == 0) {
2093 uint32_t OpTypeRuntimeArrayID = nextID;
2094 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002095
David Neto862b7d82018-06-14 18:48:37 -04002096 //
2097 // Generate OpTypeRuntimeArray.
2098 //
David Neto22f144c2017-06-12 14:26:21 -04002099
David Neto862b7d82018-06-14 18:48:37 -04002100 // OpTypeRuntimeArray
2101 // Ops[0] = Element Type ID
2102 SPIRVOperandList Ops;
2103 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002104
David Neto862b7d82018-06-14 18:48:37 -04002105 SPIRVInstList.push_back(
2106 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002107
David Neto862b7d82018-06-14 18:48:37 -04002108 if (Hack_generate_runtime_array_stride_early) {
2109 // Generate OpDecorate.
2110 auto DecoInsertPoint = std::find_if(
2111 SPIRVInstList.begin(), SPIRVInstList.end(),
2112 [](SPIRVInstruction *Inst) -> bool {
2113 return Inst->getOpcode() != spv::OpDecorate &&
2114 Inst->getOpcode() != spv::OpMemberDecorate &&
2115 Inst->getOpcode() != spv::OpExtInstImport;
2116 });
David Neto22f144c2017-06-12 14:26:21 -04002117
David Neto862b7d82018-06-14 18:48:37 -04002118 // Ops[0] = Target ID
2119 // Ops[1] = Decoration (ArrayStride)
2120 // Ops[2] = Stride Number(Literal Number)
2121 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002122
David Neto862b7d82018-06-14 18:48:37 -04002123 Ops << MkId(OpTypeRuntimeArrayID)
2124 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002125 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002126
David Neto862b7d82018-06-14 18:48:37 -04002127 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2128 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2129 }
2130 }
David Neto22f144c2017-06-12 14:26:21 -04002131
David Neto862b7d82018-06-14 18:48:37 -04002132 } else {
David Neto22f144c2017-06-12 14:26:21 -04002133
David Neto862b7d82018-06-14 18:48:37 -04002134 //
2135 // Generate OpConstant and OpTypeArray.
2136 //
2137
2138 //
2139 // Generate OpConstant for array length.
2140 //
2141 // Ops[0] = Result Type ID
2142 // Ops[1] .. Ops[n] = Values LiteralNumber
2143 SPIRVOperandList Ops;
2144
2145 Type *LengthTy = Type::getInt32Ty(Context);
2146 uint32_t ResTyID = lookupType(LengthTy);
2147 Ops << MkId(ResTyID);
2148
2149 assert(Length < UINT32_MAX);
2150 Ops << MkNum(static_cast<uint32_t>(Length));
2151
2152 // Add constant for length to constant list.
2153 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2154 AllocatedVMap[CstLength] = nextID;
2155 VMap[CstLength] = nextID;
2156 uint32_t LengthID = nextID;
2157
2158 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2159 SPIRVInstList.push_back(CstInst);
2160
2161 // Remember to generate ArrayStride later
2162 getTypesNeedingArrayStride().insert(Ty);
2163
2164 //
2165 // Generate OpTypeArray.
2166 //
2167 // Ops[0] = Element Type ID
2168 // Ops[1] = Array Length Constant ID
2169 Ops.clear();
2170
2171 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2172 Ops << MkId(EleTyID) << MkId(LengthID);
2173
2174 // Update TypeMap with nextID.
2175 TypeMap[Ty] = nextID;
2176
2177 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2178 SPIRVInstList.push_back(ArrayInst);
2179 }
David Neto22f144c2017-06-12 14:26:21 -04002180 break;
2181 }
2182 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002183 // <4 x i8> is changed to i32 if i8 is not generally supported.
2184 if (!clspv::Option::Int8Support() &&
2185 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002186 if (Ty->getVectorNumElements() == 4) {
2187 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2188 break;
2189 } else {
2190 Ty->print(errs());
2191 llvm_unreachable("Support above i8 vector type");
2192 }
2193 }
2194
2195 // Ops[0] = Component Type ID
2196 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002197 SPIRVOperandList Ops;
2198 Ops << MkId(lookupType(Ty->getVectorElementType()))
2199 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002200
alan-bakerb6b09dc2018-11-08 16:59:28 -05002201 SPIRVInstruction *inst =
2202 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002203 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002204 break;
2205 }
2206 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002207 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002208 SPIRVInstList.push_back(Inst);
2209 break;
2210 }
2211 case Type::FunctionTyID: {
2212 // Generate SPIRV instruction for function type.
2213 FunctionType *FTy = cast<FunctionType>(Ty);
2214
2215 // Ops[0] = Return Type ID
2216 // Ops[1] ... Ops[n] = Parameter Type IDs
2217 SPIRVOperandList Ops;
2218
2219 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002220 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002221
2222 // Find SPIRV instructions for parameter types
2223 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2224 // Find SPIRV instruction for parameter type.
2225 auto ParamTy = FTy->getParamType(k);
2226 if (ParamTy->isPointerTy()) {
2227 auto PointeeTy = ParamTy->getPointerElementType();
2228 if (PointeeTy->isStructTy() &&
2229 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2230 ParamTy = PointeeTy;
2231 }
2232 }
2233
David Netoc6f3ab22018-04-06 18:02:31 -04002234 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002235 }
2236
David Neto87846742018-04-11 17:36:22 -04002237 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002238 SPIRVInstList.push_back(Inst);
2239 break;
2240 }
2241 }
2242 }
2243
2244 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002245 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002246 //
2247 // Generate OpTypeSampledImage.
2248 //
2249 // Ops[0] = Image Type ID
2250 //
2251 SPIRVOperandList Ops;
2252
David Netoc6f3ab22018-04-06 18:02:31 -04002253 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002254
alan-bakerabd82722019-12-03 17:14:51 -05002255 // Update the image type map.
2256 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002257
David Neto87846742018-04-11 17:36:22 -04002258 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002259 SPIRVInstList.push_back(Inst);
2260 }
David Netoc6f3ab22018-04-06 18:02:31 -04002261
2262 // Generate types for pointer-to-local arguments.
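// For each distinct SpecId this emits an OpSpecConstant for the array
// length, an OpTypeArray of that length, and a Workgroup-storage
// OpTypePointer to that array type.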
Alan Baker202c8c72018-08-13 13:47:44 -04002263 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2264 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002265 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002266
2267 // Generate the spec constant.
2268 SPIRVOperandList Ops;
2269 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002270 SPIRVInstList.push_back(
2271 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002272
2273 // Generate the array type.
2274 Ops.clear();
2275 // The element type must have been created.
2276 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2277 assert(elem_ty_id);
2278 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2279
2280 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002281 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002282
2283 Ops.clear();
2284 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002285 SPIRVInstList.push_back(new SPIRVInstruction(
2286 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002287 }
David Neto22f144c2017-06-12 14:26:21 -04002288}
2289
2290void SPIRVProducerPass::GenerateSPIRVConstants() {
2291 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2292 ValueMapType &VMap = getValueMap();
2293 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2294 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002295 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002296
2297 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002298 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002299 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002300
2301 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002302 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002303 continue;
2304 }
2305
David Netofb9a7972017-08-25 17:08:24 -04002306 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002307 VMap[Cst] = nextID;
2308
2309 //
2310 // Generate OpConstant.
2311 //
2312
2313 // Ops[0] = Result Type ID
2314 // Ops[1] .. Ops[n] = Values LiteralNumber
2315 SPIRVOperandList Ops;
2316
David Neto257c3892018-04-11 13:19:45 -04002317 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002318
2319 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002320 spv::Op Opcode = spv::OpNop;
2321
2322 if (isa<UndefValue>(Cst)) {
2323 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002324 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002325 if (hack_undef && IsTypeNullable(Cst->getType())) {
2326 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002327 }
David Neto22f144c2017-06-12 14:26:21 -04002328 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2329 unsigned BitWidth = CI->getBitWidth();
2330 if (BitWidth == 1) {
2331 // If the bitwidth of constant is 1, generate OpConstantTrue or
2332 // OpConstantFalse.
2333 if (CI->getZExtValue()) {
2334 // Ops[0] = Result Type ID
2335 Opcode = spv::OpConstantTrue;
2336 } else {
2337 // Ops[0] = Result Type ID
2338 Opcode = spv::OpConstantFalse;
2339 }
David Neto22f144c2017-06-12 14:26:21 -04002340 } else {
2341 auto V = CI->getZExtValue();
2342 LiteralNum.push_back(V & 0xFFFFFFFF);
2343
2344 if (BitWidth > 32) {
2345 LiteralNum.push_back(V >> 32);
2346 }
2347
2348 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002349
David Neto257c3892018-04-11 13:19:45 -04002350 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002351 }
2352 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2353 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2354 Type *CFPTy = CFP->getType();
2355 if (CFPTy->isFloatTy()) {
2356 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002357 } else if (CFPTy->isDoubleTy()) {
2358 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2359 LiteralNum.push_back(FPVal >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002360 } else {
2361 CFPTy->print(errs());
2362 llvm_unreachable("Implement this ConstantFP Type");
2363 }
2364
2365 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002366
David Neto257c3892018-04-11 13:19:45 -04002367 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002368 } else if (isa<ConstantDataSequential>(Cst) &&
2369 cast<ConstantDataSequential>(Cst)->isString()) {
2370 Cst->print(errs());
2371 llvm_unreachable("Implement this Constant");
2372
2373 } else if (const ConstantDataSequential *CDS =
2374 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002375 // Let's convert <4 x i8> constant to int constant specially.
2376 // This case occurs when all the values are specified as constant
2377 // ints.
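// The four byte values are packed into a single 32-bit word, one byte per
// element, and emitted as an ordinary 32-bit OpConstant.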
2378 Type *CstTy = Cst->getType();
2379 if (is4xi8vec(CstTy)) {
2380 LLVMContext &Context = CstTy->getContext();
2381
2382 //
2383 // Generate OpConstant with OpTypeInt 32 0.
2384 //
Neil Henning39672102017-09-29 14:33:13 +01002385 uint32_t IntValue = 0;
2386 for (unsigned k = 0; k < 4; k++) {
2387 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002388 IntValue = (IntValue << 8) | (Val & 0xffu);
2389 }
2390
2391 Type *i32 = Type::getInt32Ty(Context);
2392 Constant *CstInt = ConstantInt::get(i32, IntValue);
2393 // If this constant is already registered on VMap, use it.
2394 if (VMap.count(CstInt)) {
2395 uint32_t CstID = VMap[CstInt];
2396 VMap[Cst] = CstID;
2397 continue;
2398 }
2399
David Neto257c3892018-04-11 13:19:45 -04002400 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002401
David Neto87846742018-04-11 17:36:22 -04002402 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002403 SPIRVInstList.push_back(CstInst);
2404
2405 continue;
2406 }
2407
2408 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002409 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2410 Constant *EleCst = CDS->getElementAsConstant(k);
2411 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002412 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002413 }
2414
2415 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002416 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2417 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002418 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002419 Type *CstTy = Cst->getType();
2420 if (is4xi8vec(CstTy)) {
2421 LLVMContext &Context = CstTy->getContext();
2422
2423 //
2424 // Generate OpConstant with OpTypeInt 32 0.
2425 //
Neil Henning39672102017-09-29 14:33:13 +01002426 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002427 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2428 I != E; ++I) {
2429 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002430 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002431 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2432 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002433 }
David Neto49351ac2017-08-26 17:32:20 -04002434 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002435 }
2436
David Neto49351ac2017-08-26 17:32:20 -04002437 Type *i32 = Type::getInt32Ty(Context);
2438 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002439 // If this constant is already registered on VMap, use it.
2440 if (VMap.count(CstInt)) {
2441 uint32_t CstID = VMap[CstInt];
2442 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002443 continue;
David Neto22f144c2017-06-12 14:26:21 -04002444 }
2445
David Neto257c3892018-04-11 13:19:45 -04002446 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002447
David Neto87846742018-04-11 17:36:22 -04002448 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002449 SPIRVInstList.push_back(CstInst);
2450
David Neto19a1bad2017-08-25 15:01:41 -04002451 continue;
David Neto22f144c2017-06-12 14:26:21 -04002452 }
2453
2454 // We use a constant composite in SPIR-V for our constant aggregate in
2455 // LLVM.
2456 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002457
2458 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2459 // Look up the ID of the element of this aggregate (which we will
2460 // previously have created a constant for).
2461 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2462
2463 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002464 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002465 }
2466 } else if (Cst->isNullValue()) {
2467 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002468 } else {
2469 Cst->print(errs());
2470 llvm_unreachable("Unsupported Constant???");
2471 }
2472
alan-baker5b86ed72019-02-15 08:26:50 -05002473 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2474 // Null pointer requires variable pointers.
2475 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2476 }
2477
David Neto87846742018-04-11 17:36:22 -04002478 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002479 SPIRVInstList.push_back(CstInst);
2480 }
2481}
2482
2483void SPIRVProducerPass::GenerateSamplers(Module &M) {
2484 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002485
alan-bakerb6b09dc2018-11-08 16:59:28 -05002486 auto &sampler_map = getSamplerMap();
David Neto862b7d82018-06-14 18:48:37 -04002487 SamplerMapIndexToIDMap.clear();
David Neto22f144c2017-06-12 14:26:21 -04002488 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Neto862b7d82018-06-14 18:48:37 -04002489 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2490 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002491
David Neto862b7d82018-06-14 18:48:37 -04002492 // We might have samplers in the sampler map that are not used
2493 // in the translation unit. We need to allocate variables
2494 // for them and bindings too.
2495 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002496
Kévin Petitdf71de32019-04-09 14:09:50 +01002497 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-bakerb6b09dc2018-11-08 16:59:28 -05002498 if (!var_fn)
2499 return;
David Neto862b7d82018-06-14 18:48:37 -04002500 for (auto user : var_fn->users()) {
2501 // Populate SamplerLiteralToDescriptorSetMap and
2502 // SamplerLiteralToBindingMap.
2503 //
2504 // Look for calls like
2505 // call %opencl.sampler_t addrspace(2)*
2506 // @clspv.sampler.var.literal(
2507    //   i32 descriptor set,
2508 // i32 binding,
2509 // i32 index-into-sampler-map)
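    // A concrete sketch of such a call (hypothetical IR; the value name and
    // literal operands are illustrative only):
    //   %s = call %opencl.sampler_t addrspace(2)*
    //            @clspv.sampler.var.literal(i32 0, i32 3, i32 2)
    // This binds the sampler at index 2 of the sampler map to descriptor
    // set 0, binding 3.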
alan-bakerb6b09dc2018-11-08 16:59:28 -05002510 if (auto *call = dyn_cast<CallInst>(user)) {
2511 const size_t index_into_sampler_map = static_cast<size_t>(
2512 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002513 if (index_into_sampler_map >= sampler_map.size()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002514 errs() << "Out of bounds index to sampler map: "
2515 << index_into_sampler_map;
David Neto862b7d82018-06-14 18:48:37 -04002516 llvm_unreachable("bad sampler init: out of bounds");
2517 }
2518
2519 auto sampler_value = sampler_map[index_into_sampler_map].first;
2520 const auto descriptor_set = static_cast<unsigned>(
2521 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2522 const auto binding = static_cast<unsigned>(
2523 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2524
2525 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2526 SamplerLiteralToBindingMap[sampler_value] = binding;
2527 used_bindings.insert(binding);
2528 }
2529 }
2530
2531 unsigned index = 0;
2532 for (auto SamplerLiteral : sampler_map) {
David Neto22f144c2017-06-12 14:26:21 -04002533 // Generate OpVariable.
2534 //
2535    // Ops[0] : Result Type ID
2536    // Ops[1] : Storage Class
2537 SPIRVOperandList Ops;
2538
David Neto257c3892018-04-11 13:19:45 -04002539 Ops << MkId(lookupType(SamplerTy))
2540 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002541
David Neto862b7d82018-06-14 18:48:37 -04002542 auto sampler_var_id = nextID++;
2543 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002544 SPIRVInstList.push_back(Inst);
2545
David Neto862b7d82018-06-14 18:48:37 -04002546 SamplerMapIndexToIDMap[index] = sampler_var_id;
2547 SamplerLiteralToIDMap[SamplerLiteral.first] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002548
2549 // Find Insert Point for OpDecorate.
2550 auto DecoInsertPoint =
2551 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2552 [](SPIRVInstruction *Inst) -> bool {
2553 return Inst->getOpcode() != spv::OpDecorate &&
2554 Inst->getOpcode() != spv::OpMemberDecorate &&
2555 Inst->getOpcode() != spv::OpExtInstImport;
2556 });
2557
2558 // Ops[0] = Target ID
2559 // Ops[1] = Decoration (DescriptorSet)
2560 // Ops[2] = LiteralNumber according to Decoration
2561 Ops.clear();
2562
David Neto862b7d82018-06-14 18:48:37 -04002563 unsigned descriptor_set;
2564 unsigned binding;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002565 if (SamplerLiteralToBindingMap.find(SamplerLiteral.first) ==
2566 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002567      // This sampler is not actually used. Assign it the next unused binding.
2568 for (binding = 0; used_bindings.count(binding); binding++)
2569 ;
2570 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2571 used_bindings.insert(binding);
2572 } else {
2573 descriptor_set = SamplerLiteralToDescriptorSetMap[SamplerLiteral.first];
2574 binding = SamplerLiteralToBindingMap[SamplerLiteral.first];
alan-bakercff80152019-06-15 00:38:00 -04002575
2576 version0::DescriptorMapEntry::SamplerData sampler_data = {
2577 SamplerLiteral.first};
2578 descriptorMapEntries->emplace_back(std::move(sampler_data),
2579 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002580 }
2581
2582 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2583 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002584
David Neto87846742018-04-11 17:36:22 -04002585 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002586 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2587
2588 // Ops[0] = Target ID
2589 // Ops[1] = Decoration (Binding)
2590 // Ops[2] = LiteralNumber according to Decoration
2591 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002592 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2593 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002594
David Neto87846742018-04-11 17:36:22 -04002595 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002596 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002597
2598 index++;
David Neto22f144c2017-06-12 14:26:21 -04002599 }
David Neto862b7d82018-06-14 18:48:37 -04002600}
David Neto22f144c2017-06-12 14:26:21 -04002601
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002602void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002603 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2604 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002605
David Neto862b7d82018-06-14 18:48:37 -04002606  // Generate variables. Make one for each resource var info object.
2607 for (auto *info : ModuleOrderedResourceVars) {
2608 Type *type = info->var_fn->getReturnType();
2609 // Remap the address space for opaque types.
2610 switch (info->arg_kind) {
2611 case clspv::ArgKind::Sampler:
2612 case clspv::ArgKind::ReadOnlyImage:
2613 case clspv::ArgKind::WriteOnlyImage:
2614 type = PointerType::get(type->getPointerElementType(),
2615 clspv::AddressSpace::UniformConstant);
2616 break;
2617 default:
2618 break;
2619 }
David Neto22f144c2017-06-12 14:26:21 -04002620
David Neto862b7d82018-06-14 18:48:37 -04002621 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002622
David Neto862b7d82018-06-14 18:48:37 -04002623 const auto type_id = lookupType(type);
2624 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2625 SPIRVOperandList Ops;
2626 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002627
David Neto862b7d82018-06-14 18:48:37 -04002628 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2629 SPIRVInstList.push_back(Inst);
2630
2631 // Map calls to the variable-builtin-function.
2632 for (auto &U : info->var_fn->uses()) {
2633 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2634 const auto set = unsigned(
2635 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2636 const auto binding = unsigned(
2637 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2638 if (set == info->descriptor_set && binding == info->binding) {
2639 switch (info->arg_kind) {
2640 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002641 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002642 case clspv::ArgKind::Pod:
2643 // The call maps to the variable directly.
2644 VMap[call] = info->var_id;
2645 break;
2646 case clspv::ArgKind::Sampler:
2647 case clspv::ArgKind::ReadOnlyImage:
2648 case clspv::ArgKind::WriteOnlyImage:
2649 // The call maps to a load we generate later.
2650 ResourceVarDeferredLoadCalls[call] = info->var_id;
2651 break;
2652 default:
2653 llvm_unreachable("Unhandled arg kind");
2654 }
2655 }
David Neto22f144c2017-06-12 14:26:21 -04002656 }
David Neto862b7d82018-06-14 18:48:37 -04002657 }
2658 }
David Neto22f144c2017-06-12 14:26:21 -04002659
David Neto862b7d82018-06-14 18:48:37 -04002660 // Generate associated decorations.
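  // A sketch of the decorations the loop below emits per resource variable
  // (the %var id is illustrative):
  //   OpDecorate %var DescriptorSet <set>
  //   OpDecorate %var Binding <binding>
  //   OpDecorate %var Coherent      ; only when info->coherent is set
  //   OpDecorate %var NonWritable   ; constant-address-space buffers
  //   OpDecorate %var NonReadable   ; write-only images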
David Neto22f144c2017-06-12 14:26:21 -04002661
David Neto862b7d82018-06-14 18:48:37 -04002662 // Find Insert Point for OpDecorate.
2663 auto DecoInsertPoint =
2664 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2665 [](SPIRVInstruction *Inst) -> bool {
2666 return Inst->getOpcode() != spv::OpDecorate &&
2667 Inst->getOpcode() != spv::OpMemberDecorate &&
2668 Inst->getOpcode() != spv::OpExtInstImport;
2669 });
2670
2671 SPIRVOperandList Ops;
2672 for (auto *info : ModuleOrderedResourceVars) {
2673 // Decorate with DescriptorSet and Binding.
2674 Ops.clear();
2675 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2676 << MkNum(info->descriptor_set);
2677 SPIRVInstList.insert(DecoInsertPoint,
2678 new SPIRVInstruction(spv::OpDecorate, Ops));
2679
2680 Ops.clear();
2681 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2682 << MkNum(info->binding);
2683 SPIRVInstList.insert(DecoInsertPoint,
2684 new SPIRVInstruction(spv::OpDecorate, Ops));
2685
alan-bakere9308012019-03-15 10:25:13 -04002686 if (info->coherent) {
2687 // Decorate with Coherent if required for the variable.
2688 Ops.clear();
2689 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2690 SPIRVInstList.insert(DecoInsertPoint,
2691 new SPIRVInstruction(spv::OpDecorate, Ops));
2692 }
2693
David Neto862b7d82018-06-14 18:48:37 -04002694 // Generate NonWritable and NonReadable
2695 switch (info->arg_kind) {
2696 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002697 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002698 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2699 clspv::AddressSpace::Constant) {
2700 Ops.clear();
2701 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2702 SPIRVInstList.insert(DecoInsertPoint,
2703 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002704 }
David Neto862b7d82018-06-14 18:48:37 -04002705 break;
David Neto862b7d82018-06-14 18:48:37 -04002706 case clspv::ArgKind::WriteOnlyImage:
2707 Ops.clear();
2708 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2709 SPIRVInstList.insert(DecoInsertPoint,
2710 new SPIRVInstruction(spv::OpDecorate, Ops));
2711 break;
2712 default:
2713 break;
David Neto22f144c2017-06-12 14:26:21 -04002714 }
2715 }
2716}
2717
2718void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002719 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002720 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2721 ValueMapType &VMap = getValueMap();
2722 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002723 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002724
2725 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2726 Type *Ty = GV.getType();
2727 PointerType *PTy = cast<PointerType>(Ty);
2728
2729 uint32_t InitializerID = 0;
2730
2731 // Workgroup size is handled differently (it goes into a constant)
2732 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2733 std::vector<bool> HasMDVec;
2734 uint32_t PrevXDimCst = 0xFFFFFFFF;
2735 uint32_t PrevYDimCst = 0xFFFFFFFF;
2736 uint32_t PrevZDimCst = 0xFFFFFFFF;
2737 for (Function &Func : *GV.getParent()) {
2738 if (Func.isDeclaration()) {
2739 continue;
2740 }
2741
2742 // We only need to check kernels.
2743 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2744 continue;
2745 }
2746
2747 if (const MDNode *MD =
2748 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2749 uint32_t CurXDimCst = static_cast<uint32_t>(
2750 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2751 uint32_t CurYDimCst = static_cast<uint32_t>(
2752 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2753 uint32_t CurZDimCst = static_cast<uint32_t>(
2754 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2755
2756 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2757 PrevZDimCst == 0xFFFFFFFF) {
2758 PrevXDimCst = CurXDimCst;
2759 PrevYDimCst = CurYDimCst;
2760 PrevZDimCst = CurZDimCst;
2761 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2762 CurZDimCst != PrevZDimCst) {
2763 llvm_unreachable(
2764 "reqd_work_group_size must be the same across all kernels");
2765 } else {
2766 continue;
2767 }
2768
2769 //
2770 // Generate OpConstantComposite.
2771 //
2772 // Ops[0] : Result Type ID
2773 // Ops[1] : Constant size for x dimension.
2774 // Ops[2] : Constant size for y dimension.
2775 // Ops[3] : Constant size for z dimension.
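        // For example, if every kernel declares
        // __attribute__((reqd_work_group_size(8, 4, 2))), the result is
        // (ids illustrative):
        //   %workgroup_size = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_2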
2776 SPIRVOperandList Ops;
2777
2778 uint32_t XDimCstID =
2779 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2780 uint32_t YDimCstID =
2781 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2782 uint32_t ZDimCstID =
2783 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2784
2785 InitializerID = nextID;
2786
David Neto257c3892018-04-11 13:19:45 -04002787 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2788 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002789
David Neto87846742018-04-11 17:36:22 -04002790 auto *Inst =
2791 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002792 SPIRVInstList.push_back(Inst);
2793
2794 HasMDVec.push_back(true);
2795 } else {
2796 HasMDVec.push_back(false);
2797 }
2798 }
2799
2800    // Check that all kernels have the same reqd_work_group_size definition.
2801 bool HasMD = false;
2802 if (!HasMDVec.empty()) {
2803 HasMD = HasMDVec[0];
2804 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2805 if (HasMD != HasMDVec[i]) {
2806 llvm_unreachable(
2807 "Kernels should have consistent work group size definition");
2808 }
2809 }
2810 }
2811
2812    // If the kernels do not have reqd_work_group_size metadata, generate
2813    // OpSpecConstants for the x/y/z dimensions instead.
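    // A minimal sketch of the SPIR-V generated in that case (ids
    // illustrative). Each scalar defaults to 1 and is later decorated with
    // SpecId 0/1/2 in GenerateModuleInfo so the host can override it:
    //   %x = OpSpecConstant %uint 1
    //   %y = OpSpecConstant %uint 1
    //   %z = OpSpecConstant %uint 1
    //   %workgroup_size = OpSpecConstantComposite %v3uint %x %y %z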
2814 if (!HasMD) {
2815 //
2816 // Generate OpSpecConstants for x/y/z dimension.
2817 //
2818 // Ops[0] : Result Type ID
2819 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2820 uint32_t XDimCstID = 0;
2821 uint32_t YDimCstID = 0;
2822 uint32_t ZDimCstID = 0;
2823
David Neto22f144c2017-06-12 14:26:21 -04002824 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002825 uint32_t result_type_id =
2826 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002827
David Neto257c3892018-04-11 13:19:45 -04002828 // X Dimension
2829 Ops << MkId(result_type_id) << MkNum(1);
2830 XDimCstID = nextID++;
2831 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002832 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002833
2834 // Y Dimension
2835 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002836 Ops << MkId(result_type_id) << MkNum(1);
2837 YDimCstID = nextID++;
2838 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002839 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002840
2841 // Z Dimension
2842 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002843 Ops << MkId(result_type_id) << MkNum(1);
2844 ZDimCstID = nextID++;
2845 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002846 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002847
David Neto257c3892018-04-11 13:19:45 -04002848 BuiltinDimVec.push_back(XDimCstID);
2849 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002850 BuiltinDimVec.push_back(ZDimCstID);
2851
David Neto22f144c2017-06-12 14:26:21 -04002852 //
2853 // Generate OpSpecConstantComposite.
2854 //
2855 // Ops[0] : Result Type ID
2856 // Ops[1] : Constant size for x dimension.
2857 // Ops[2] : Constant size for y dimension.
2858 // Ops[3] : Constant size for z dimension.
2859 InitializerID = nextID;
2860
2861 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002862 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2863 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002864
David Neto87846742018-04-11 17:36:22 -04002865 auto *Inst =
2866 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002867 SPIRVInstList.push_back(Inst);
2868 }
2869 }
2870
David Neto22f144c2017-06-12 14:26:21 -04002871 VMap[&GV] = nextID;
2872
2873 //
2874 // Generate OpVariable.
2875 //
2876  // Ops[0] : Result Type ID
2877  // Ops[1] : Storage Class
2878 SPIRVOperandList Ops;
2879
David Neto85082642018-03-24 06:55:20 -07002880 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002881 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002882
David Neto85082642018-03-24 06:55:20 -07002883 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002884 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002885 clspv::Option::ModuleConstantsInStorageBuffer();
2886
Kévin Petit23d5f182019-08-13 16:21:29 +01002887 if (GV.hasInitializer()) {
2888 auto GVInit = GV.getInitializer();
2889 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2890 assert(VMap.count(GVInit) == 1);
2891 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002892 }
2893 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002894
2895 if (0 != InitializerID) {
2896    // Emit the ID of the initializer as part of the variable definition.
2897 Ops << MkId(InitializerID);
2898 }
David Neto85082642018-03-24 06:55:20 -07002899 const uint32_t var_id = nextID++;
2900
David Neto87846742018-04-11 17:36:22 -04002901 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002902 SPIRVInstList.push_back(Inst);
2903
2904 // If we have a builtin.
2905 if (spv::BuiltInMax != BuiltinType) {
2906 // Find Insert Point for OpDecorate.
2907 auto DecoInsertPoint =
2908 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2909 [](SPIRVInstruction *Inst) -> bool {
2910 return Inst->getOpcode() != spv::OpDecorate &&
2911 Inst->getOpcode() != spv::OpMemberDecorate &&
2912 Inst->getOpcode() != spv::OpExtInstImport;
2913 });
2914 //
2915 // Generate OpDecorate.
2916 //
2917 // DOps[0] = Target ID
2918 // DOps[1] = Decoration (Builtin)
2919 // DOps[2] = BuiltIn ID
2920 uint32_t ResultID;
2921
2922    // WorkgroupSize is different: we decorate the constant composite that holds
2923    // its value, rather than the variable that we use to access the value.
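    // For example, the decoration targets the composite (id illustrative):
    //   OpDecorate %workgroup_size BuiltIn WorkgroupSize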
2924 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2925 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002926 // Save both the value and variable IDs for later.
2927 WorkgroupSizeValueID = InitializerID;
2928 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002929 } else {
2930 ResultID = VMap[&GV];
2931 }
2932
2933 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002934 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2935 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002936
David Neto87846742018-04-11 17:36:22 -04002937 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002938 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002939 } else if (module_scope_constant_external_init) {
2940 // This module scope constant is initialized from a storage buffer with data
2941 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002942 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002943
David Neto862b7d82018-06-14 18:48:37 -04002944    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002945 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2946 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002947 std::string hexbytes;
2948 llvm::raw_string_ostream str(hexbytes);
2949 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002950 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2951 str.str()};
2952 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2953 0);
David Neto85082642018-03-24 06:55:20 -07002954
2955 // Find Insert Point for OpDecorate.
2956 auto DecoInsertPoint =
2957 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2958 [](SPIRVInstruction *Inst) -> bool {
2959 return Inst->getOpcode() != spv::OpDecorate &&
2960 Inst->getOpcode() != spv::OpMemberDecorate &&
2961 Inst->getOpcode() != spv::OpExtInstImport;
2962 });
2963
David Neto257c3892018-04-11 13:19:45 -04002964 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002965 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002966 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2967 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002968 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002969
2970 // OpDecorate %var DescriptorSet <descriptor_set>
2971 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002972 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2973 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002974 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04002975 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04002976 }
2977}
2978
David Netoc6f3ab22018-04-06 18:02:31 -04002979void SPIRVProducerPass::GenerateWorkgroupVars() {
2980 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04002981 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2982 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002983 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002984
2985 // Generate OpVariable.
2986 //
2987    // Ops[0] : Result Type ID
2988    // Ops[1] : Storage Class
2989 SPIRVOperandList Ops;
2990 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
2991
2992 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002993 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002994 }
2995}
2996
David Neto862b7d82018-06-14 18:48:37 -04002997void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
2998 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04002999 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3000 return;
3001 }
David Neto862b7d82018-06-14 18:48:37 -04003002 // Gather the list of resources that are used by this function's arguments.
3003 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3004
alan-bakerf5e5f692018-11-27 08:33:24 -05003005 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3006 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003007 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003008 std::string kind =
3009 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3010 ? "pod_ubo"
3011 : argKind;
3012 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003013 };
3014
3015 auto *fty = F.getType()->getPointerElementType();
3016 auto *func_ty = dyn_cast<FunctionType>(fty);
3017
alan-baker038e9242019-04-19 22:14:41 -04003018 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003019 // If an argument maps to a resource variable, then get descriptor set and
3020  // binding from the resource variable. Other info comes from the metadata.
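  // Each "kernel_arg_map" entry has seven operands, read below in this order:
  //   {name, ordinal, remapped index, offset, size in bytes, arg kind, spec id}
  // A sketch of one entry (all values are illustrative only):
  //   !{!"dst", i32 0, i32 0, i32 0, i32 4, !"buffer", i32 -1}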
3021 const auto *arg_map = F.getMetadata("kernel_arg_map");
3022 if (arg_map) {
3023 for (const auto &arg : arg_map->operands()) {
3024 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003025 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003026 const auto name =
3027 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3028 const auto old_index =
3029 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3030 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003031 const size_t new_index = static_cast<size_t>(
3032 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003033 const auto offset =
3034 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003035 const auto arg_size =
3036 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003037 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003038 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003039 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003040 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003041
3042 uint32_t descriptor_set = 0;
3043 uint32_t binding = 0;
3044 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003045 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003046 static_cast<uint32_t>(spec_id),
3047 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003048 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003049 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003050 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3051 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3052 DL));
David Neto862b7d82018-06-14 18:48:37 -04003053 } else {
3054 auto *info = resource_var_at_index[new_index];
3055 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003056 descriptor_set = info->descriptor_set;
3057 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003058 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003059 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3060 binding);
David Neto862b7d82018-06-14 18:48:37 -04003061 }
3062 } else {
3063 // There is no argument map.
3064 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003065 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003066
3067 SmallVector<Argument *, 4> arguments;
3068 for (auto &arg : F.args()) {
3069 arguments.push_back(&arg);
3070 }
3071
3072 unsigned arg_index = 0;
3073 for (auto *info : resource_var_at_index) {
3074 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003075 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003076 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003077 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003078 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003079 }
3080
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003081 // Local pointer arguments are unused in this case. Offset is always
3082 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003083 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3084 F.getName(), arg->getName(),
3085 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3086 0, 0,
3087 0, arg_size};
3088 descriptorMapEntries->emplace_back(std::move(kernel_data),
3089 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003090 }
3091 arg_index++;
3092 }
3093 // Generate mappings for pointer-to-local arguments.
3094 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3095 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003096 auto where = LocalArgSpecIds.find(arg);
3097 if (where != LocalArgSpecIds.end()) {
3098 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003099        // Pod argument members are unused in this case.
3100 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3101 F.getName(),
3102 arg->getName(),
3103 arg_index,
3104 ArgKind::Local,
3105 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003106 static_cast<uint32_t>(
3107 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003108 0,
3109 0};
3110 // Pointer-to-local arguments do not utilize descriptor set and binding.
3111 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003112 }
3113 }
3114 }
3115}
3116
David Neto22f144c2017-06-12 14:26:21 -04003117void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3118 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3119 ValueMapType &VMap = getValueMap();
3120 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003121 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3122 auto &GlobalConstArgSet = getGlobalConstArgSet();
3123
3124 FunctionType *FTy = F.getFunctionType();
3125
3126 //
David Neto22f144c2017-06-12 14:26:21 -04003127  // Generate OpFunction.
3128 //
3129
3130 // FOps[0] : Result Type ID
3131 // FOps[1] : Function Control
3132 // FOps[2] : Function Type ID
3133 SPIRVOperandList FOps;
3134
3135 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003136 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003137
3138 // Check function attributes for SPIRV Function Control.
3139 uint32_t FuncControl = spv::FunctionControlMaskNone;
3140 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3141 FuncControl |= spv::FunctionControlInlineMask;
3142 }
3143 if (F.hasFnAttribute(Attribute::NoInline)) {
3144 FuncControl |= spv::FunctionControlDontInlineMask;
3145 }
3146 // TODO: Check llvm attribute for Function Control Pure.
3147 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3148 FuncControl |= spv::FunctionControlPureMask;
3149 }
3150 // TODO: Check llvm attribute for Function Control Const.
3151 if (F.hasFnAttribute(Attribute::ReadNone)) {
3152 FuncControl |= spv::FunctionControlConstMask;
3153 }
3154
David Neto257c3892018-04-11 13:19:45 -04003155 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003156
3157 uint32_t FTyID;
3158 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3159 SmallVector<Type *, 4> NewFuncParamTys;
3160 FunctionType *NewFTy =
3161 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3162 FTyID = lookupType(NewFTy);
3163 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003164 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003165 if (GlobalConstFuncTyMap.count(FTy)) {
3166 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3167 } else {
3168 FTyID = lookupType(FTy);
3169 }
3170 }
3171
David Neto257c3892018-04-11 13:19:45 -04003172 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003173
3174 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3175 EntryPoints.push_back(std::make_pair(&F, nextID));
3176 }
3177
3178 VMap[&F] = nextID;
3179
David Neto482550a2018-03-24 05:21:07 -07003180 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003181 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3182 }
David Neto22f144c2017-06-12 14:26:21 -04003183 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003184 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003185 SPIRVInstList.push_back(FuncInst);
3186
3187 //
3188 // Generate OpFunctionParameter for Normal function.
3189 //
3190
3191 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003192
3193 // Find Insert Point for OpDecorate.
3194 auto DecoInsertPoint =
3195 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3196 [](SPIRVInstruction *Inst) -> bool {
3197 return Inst->getOpcode() != spv::OpDecorate &&
3198 Inst->getOpcode() != spv::OpMemberDecorate &&
3199 Inst->getOpcode() != spv::OpExtInstImport;
3200 });
3201
David Neto22f144c2017-06-12 14:26:21 -04003202    // Iterate over the arguments (rather than the function type's parameter
3203    // list) so that we can use the argument names.
3203 unsigned ArgIdx = 0;
3204 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003205 uint32_t param_id = nextID++;
3206 VMap[&Arg] = param_id;
3207
3208 if (CalledWithCoherentResource(Arg)) {
3209 // If the arg is passed a coherent resource ever, then decorate this
3210 // parameter with Coherent too.
3211 SPIRVOperandList decoration_ops;
3212 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003213 SPIRVInstList.insert(
3214 DecoInsertPoint,
3215 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003216 }
David Neto22f144c2017-06-12 14:26:21 -04003217
3218 // ParamOps[0] : Result Type ID
3219 SPIRVOperandList ParamOps;
3220
3221 // Find SPIRV instruction for parameter type.
3222 uint32_t ParamTyID = lookupType(Arg.getType());
3223 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3224 if (GlobalConstFuncTyMap.count(FTy)) {
3225 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3226 Type *EleTy = PTy->getPointerElementType();
3227 Type *ArgTy =
3228 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3229 ParamTyID = lookupType(ArgTy);
3230 GlobalConstArgSet.insert(&Arg);
3231 }
3232 }
3233 }
David Neto257c3892018-04-11 13:19:45 -04003234 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003235
3236 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003237 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003238 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003239 SPIRVInstList.push_back(ParamInst);
3240
3241 ArgIdx++;
3242 }
3243 }
3244}
3245
alan-bakerb6b09dc2018-11-08 16:59:28 -05003246void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003247 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3248 EntryPointVecType &EntryPoints = getEntryPointVec();
3249 ValueMapType &VMap = getValueMap();
3250 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3251 uint32_t &ExtInstImportID = getOpExtInstImportID();
3252 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3253
3254 // Set up insert point.
3255 auto InsertPoint = SPIRVInstList.begin();
3256
3257 //
3258 // Generate OpCapability
3259 //
3260  // TODO: Which llvm information is mapped to SPIRV Capability?
3261
3262 // Ops[0] = Capability
3263 SPIRVOperandList Ops;
3264
David Neto87846742018-04-11 17:36:22 -04003265 auto *CapInst =
3266 new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
David Neto22f144c2017-06-12 14:26:21 -04003267 SPIRVInstList.insert(InsertPoint, CapInst);
3268
alan-bakerf906d2b2019-12-10 11:26:23 -05003269 bool write_without_format = false;
3270 bool sampled_1d = false;
3271 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003272 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003273 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3274 // Generate OpCapability for i8 type.
3275 SPIRVInstList.insert(InsertPoint,
3276 new SPIRVInstruction(spv::OpCapability,
3277 {MkNum(spv::CapabilityInt8)}));
3278 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003279 // Generate OpCapability for i16 type.
David Neto87846742018-04-11 17:36:22 -04003280 SPIRVInstList.insert(InsertPoint,
3281 new SPIRVInstruction(spv::OpCapability,
3282 {MkNum(spv::CapabilityInt16)}));
David Neto22f144c2017-06-12 14:26:21 -04003283 } else if (Ty->isIntegerTy(64)) {
3284 // Generate OpCapability for i64 type.
David Neto87846742018-04-11 17:36:22 -04003285 SPIRVInstList.insert(InsertPoint,
3286 new SPIRVInstruction(spv::OpCapability,
3287 {MkNum(spv::CapabilityInt64)}));
David Neto22f144c2017-06-12 14:26:21 -04003288 } else if (Ty->isHalfTy()) {
3289 // Generate OpCapability for half type.
3290 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003291 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3292 {MkNum(spv::CapabilityFloat16)}));
David Neto22f144c2017-06-12 14:26:21 -04003293 } else if (Ty->isDoubleTy()) {
3294 // Generate OpCapability for double type.
3295 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003296 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3297 {MkNum(spv::CapabilityFloat64)}));
David Neto22f144c2017-06-12 14:26:21 -04003298 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3299 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003300 if (STy->getName().startswith("opencl.image1d_wo_t") ||
3301 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003302 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003303 write_without_format = true;
3304 }
3305 if (STy->getName().startswith("opencl.image1d_ro_t") ||
3306 STy->getName().startswith("opencl.image1d_wo_t")) {
3307 if (STy->getName().contains(".sampled"))
3308 sampled_1d = true;
3309 else
3310 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003311 }
3312 }
3313 }
3314 }
3315
alan-bakerf906d2b2019-12-10 11:26:23 -05003316 if (write_without_format) {
3317 // Generate OpCapability for write only image type.
3318 SPIRVInstList.insert(
3319 InsertPoint,
3320 new SPIRVInstruction(
3321 spv::OpCapability,
3322 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3323 }
3324 if (image_1d) {
3325 // Generate OpCapability for unsampled 1D image type.
3326 SPIRVInstList.insert(InsertPoint,
3327 new SPIRVInstruction(spv::OpCapability,
3328 {MkNum(spv::CapabilityImage1D)}));
3329 } else if (sampled_1d) {
3330 // Generate OpCapability for sampled 1D image type.
3331 SPIRVInstList.insert(
3332 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3333 {MkNum(spv::CapabilitySampled1D)}));
3334 }
3335
David Neto5c22a252018-03-15 16:07:41 -04003336 { // OpCapability ImageQuery
3337 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003338 for (const auto &SymVal : module.getValueSymbolTable()) {
3339 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003340 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003341 hasImageQuery = true;
3342 break;
3343 }
David Neto5c22a252018-03-15 16:07:41 -04003344 }
3345 }
alan-bakerf67468c2019-11-25 15:51:49 -05003346
David Neto5c22a252018-03-15 16:07:41 -04003347 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003348 auto *ImageQueryCapInst = new SPIRVInstruction(
3349 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003350 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3351 }
3352 }
3353
David Neto22f144c2017-06-12 14:26:21 -04003354 if (hasVariablePointers()) {
3355 //
David Neto22f144c2017-06-12 14:26:21 -04003356 // Generate OpCapability.
3357 //
3358 // Ops[0] = Capability
3359 //
3360 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003361 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003362
David Neto87846742018-04-11 17:36:22 -04003363 SPIRVInstList.insert(InsertPoint,
3364 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003365 } else if (hasVariablePointersStorageBuffer()) {
3366 //
3367 // Generate OpCapability.
3368 //
3369 // Ops[0] = Capability
3370 //
3371 Ops.clear();
3372 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003373
alan-baker5b86ed72019-02-15 08:26:50 -05003374 SPIRVInstList.insert(InsertPoint,
3375 new SPIRVInstruction(spv::OpCapability, Ops));
3376 }
3377
3378 // Always add the storage buffer extension
3379 {
David Neto22f144c2017-06-12 14:26:21 -04003380 //
3381 // Generate OpExtension.
3382 //
3383 // Ops[0] = Name (Literal String)
3384 //
alan-baker5b86ed72019-02-15 08:26:50 -05003385 auto *ExtensionInst = new SPIRVInstruction(
3386 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3387 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3388 }
David Neto22f144c2017-06-12 14:26:21 -04003389
alan-baker5b86ed72019-02-15 08:26:50 -05003390 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3391 //
3392 // Generate OpExtension.
3393 //
3394 // Ops[0] = Name (Literal String)
3395 //
3396 auto *ExtensionInst = new SPIRVInstruction(
3397 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3398 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003399 }
3400
3401 if (ExtInstImportID) {
3402 ++InsertPoint;
3403 }
3404
3405 //
3406 // Generate OpMemoryModel
3407 //
3408 // Memory model for Vulkan will always be GLSL450.
3409
3410 // Ops[0] = Addressing Model
3411 // Ops[1] = Memory Model
3412 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003413 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003414
David Neto87846742018-04-11 17:36:22 -04003415 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003416 SPIRVInstList.insert(InsertPoint, MemModelInst);
3417
3418 //
3419 // Generate OpEntryPoint
3420 //
3421 for (auto EntryPoint : EntryPoints) {
3422 // Ops[0] = Execution Model
3423 // Ops[1] = EntryPoint ID
3424 // Ops[2] = Name (Literal String)
3425 // ...
3426 //
3427 // TODO: Do we need to consider Interface ID for forward references???
3428 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003429 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003430 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3431 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003432
David Neto22f144c2017-06-12 14:26:21 -04003433 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003434 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003435 }
3436
David Neto87846742018-04-11 17:36:22 -04003437 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003438 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3439 }
3440
3441 for (auto EntryPoint : EntryPoints) {
3442 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3443 ->getMetadata("reqd_work_group_size")) {
3444
3445 if (!BuiltinDimVec.empty()) {
3446 llvm_unreachable(
3447 "Kernels should have consistent work group size definition");
3448 }
3449
3450 //
3451 // Generate OpExecutionMode
3452 //
3453
3454 // Ops[0] = Entry Point ID
3455 // Ops[1] = Execution Mode
3456 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
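      // For a kernel with reqd_work_group_size(8, 4, 2) this produces
      // (id illustrative):
      //   OpExecutionMode %kernel LocalSize 8 4 2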
3457 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003458 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003459
3460 uint32_t XDim = static_cast<uint32_t>(
3461 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3462 uint32_t YDim = static_cast<uint32_t>(
3463 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3464 uint32_t ZDim = static_cast<uint32_t>(
3465 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3466
David Neto257c3892018-04-11 13:19:45 -04003467 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003468
David Neto87846742018-04-11 17:36:22 -04003469 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003470 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3471 }
3472 }
3473
3474 //
3475 // Generate OpSource.
3476 //
3477 // Ops[0] = SourceLanguage ID
3478 // Ops[1] = Version (LiteralNum)
3479 //
3480 Ops.clear();
Kévin Petit0fc88042019-04-09 23:25:02 +01003481 if (clspv::Option::CPlusPlus()) {
3482 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3483 } else {
3484 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
3485 }
David Neto22f144c2017-06-12 14:26:21 -04003486
David Neto87846742018-04-11 17:36:22 -04003487 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003488 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3489
3490 if (!BuiltinDimVec.empty()) {
3491 //
3492 // Generate OpDecorates for x/y/z dimension.
3493 //
3494 // Ops[0] = Target ID
3495 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003496 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003497
3498 // X Dimension
3499 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003500 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003501 SPIRVInstList.insert(InsertPoint,
3502 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003503
3504 // Y Dimension
3505 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003506 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003507 SPIRVInstList.insert(InsertPoint,
3508 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003509
3510 // Z Dimension
3511 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003512 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003513 SPIRVInstList.insert(InsertPoint,
3514 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003515 }
3516}
3517
David Netob6e2e062018-04-25 10:32:06 -04003518void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3519 // Work around a driver bug. Initializers on Private variables might not
3520 // work. So the start of the kernel should store the initializer value to the
3521 // variables. Yes, *every* entry point pays this cost if *any* entry point
3522 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3523 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003524 // TODO(dneto): Remove this at some point once fixed drivers are widely
3525 // available.
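  // The workaround itself is a single store at the top of each kernel
  // (ids illustrative):
  //   OpStore %workgroup_size_var %workgroup_size_value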
David Netob6e2e062018-04-25 10:32:06 -04003526 if (WorkgroupSizeVarID) {
3527 assert(WorkgroupSizeValueID);
3528
3529 SPIRVOperandList Ops;
3530 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3531
3532 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3533 getSPIRVInstList().push_back(Inst);
3534 }
3535}
3536
David Neto22f144c2017-06-12 14:26:21 -04003537void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3538 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3539 ValueMapType &VMap = getValueMap();
3540
David Netob6e2e062018-04-25 10:32:06 -04003541 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003542
3543 for (BasicBlock &BB : F) {
3544 // Register BasicBlock to ValueMap.
3545 VMap[&BB] = nextID;
3546
3547 //
3548 // Generate OpLabel for Basic Block.
3549 //
3550 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003551 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003552 SPIRVInstList.push_back(Inst);
3553
David Neto6dcd4712017-06-23 11:06:47 -04003554 // OpVariable instructions must come first.
3555 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003556 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3557 // Allocating a pointer requires variable pointers.
3558 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003559 setVariablePointersCapabilities(
3560 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003561 }
David Neto6dcd4712017-06-23 11:06:47 -04003562 GenerateInstruction(I);
3563 }
3564 }
3565
David Neto22f144c2017-06-12 14:26:21 -04003566 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003567 if (clspv::Option::HackInitializers()) {
3568 GenerateEntryPointInitialStores();
3569 }
David Neto22f144c2017-06-12 14:26:21 -04003570 }
3571
3572 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003573 if (!isa<AllocaInst>(I)) {
3574 GenerateInstruction(I);
3575 }
David Neto22f144c2017-06-12 14:26:21 -04003576 }
3577 }
3578}
3579
3580spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3581 const std::map<CmpInst::Predicate, spv::Op> Map = {
3582 {CmpInst::ICMP_EQ, spv::OpIEqual},
3583 {CmpInst::ICMP_NE, spv::OpINotEqual},
3584 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3585 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3586 {CmpInst::ICMP_ULT, spv::OpULessThan},
3587 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3588 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3589 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3590 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3591 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3592 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3593 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3594 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3595 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3596 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3597 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3598 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3599 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3600 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3601 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3602 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3603 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3604
3605 assert(0 != Map.count(I->getPredicate()));
3606
3607 return Map.at(I->getPredicate());
3608}
3609
3610spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3611 const std::map<unsigned, spv::Op> Map{
3612 {Instruction::Trunc, spv::OpUConvert},
3613 {Instruction::ZExt, spv::OpUConvert},
3614 {Instruction::SExt, spv::OpSConvert},
3615 {Instruction::FPToUI, spv::OpConvertFToU},
3616 {Instruction::FPToSI, spv::OpConvertFToS},
3617 {Instruction::UIToFP, spv::OpConvertUToF},
3618 {Instruction::SIToFP, spv::OpConvertSToF},
3619 {Instruction::FPTrunc, spv::OpFConvert},
3620 {Instruction::FPExt, spv::OpFConvert},
3621 {Instruction::BitCast, spv::OpBitcast}};
3622
3623 assert(0 != Map.count(I.getOpcode()));
3624
3625 return Map.at(I.getOpcode());
3626}
3627
3628spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003629 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003630 switch (I.getOpcode()) {
3631 default:
3632 break;
3633 case Instruction::Or:
3634 return spv::OpLogicalOr;
3635 case Instruction::And:
3636 return spv::OpLogicalAnd;
3637 case Instruction::Xor:
3638 return spv::OpLogicalNotEqual;
3639 }
3640 }
3641
alan-bakerb6b09dc2018-11-08 16:59:28 -05003642 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003643 {Instruction::Add, spv::OpIAdd},
3644 {Instruction::FAdd, spv::OpFAdd},
3645 {Instruction::Sub, spv::OpISub},
3646 {Instruction::FSub, spv::OpFSub},
3647 {Instruction::Mul, spv::OpIMul},
3648 {Instruction::FMul, spv::OpFMul},
3649 {Instruction::UDiv, spv::OpUDiv},
3650 {Instruction::SDiv, spv::OpSDiv},
3651 {Instruction::FDiv, spv::OpFDiv},
3652 {Instruction::URem, spv::OpUMod},
3653 {Instruction::SRem, spv::OpSRem},
3654 {Instruction::FRem, spv::OpFRem},
3655 {Instruction::Or, spv::OpBitwiseOr},
3656 {Instruction::Xor, spv::OpBitwiseXor},
3657 {Instruction::And, spv::OpBitwiseAnd},
3658 {Instruction::Shl, spv::OpShiftLeftLogical},
3659 {Instruction::LShr, spv::OpShiftRightLogical},
3660 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3661
3662 assert(0 != Map.count(I.getOpcode()));
3663
3664 return Map.at(I.getOpcode());
3665}
3666
3667void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3668 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3669 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003670 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3671 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3672
3673 // Register Instruction to ValueMap.
3674 if (0 == VMap[&I]) {
3675 VMap[&I] = nextID;
3676 }
3677
3678 switch (I.getOpcode()) {
3679 default: {
3680 if (Instruction::isCast(I.getOpcode())) {
3681 //
3682 // Generate SPIRV instructions for cast operators.
3683 //
3684
David Netod2de94a2017-08-28 17:27:47 -04003685 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003686 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003687 auto toI8 = Ty == Type::getInt8Ty(Context);
3688 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003689 // Handle zext, sext and uitofp with i1 type specially.
3690 if ((I.getOpcode() == Instruction::ZExt ||
3691 I.getOpcode() == Instruction::SExt ||
3692 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003693 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003694 //
3695 // Generate OpSelect.
3696 //
3697
3698 // Ops[0] = Result Type ID
3699 // Ops[1] = Condition ID
3700 // Ops[2] = True Constant ID
3701 // Ops[3] = False Constant ID
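        // For example, "%r = zext i1 %c to i32" becomes (ids illustrative):
        //   %r = OpSelect %uint %c %uint_1 %uint_0
        // while sext and uitofp pick -1 / 1.0f as the true value and
        // 0 / 0.0f as the false value instead.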
3702 SPIRVOperandList Ops;
3703
David Neto257c3892018-04-11 13:19:45 -04003704 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003705
David Neto22f144c2017-06-12 14:26:21 -04003706 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003707 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003708
3709 uint32_t TrueID = 0;
3710 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003711 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003712 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003713 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003714 } else {
3715 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3716 }
David Neto257c3892018-04-11 13:19:45 -04003717 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003718
3719 uint32_t FalseID = 0;
3720 if (I.getOpcode() == Instruction::ZExt) {
3721 FalseID = VMap[Constant::getNullValue(I.getType())];
3722 } else if (I.getOpcode() == Instruction::SExt) {
3723 FalseID = VMap[Constant::getNullValue(I.getType())];
3724 } else {
3725 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3726 }
David Neto257c3892018-04-11 13:19:45 -04003727 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003728
David Neto87846742018-04-11 17:36:22 -04003729 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003730 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003731 } else if (!clspv::Option::Int8Support() &&
3732 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003733 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3734 // 8 bits.
3735 // Before:
3736 // %result = trunc i32 %a to i8
3737 // After
3738 // %result = OpBitwiseAnd %uint %a %uint_255
3739
3740 SPIRVOperandList Ops;
3741
David Neto257c3892018-04-11 13:19:45 -04003742 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003743
3744 Type *UintTy = Type::getInt32Ty(Context);
3745 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003746 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003747
David Neto87846742018-04-11 17:36:22 -04003748 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003749 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003750 } else {
3751 // Ops[0] = Result Type ID
3752 // Ops[1] = Source Value ID
3753 SPIRVOperandList Ops;
3754
David Neto257c3892018-04-11 13:19:45 -04003755 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003756
David Neto87846742018-04-11 17:36:22 -04003757 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003758 SPIRVInstList.push_back(Inst);
3759 }
3760 } else if (isa<BinaryOperator>(I)) {
3761 //
3762 // Generate SPIRV instructions for binary operators.
3763 //
3764
3765 // Handle xor with i1 type specially.
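// xor of an i1 with a constant true operand is a logical negation of the
// other operand.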
3766 if (I.getOpcode() == Instruction::Xor &&
3767 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003768 ((isa<ConstantInt>(I.getOperand(0)) &&
3769 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3770 (isa<ConstantInt>(I.getOperand(1)) &&
3771 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003772 //
3773 // Generate OpLogicalNot.
3774 //
3775 // Ops[0] = Result Type ID
3776 // Ops[1] = Operand
3777 SPIRVOperandList Ops;
3778
David Neto257c3892018-04-11 13:19:45 -04003779 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003780
3781 Value *CondV = I.getOperand(0);
3782 if (isa<Constant>(I.getOperand(0))) {
3783 CondV = I.getOperand(1);
3784 }
David Neto257c3892018-04-11 13:19:45 -04003785 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003786
David Neto87846742018-04-11 17:36:22 -04003787 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003788 SPIRVInstList.push_back(Inst);
3789 } else {
3790 // Ops[0] = Result Type ID
3791 // Ops[1] = Operand 0
3792 // Ops[2] = Operand 1
3793 SPIRVOperandList Ops;
3794
David Neto257c3892018-04-11 13:19:45 -04003795 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3796 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003797
David Neto87846742018-04-11 17:36:22 -04003798 auto *Inst =
3799 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003800 SPIRVInstList.push_back(Inst);
3801 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003802 } else if (I.getOpcode() == Instruction::FNeg) {
3803 // The only unary operator.
3804 //
3805 // Ops[0] = Result Type ID
3806 // Ops[1] = Operand 0
3807 SPIRVOperandList ops;
3808
3809 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3810 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3811 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003812 } else {
3813 I.print(errs());
3814 llvm_unreachable("Unsupported instruction???");
3815 }
3816 break;
3817 }
3818 case Instruction::GetElementPtr: {
3819 auto &GlobalConstArgSet = getGlobalConstArgSet();
3820
3821 //
3822 // Generate OpAccessChain.
3823 //
3824 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3829
3830 // Ops[0] = Result Type ID
3831 // Ops[1] = Base ID
3832 // Ops[2] ... Ops[n] = Indexes ID
3833 SPIRVOperandList Ops;
3834
alan-bakerb6b09dc2018-11-08 16:59:28 -05003835 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003836 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3837 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3838 // Use pointer type with private address space for global constant.
3839 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003840 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003841 }
David Neto257c3892018-04-11 13:19:45 -04003842
3843 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003844
David Neto862b7d82018-06-14 18:48:37 -04003845 // Generate the base pointer.
3846 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003847
David Neto862b7d82018-06-14 18:48:37 -04003848 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003849
3850 //
3851 // The following rules choose the opcode and index handling for a gep:
3852 //
David Neto862b7d82018-06-14 18:48:37 -04003853 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3854 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003855 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3856 // first index.
3857 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3858 // use gep's first index.
3859 // 4. Otherwise, generate OpAccessChain and use gep's first index.
3861 //
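// E.g. `gep %T* %p, 0, 2` becomes an OpAccessChain with the single index 2,
// while a non-zero or non-constant first index becomes an OpPtrAccessChain
// that keeps that first index.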
3862 spv::Op Opcode = spv::OpAccessChain;
3863 unsigned offset = 0;
3864 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003865 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003866 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003867 } else {
David Neto22f144c2017-06-12 14:26:21 -04003868 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003869 }
David Neto862b7d82018-06-14 18:48:37 -04003870 } else {
David Neto22f144c2017-06-12 14:26:21 -04003871 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003872 }
3873
3874 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003875 // Do we need to generate ArrayStride? Check against the GEP result type
3876 // rather than the pointer type of the base because when indexing into
3877 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3878 // for something else in the SPIR-V.
3879 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003880 auto address_space = ResultType->getAddressSpace();
3881 setVariablePointersCapabilities(address_space);
3882 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003883 case spv::StorageClassStorageBuffer:
3884 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003885 // Save the need to generate an ArrayStride decoration. But defer
3886 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003887 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003888 break;
3889 default:
3890 break;
David Neto1a1a0582017-07-07 12:01:44 -04003891 }
David Neto22f144c2017-06-12 14:26:21 -04003892 }
3893
3894 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003895 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003896 }
3897
David Neto87846742018-04-11 17:36:22 -04003898 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003899 SPIRVInstList.push_back(Inst);
3900 break;
3901 }
3902 case Instruction::ExtractValue: {
3903 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3904 // Ops[0] = Result Type ID
3905 // Ops[1] = Composite ID
3906 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3907 SPIRVOperandList Ops;
3908
David Neto257c3892018-04-11 13:19:45 -04003909 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003910
3911 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003912 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003913
3914 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003915 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003916 }
3917
David Neto87846742018-04-11 17:36:22 -04003918 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003919 SPIRVInstList.push_back(Inst);
3920 break;
3921 }
3922 case Instruction::InsertValue: {
3923 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3924 // Ops[0] = Result Type ID
3925 // Ops[1] = Object ID
3926 // Ops[2] = Composite ID
3927 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3928 SPIRVOperandList Ops;
3929
3930 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003931 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003932
3933 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003934 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003935
3936 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003937 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003938
3939 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003940 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003941 }
3942
David Neto87846742018-04-11 17:36:22 -04003943 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003944 SPIRVInstList.push_back(Inst);
3945 break;
3946 }
3947 case Instruction::Select: {
3948 //
3949 // Generate OpSelect.
3950 //
3951
3952 // Ops[0] = Result Type ID
3953 // Ops[1] = Condition ID
3954 // Ops[2] = True Value ID
3955 // Ops[3] = False Value ID
3956 SPIRVOperandList Ops;
3957
3958 // Find SPIRV instruction for parameter type.
3959 auto Ty = I.getType();
3960 if (Ty->isPointerTy()) {
3961 auto PointeeTy = Ty->getPointerElementType();
3962 if (PointeeTy->isStructTy() &&
3963 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
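// A pointer to an opaque struct (e.g. a sampler or image type) is
// translated via its pointee type rather than as a pointer.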
3964 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003965 } else {
3966 // Selecting between pointers requires variable pointers.
3967 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3968 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3969 setVariablePointers(true);
3970 }
David Neto22f144c2017-06-12 14:26:21 -04003971 }
3972 }
3973
David Neto257c3892018-04-11 13:19:45 -04003974 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3975 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003976
David Neto87846742018-04-11 17:36:22 -04003977 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003978 SPIRVInstList.push_back(Inst);
3979 break;
3980 }
3981 case Instruction::ExtractElement: {
3982 // Handle <4 x i8> type manually.
3983 Type *CompositeTy = I.getOperand(0)->getType();
3984 if (is4xi8vec(CompositeTy)) {
3985 //
3986 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3987 // <4 x i8>.
3988 //
3989
3990 //
3991 // Generate OpShiftRightLogical
3992 //
3993 // Ops[0] = Result Type ID
3994 // Ops[1] = Operand 0
3995 // Ops[2] = Operand 1
3996 //
3997 SPIRVOperandList Ops;
3998
David Neto257c3892018-04-11 13:19:45 -04003999 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004000
4001 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004002 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004003
4004 uint32_t Op1ID = 0;
4005 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4006 // Handle constant index.
4007 uint64_t Idx = CI->getZExtValue();
4008 Value *ShiftAmount =
4009 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4010 Op1ID = VMap[ShiftAmount];
4011 } else {
4012 // Handle variable index.
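// Scale the element index by 8 (bits per byte) with an OpIMul to get the
// shift amount into the packed <4 x i8> value.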
4013 SPIRVOperandList TmpOps;
4014
David Neto257c3892018-04-11 13:19:45 -04004015 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4016 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004017
4018 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004019 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004020
4021 Op1ID = nextID;
4022
David Neto87846742018-04-11 17:36:22 -04004023 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004024 SPIRVInstList.push_back(TmpInst);
4025 }
David Neto257c3892018-04-11 13:19:45 -04004026 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004027
4028 uint32_t ShiftID = nextID;
4029
David Neto87846742018-04-11 17:36:22 -04004030 auto *Inst =
4031 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004032 SPIRVInstList.push_back(Inst);
4033
4034 //
4035 // Generate OpBitwiseAnd
4036 //
4037 // Ops[0] = Result Type ID
4038 // Ops[1] = Operand 0
4039 // Ops[2] = Operand 1
4040 //
4041 Ops.clear();
4042
David Neto257c3892018-04-11 13:19:45 -04004043 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004044
4045 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004046 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004047
David Neto9b2d6252017-09-06 15:47:37 -04004048 // Reset mapping for this value to the result of the bitwise and.
4049 VMap[&I] = nextID;
4050
David Neto87846742018-04-11 17:36:22 -04004051 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004052 SPIRVInstList.push_back(Inst);
4053 break;
4054 }
4055
4056 // Ops[0] = Result Type ID
4057 // Ops[1] = Composite ID
4058 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4059 SPIRVOperandList Ops;
4060
David Neto257c3892018-04-11 13:19:45 -04004061 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004062
4063 spv::Op Opcode = spv::OpCompositeExtract;
4064 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004065 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004066 } else {
David Neto257c3892018-04-11 13:19:45 -04004067 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004068 Opcode = spv::OpVectorExtractDynamic;
4069 }
4070
David Neto87846742018-04-11 17:36:22 -04004071 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004072 SPIRVInstList.push_back(Inst);
4073 break;
4074 }
4075 case Instruction::InsertElement: {
4076 // Handle <4 x i8> type manually.
4077 Type *CompositeTy = I.getOperand(0)->getType();
4078 if (is4xi8vec(CompositeTy)) {
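// The <4 x i8> value is emulated as a 32-bit integer. Build a 0xFF mask
// shifted to the target byte, clear that byte in the original value,
// shift the new byte into position, then OR the two together.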
4079 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4080 uint32_t CstFFID = VMap[CstFF];
4081
4082 uint32_t ShiftAmountID = 0;
4083 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4084 // Handle constant index.
4085 uint64_t Idx = CI->getZExtValue();
4086 Value *ShiftAmount =
4087 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4088 ShiftAmountID = VMap[ShiftAmount];
4089 } else {
4090 // Handle variable index.
4091 SPIRVOperandList TmpOps;
4092
David Neto257c3892018-04-11 13:19:45 -04004093 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4094 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004095
4096 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004097 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004098
4099 ShiftAmountID = nextID;
4100
David Neto87846742018-04-11 17:36:22 -04004101 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004102 SPIRVInstList.push_back(TmpInst);
4103 }
4104
4105 //
4106 // Generate mask operations.
4107 //
4108
4109 // ShiftLeft mask according to index of insertelement.
4110 SPIRVOperandList Ops;
4111
David Neto257c3892018-04-11 13:19:45 -04004112 const uint32_t ResTyID = lookupType(CompositeTy);
4113 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004114
4115 uint32_t MaskID = nextID;
4116
David Neto87846742018-04-11 17:36:22 -04004117 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004118 SPIRVInstList.push_back(Inst);
4119
4120 // Inverse mask.
4121 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004122 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004123
4124 uint32_t InvMaskID = nextID;
4125
David Neto87846742018-04-11 17:36:22 -04004126 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004127 SPIRVInstList.push_back(Inst);
4128
4129 // Apply mask.
4130 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004131 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004132
4133 uint32_t OrgValID = nextID;
4134
David Neto87846742018-04-11 17:36:22 -04004135 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004136 SPIRVInstList.push_back(Inst);
4137
4138 // Create correct value according to index of insertelement.
4139 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004140 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4141 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004142
4143 uint32_t InsertValID = nextID;
4144
David Neto87846742018-04-11 17:36:22 -04004145 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004146 SPIRVInstList.push_back(Inst);
4147
4148 // Insert value to original value.
4149 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004150 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004151
David Netoa394f392017-08-26 20:45:29 -04004152 VMap[&I] = nextID;
4153
David Neto87846742018-04-11 17:36:22 -04004154 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004155 SPIRVInstList.push_back(Inst);
4156
4157 break;
4158 }
4159
David Neto22f144c2017-06-12 14:26:21 -04004160 SPIRVOperandList Ops;
4161
James Priced26efea2018-06-09 23:28:32 +01004162 // Ops[0] = Result Type ID
4163 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004164
4165 spv::Op Opcode = spv::OpCompositeInsert;
4166 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004167 const auto value = CI->getZExtValue();
4168 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004169 // Ops[1] = Object ID
4170 // Ops[2] = Composite ID
4171 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004172 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004173 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004174 } else {
James Priced26efea2018-06-09 23:28:32 +01004175 // Ops[1] = Composite ID
4176 // Ops[2] = Object ID
4177 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004178 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004179 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004180 Opcode = spv::OpVectorInsertDynamic;
4181 }
4182
David Neto87846742018-04-11 17:36:22 -04004183 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004184 SPIRVInstList.push_back(Inst);
4185 break;
4186 }
4187 case Instruction::ShuffleVector: {
4188 // Ops[0] = Result Type ID
4189 // Ops[1] = Vector 1 ID
4190 // Ops[2] = Vector 2 ID
4191 // Ops[3] ... Ops[n] = Components (Literal Number)
4192 SPIRVOperandList Ops;
4193
David Neto257c3892018-04-11 13:19:45 -04004194 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4195 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004196
4197 uint64_t NumElements = 0;
4198 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4199 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4200
4201 if (Cst->isNullValue()) {
4202 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004203 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004204 }
4205 } else if (const ConstantDataSequential *CDS =
4206 dyn_cast<ConstantDataSequential>(Cst)) {
4207 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004209 const auto value = CDS->getElementAsInteger(i);
4210 assert(value <= UINT32_MAX);
4211 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004212 }
4213 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4214 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4215 auto Op = CV->getOperand(i);
4216
4217 uint32_t literal = 0;
4218
4219 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4220 literal = static_cast<uint32_t>(CI->getZExtValue());
4221 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4222 literal = 0xFFFFFFFFu;
4223 } else {
4224 Op->print(errs());
4225 llvm_unreachable("Unsupported element in ConstantVector!");
4226 }
4227
David Neto257c3892018-04-11 13:19:45 -04004228 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004229 }
4230 } else {
4231 Cst->print(errs());
4232 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4233 }
4234 }
4235
David Neto87846742018-04-11 17:36:22 -04004236 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004237 SPIRVInstList.push_back(Inst);
4238 break;
4239 }
4240 case Instruction::ICmp:
4241 case Instruction::FCmp: {
4242 CmpInst *CmpI = cast<CmpInst>(&I);
4243
David Netod4ca2e62017-07-06 18:47:35 -04004244 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004245 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004246 if (isa<PointerType>(ArgTy)) {
4247 CmpI->print(errs());
4248 std::string name = I.getParent()->getParent()->getName();
4249 errs()
4250 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4251 << "in function " << name << "\n";
4252 llvm_unreachable("Pointer equality check is invalid");
4253 break;
4254 }
4255
David Neto257c3892018-04-11 13:19:45 -04004256 // Ops[0] = Result Type ID
4257 // Ops[1] = Operand 1 ID
4258 // Ops[2] = Operand 2 ID
4259 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004260
David Neto257c3892018-04-11 13:19:45 -04004261 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4262 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004263
4264 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004265 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004266 SPIRVInstList.push_back(Inst);
4267 break;
4268 }
4269 case Instruction::Br: {
4270 // The branch instruction is deferred because it needs the label's ID.
4271 // Record the slot's location in the SPIRVInstructionList.
4272 DeferredInsts.push_back(
4273 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4274 break;
4275 }
4276 case Instruction::Switch: {
4277 I.print(errs());
4278 llvm_unreachable("Unsupported instruction???");
4279 break;
4280 }
4281 case Instruction::IndirectBr: {
4282 I.print(errs());
4283 llvm_unreachable("Unsupported instruction???");
4284 break;
4285 }
4286 case Instruction::PHI: {
4287 // The PHI instruction is deferred because it needs IDs that may not be
4288 // defined yet. Record the slot's location in the SPIRVInstructionList.
4289 DeferredInsts.push_back(
4290 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4291 break;
4292 }
4293 case Instruction::Alloca: {
4294 //
4295 // Generate OpVariable.
4296 //
4297 // Ops[0] : Result Type ID
4298 // Ops[1] : Storage Class
4299 SPIRVOperandList Ops;
4300
David Neto257c3892018-04-11 13:19:45 -04004301 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004302
David Neto87846742018-04-11 17:36:22 -04004303 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004304 SPIRVInstList.push_back(Inst);
4305 break;
4306 }
4307 case Instruction::Load: {
4308 LoadInst *LD = cast<LoadInst>(&I);
4309 //
4310 // Generate OpLoad.
4311 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004312
alan-baker5b86ed72019-02-15 08:26:50 -05004313 if (LD->getType()->isPointerTy()) {
4314 // Loading a pointer requires variable pointers.
4315 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4316 }
David Neto22f144c2017-06-12 14:26:21 -04004317
David Neto0a2f98d2017-09-15 19:38:40 -04004318 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004319 uint32_t PointerID = VMap[LD->getPointerOperand()];
4320
4321 // This is a hack to work around what looks like a driver bug.
4322 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004323 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4324 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004325 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004326 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004327 // Generate a bitwise-and of the original value with itself.
4328 // We should have been able to get away with just an OpCopyObject,
4329 // but we need something more complex to get past certain driver bugs.
4330 // This is ridiculous, but necessary.
4331 // TODO(dneto): Revisit this once drivers fix their bugs.
4332
4333 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004334 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4335 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004336
David Neto87846742018-04-11 17:36:22 -04004337 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004338 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004339 break;
4340 }
4341
4342 // This is the normal path. Generate a load.
4343
David Neto22f144c2017-06-12 14:26:21 -04004344 // Ops[0] = Result Type ID
4345 // Ops[1] = Pointer ID
4346 // Ops[2] ... Ops[n] = Optional Memory Access
4347 //
4348 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004349
David Neto22f144c2017-06-12 14:26:21 -04004350 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004351 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004352
David Neto87846742018-04-11 17:36:22 -04004353 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004354 SPIRVInstList.push_back(Inst);
4355 break;
4356 }
4357 case Instruction::Store: {
4358 StoreInst *ST = cast<StoreInst>(&I);
4359 //
4360 // Generate OpStore.
4361 //
4362
alan-baker5b86ed72019-02-15 08:26:50 -05004363 if (ST->getValueOperand()->getType()->isPointerTy()) {
4364 // Storing a pointer requires variable pointers.
4365 setVariablePointersCapabilities(
4366 ST->getValueOperand()->getType()->getPointerAddressSpace());
4367 }
4368
David Neto22f144c2017-06-12 14:26:21 -04004369 // Ops[0] = Pointer ID
4370 // Ops[1] = Object ID
4371 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4372 //
4373 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004374 SPIRVOperandList Ops;
4375 Ops << MkId(VMap[ST->getPointerOperand()])
4376 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004377
David Neto87846742018-04-11 17:36:22 -04004378 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004379 SPIRVInstList.push_back(Inst);
4380 break;
4381 }
4382 case Instruction::AtomicCmpXchg: {
4383 I.print(errs());
4384 llvm_unreachable("Unsupported instruction???");
4385 break;
4386 }
4387 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004388 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4389
4390 spv::Op opcode;
4391
4392 switch (AtomicRMW->getOperation()) {
4393 default:
4394 I.print(errs());
4395 llvm_unreachable("Unsupported instruction???");
4396 case llvm::AtomicRMWInst::Add:
4397 opcode = spv::OpAtomicIAdd;
4398 break;
4399 case llvm::AtomicRMWInst::Sub:
4400 opcode = spv::OpAtomicISub;
4401 break;
4402 case llvm::AtomicRMWInst::Xchg:
4403 opcode = spv::OpAtomicExchange;
4404 break;
4405 case llvm::AtomicRMWInst::Min:
4406 opcode = spv::OpAtomicSMin;
4407 break;
4408 case llvm::AtomicRMWInst::Max:
4409 opcode = spv::OpAtomicSMax;
4410 break;
4411 case llvm::AtomicRMWInst::UMin:
4412 opcode = spv::OpAtomicUMin;
4413 break;
4414 case llvm::AtomicRMWInst::UMax:
4415 opcode = spv::OpAtomicUMax;
4416 break;
4417 case llvm::AtomicRMWInst::And:
4418 opcode = spv::OpAtomicAnd;
4419 break;
4420 case llvm::AtomicRMWInst::Or:
4421 opcode = spv::OpAtomicOr;
4422 break;
4423 case llvm::AtomicRMWInst::Xor:
4424 opcode = spv::OpAtomicXor;
4425 break;
4426 }
4427
4428 //
4429 // Generate OpAtomic*.
4430 //
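// Ops[0] = Result Type ID
// Ops[1] = Pointer ID
// Ops[2] = Scope ID (Device)
// Ops[3] = Memory Semantics ID
// Ops[4] = Value ID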
4431 SPIRVOperandList Ops;
4432
David Neto257c3892018-04-11 13:19:45 -04004433 Ops << MkId(lookupType(I.getType()))
4434 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004435
4436 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004437 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004438 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004439
4440 const auto ConstantMemorySemantics = ConstantInt::get(
4441 IntTy, spv::MemorySemanticsUniformMemoryMask |
4442 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004443 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004444
David Neto257c3892018-04-11 13:19:45 -04004445 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004446
4447 VMap[&I] = nextID;
4448
David Neto87846742018-04-11 17:36:22 -04004449 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004450 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004451 break;
4452 }
4453 case Instruction::Fence: {
4454 I.print(errs());
4455 llvm_unreachable("Unsupported instruction???");
4456 break;
4457 }
4458 case Instruction::Call: {
4459 CallInst *Call = dyn_cast<CallInst>(&I);
4460 Function *Callee = Call->getCalledFunction();
4461
Alan Baker202c8c72018-08-13 13:47:44 -04004462 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004463 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4464 // Generate an OpLoad
4465 SPIRVOperandList Ops;
4466 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004467
David Neto862b7d82018-06-14 18:48:37 -04004468 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4469 << MkId(ResourceVarDeferredLoadCalls[Call]);
4470
4471 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4472 SPIRVInstList.push_back(Inst);
4473 VMap[Call] = load_id;
4474 break;
4475
4476 } else {
4477 // This maps to an OpVariable we've already generated.
4478 // No code is generated for the call.
4479 }
4480 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004481 } else if (Callee->getName().startswith(
4482 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004483 // Don't codegen an instruction here, but instead map this call directly
4484 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004485 int spec_id = static_cast<int>(
4486 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004487 const auto &info = LocalSpecIdInfoMap[spec_id];
4488 VMap[Call] = info.variable_id;
4489 break;
David Neto862b7d82018-06-14 18:48:37 -04004490 }
4491
4492 // Sampler initializers become a load of the corresponding sampler.
4493
Kévin Petitdf71de32019-04-09 14:09:50 +01004494 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004495 // Map this to a load from the variable.
4496 const auto index_into_sampler_map =
4497 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4498
4499 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004500 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004501 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004502
David Neto257c3892018-04-11 13:19:45 -04004503 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004504 << MkId(SamplerMapIndexToIDMap[static_cast<unsigned>(
4505 index_into_sampler_map)]);
David Neto22f144c2017-06-12 14:26:21 -04004506
David Neto862b7d82018-06-14 18:48:37 -04004507 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004508 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004509 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004510 break;
4511 }
4512
Kévin Petit349c9502019-03-28 17:24:14 +00004513 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004514 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4515 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4516 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004517
Kévin Petit617a76d2019-04-04 13:54:16 +01004518 // If the switch above didn't have an entry, the intrinsic may be using
4519 // the name mangling logic.
4520 bool usesMangler = false;
4521 if (opcode == spv::OpNop) {
4522 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4523 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4524 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4525 usesMangler = true;
4526 }
4527 }
4528
Kévin Petit349c9502019-03-28 17:24:14 +00004529 if (opcode != spv::OpNop) {
4530
David Neto22f144c2017-06-12 14:26:21 -04004531 SPIRVOperandList Ops;
4532
Kévin Petit349c9502019-03-28 17:24:14 +00004533 if (!I.getType()->isVoidTy()) {
4534 Ops << MkId(lookupType(I.getType()));
4535 }
David Neto22f144c2017-06-12 14:26:21 -04004536
Kévin Petit617a76d2019-04-04 13:54:16 +01004537 unsigned firstOperand = usesMangler ? 1 : 0;
4538 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004539 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004540 }
4541
Kévin Petit349c9502019-03-28 17:24:14 +00004542 if (!I.getType()->isVoidTy()) {
4543 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004544 }
4545
Kévin Petit349c9502019-03-28 17:24:14 +00004546 SPIRVInstruction *Inst;
4547 if (!I.getType()->isVoidTy()) {
4548 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4549 } else {
4550 Inst = new SPIRVInstruction(opcode, Ops);
4551 }
Kévin Petit8a560882019-03-21 15:24:34 +00004552 SPIRVInstList.push_back(Inst);
4553 break;
4554 }
4555
David Neto22f144c2017-06-12 14:26:21 -04004556 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4557 if (Callee->getName().startswith("spirv.copy_memory")) {
4558 //
4559 // Generate OpCopyMemory.
4560 //
4561
4562 // Ops[0] = Dst ID
4563 // Ops[1] = Src ID
4564 // Ops[2] = Memory Access
4565 // Ops[3] = Alignment
4566
4567 auto IsVolatile =
4568 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4569
4570 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4571 : spv::MemoryAccessMaskNone;
4572
4573 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4574
4575 auto Alignment =
4576 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4577
David Neto257c3892018-04-11 13:19:45 -04004578 SPIRVOperandList Ops;
4579 Ops << MkId(VMap[Call->getArgOperand(0)])
4580 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4581 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004582
David Neto87846742018-04-11 17:36:22 -04004583 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004584
4585 SPIRVInstList.push_back(Inst);
4586
4587 break;
4588 }
4589
David Neto22f144c2017-06-12 14:26:21 -04004590 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4591 // Additionally, OpTypeSampledImage is generated.
alan-bakerf67468c2019-11-25 15:51:49 -05004592 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004593 //
4594 // Generate OpSampledImage.
4595 //
4596 // Ops[0] = Result Type ID
4597 // Ops[1] = Image ID
4598 // Ops[2] = Sampler ID
4599 //
4600 SPIRVOperandList Ops;
4601
4602 Value *Image = Call->getArgOperand(0);
4603 Value *Sampler = Call->getArgOperand(1);
4604 Value *Coordinate = Call->getArgOperand(2);
4605
4606 TypeMapType &OpImageTypeMap = getImageTypeMap();
4607 Type *ImageTy = Image->getType()->getPointerElementType();
4608 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004609 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004610 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004611
4612 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004613
4614 uint32_t SampledImageID = nextID;
4615
David Neto87846742018-04-11 17:36:22 -04004616 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004617 SPIRVInstList.push_back(Inst);
4618
4619 //
4620 // Generate OpImageSampleExplicitLod.
4621 //
4622 // Ops[0] = Result Type ID
4623 // Ops[1] = Sampled Image ID
4624 // Ops[2] = Coordinate ID
4625 // Ops[3] = Image Operands Type ID
4626 // Ops[4] ... Ops[n] = Operands ID
4627 //
4628 Ops.clear();
4629
alan-bakerf67468c2019-11-25 15:51:49 -05004630 const bool is_int_image = IsIntImageType(Image->getType());
4631 uint32_t result_type = 0;
4632 if (is_int_image) {
4633 result_type = v4int32ID;
4634 } else {
4635 result_type = lookupType(Call->getType());
4636 }
4637
4638 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4639 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004640
4641 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004642 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004643
alan-bakerf67468c2019-11-25 15:51:49 -05004644 uint32_t final_id = nextID++;
4645 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004646
alan-bakerf67468c2019-11-25 15:51:49 -05004647 uint32_t image_id = final_id;
4648 if (is_int_image) {
4649 // Int image requires a bitcast from v4int to v4uint.
4650 image_id = nextID++;
4651 }
4652
4653 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004654 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004655
4656 if (is_int_image) {
4657 // Generate the bitcast.
4658 Ops.clear();
4659 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4660 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4661 SPIRVInstList.push_back(Inst);
4662 }
David Neto22f144c2017-06-12 14:26:21 -04004663 break;
4664 }
4665
alan-bakerf67468c2019-11-25 15:51:49 -05004666 // write_image is mapped to OpImageWrite.
4667 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004668 //
4669 // Generate OpImageWrite.
4670 //
4671 // Ops[0] = Image ID
4672 // Ops[1] = Coordinate ID
4673 // Ops[2] = Texel ID
4674 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4675 // Ops[4] ... Ops[n] = (Optional) Operands ID
4676 //
4677 SPIRVOperandList Ops;
4678
4679 Value *Image = Call->getArgOperand(0);
4680 Value *Coordinate = Call->getArgOperand(1);
4681 Value *Texel = Call->getArgOperand(2);
4682
4683 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004684 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004685 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004686
4687 const bool is_int_image = IsIntImageType(Image->getType());
4688 if (is_int_image) {
4689 // Generate a bitcast to v4int and use it as the texel value.
4690 uint32_t castID = nextID++;
4691 Ops << MkId(v4int32ID) << MkId(TexelID);
4692 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4693 SPIRVInstList.push_back(cast);
4694 Ops.clear();
4695 TexelID = castID;
4696 }
David Neto257c3892018-04-11 13:19:45 -04004697 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004698
David Neto87846742018-04-11 17:36:22 -04004699 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004700 SPIRVInstList.push_back(Inst);
4701 break;
4702 }
4703
alan-bakerce179f12019-12-06 19:02:22 -05004704 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4705 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004706 //
alan-bakerce179f12019-12-06 19:02:22 -05004707 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004708 //
4709 // Ops[0] = Image ID
4710 //
alan-bakerce179f12019-12-06 19:02:22 -05004711 // Result type has components equal to the dimensionality of the image,
4712 // plus 1 if the image is arrayed.
4713 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004714 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04004715 SPIRVOperandList Ops;
4716
4717 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004718 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4719 uint32_t SizesTypeID = 0;
4720
David Neto5c22a252018-03-15 16:07:41 -04004721 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004722 const uint32_t dim = ImageDimensionality(Image->getType());
alan-bakerf906d2b2019-12-10 11:26:23 -05004723 // TODO(alan-baker): fix component calculation when arrayed images are
4724 // supported.
alan-bakerce179f12019-12-06 19:02:22 -05004725 const uint32_t components = dim;
4726 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004727 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4728 } else {
4729 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4730 }
David Neto5c22a252018-03-15 16:07:41 -04004731 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004732 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004733 spv::Op query_opcode = spv::OpImageQuerySize;
4734 if (clspv::IsSampledImageType(Image->getType())) {
4735 query_opcode = spv::OpImageQuerySizeLod;
4736 // Need explicit 0 for Lod operand.
4737 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4738 Ops << MkId(VMap[CstInt0]);
4739 }
David Neto5c22a252018-03-15 16:07:41 -04004740
4741 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004742 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004743 SPIRVInstList.push_back(QueryInst);
4744
alan-bakerce179f12019-12-06 19:02:22 -05004745 // May require an extra instruction to create the appropriate result of
4746 // the builtin function.
4747 if (clspv::IsGetImageDim(Callee)) {
4748 if (dim == 3) {
4749 // get_image_dim returns an int4 for 3D images.
4750 //
4751 // Reset value map entry since we generated an intermediate
4752 // instruction.
4753 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004754
alan-bakerce179f12019-12-06 19:02:22 -05004755 // Implement:
4756 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4757 Ops.clear();
4758 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4759 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004760
alan-bakerce179f12019-12-06 19:02:22 -05004761 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4762 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004763
alan-bakerce179f12019-12-06 19:02:22 -05004764 auto *Inst =
4765 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4766 SPIRVInstList.push_back(Inst);
4767 } else if (dim != components) {
4768 // get_image_dim returns an int2 regardless of the arrayedness of the
4769 // image. If the image is arrayed, an element must be dropped from the
4770 // query result.
4771 //
4772 // Reset value map entry since we generated an intermediate
4773 // instruction.
4774 VMap[&I] = nextID;
4775
4776 // Implement:
4777 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4778 Ops.clear();
4779 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4780 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4781
4782 auto *Inst =
4783 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4784 SPIRVInstList.push_back(Inst);
4785 }
4786 } else if (components > 1) {
4787 // Reset value map entry since we generated an intermediate instruction.
4788 VMap[&I] = nextID;
4789
4790 // Implement:
4791 // %result = OpCompositeExtract %uint %sizes <component number>
4792 Ops.clear();
4793 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4794
4795 uint32_t component = 0;
4796 if (IsGetImageHeight(Callee))
4797 component = 1;
4798 else if (IsGetImageDepth(Callee))
4799 component = 2;
4800 Ops << MkNum(component);
4801
4802 auto *Inst =
4803 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4804 SPIRVInstList.push_back(Inst);
4805 }
David Neto5c22a252018-03-15 16:07:41 -04004806 break;
4807 }
4808
David Neto22f144c2017-06-12 14:26:21 -04004809 // The call instruction is deferred because it needs the callee's ID.
4810 // Record the slot's location in the SPIRVInstructionList.
4811 DeferredInsts.push_back(
4812 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4813
David Neto3fbb4072017-10-16 11:28:14 -04004814 // Check whether the implementation of this call uses an extended
4815 // instruction plus one more value-producing instruction. If so, then
4816 // reserve the id for the extra value-producing slot.
4817 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4818 if (EInst != kGlslExtInstBad) {
4819 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004820 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004821 VMap[&I] = nextID;
4822 nextID++;
4823 }
4824 break;
4825 }
4826 case Instruction::Ret: {
4827 unsigned NumOps = I.getNumOperands();
4828 if (NumOps == 0) {
4829 //
4830 // Generate OpReturn.
4831 //
David Neto87846742018-04-11 17:36:22 -04004832 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004833 } else {
4834 //
4835 // Generate OpReturnValue.
4836 //
4837
4838 // Ops[0] = Return Value ID
4839 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004840
4841 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004842
David Neto87846742018-04-11 17:36:22 -04004843 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004844 SPIRVInstList.push_back(Inst);
4845 break;
4846 }
4847 break;
4848 }
4849 }
4850}
4851
4852void SPIRVProducerPass::GenerateFuncEpilogue() {
4853 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4854
4855 //
4856 // Generate OpFunctionEnd
4857 //
4858
David Neto87846742018-04-11 17:36:22 -04004859 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004860 SPIRVInstList.push_back(Inst);
4861}
4862
4863bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004864 // Don't specialize <4 x i8> if i8 is generally supported.
4865 if (clspv::Option::Int8Support())
4866 return false;
4867
David Neto22f144c2017-06-12 14:26:21 -04004868 LLVMContext &Context = Ty->getContext();
4869 if (Ty->isVectorTy()) {
4870 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4871 Ty->getVectorNumElements() == 4) {
4872 return true;
4873 }
4874 }
4875
4876 return false;
4877}
4878
4879void SPIRVProducerPass::HandleDeferredInstruction() {
4880 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4881 ValueMapType &VMap = getValueMap();
4882 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4883
4884 for (auto DeferredInst = DeferredInsts.rbegin();
4885 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4886 Value *Inst = std::get<0>(*DeferredInst);
4887 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4888 if (InsertPoint != SPIRVInstList.end()) {
4889 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4890 ++InsertPoint;
4891 }
4892 }
4893
4894 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004895 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004896 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004897 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004898 //
4899 // Generate OpLoopMerge.
4900 //
4901 // Ops[0] = Merge Block ID
4902 // Ops[1] = Continue Target ID
4903 // Ops[2] = Loop Control
4904 SPIRVOperandList Ops;
4905
alan-baker06cad652019-12-03 17:56:47 -05004906 auto MergeBB = MergeBlocks[BrBB];
4907 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04004908 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004909 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004910 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004911 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004912
David Neto87846742018-04-11 17:36:22 -04004913 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004914 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05004915 } else if (MergeBlocks.count(BrBB)) {
4916 //
4917 // Generate OpSelectionMerge.
4918 //
4919 // Ops[0] = Merge Block ID
4920 // Ops[1] = Selection Control
4921 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004922
alan-baker06cad652019-12-03 17:56:47 -05004923 auto MergeBB = MergeBlocks[BrBB];
4924 uint32_t MergeBBID = VMap[MergeBB];
4925 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004926
alan-baker06cad652019-12-03 17:56:47 -05004927 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
4928 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04004929 }
4930
4931 if (Br->isConditional()) {
4932 //
4933 // Generate OpBranchConditional.
4934 //
4935 // Ops[0] = Condition ID
4936 // Ops[1] = True Label ID
4937 // Ops[2] = False Label ID
4938 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4939 SPIRVOperandList Ops;
4940
4941 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004942 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004943 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004944
4945 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004946
David Neto87846742018-04-11 17:36:22 -04004947 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004948 SPIRVInstList.insert(InsertPoint, BrInst);
4949 } else {
4950 //
4951 // Generate OpBranch.
4952 //
4953 // Ops[0] = Target Label ID
4954 SPIRVOperandList Ops;
4955
4956 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004957 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004958
David Neto87846742018-04-11 17:36:22 -04004959 SPIRVInstList.insert(InsertPoint,
4960 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004961 }
4962 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004963 if (PHI->getType()->isPointerTy()) {
4964 // OpPhi on pointers requires variable pointers.
4965 setVariablePointersCapabilities(
4966 PHI->getType()->getPointerAddressSpace());
4967 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4968 setVariablePointers(true);
4969 }
4970 }
4971
David Neto22f144c2017-06-12 14:26:21 -04004972 //
4973 // Generate OpPhi.
4974 //
4975 // Ops[0] = Result Type ID
4976 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
4977 SPIRVOperandList Ops;
4978
David Neto257c3892018-04-11 13:19:45 -04004979 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004980
David Neto22f144c2017-06-12 14:26:21 -04004981 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4982 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04004983 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04004984 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004985 }
4986
4987 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004988 InsertPoint,
4989 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004990 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4991 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004992 auto callee_name = Callee->getName();
4993 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004994
4995 if (EInst) {
4996 uint32_t &ExtInstImportID = getOpExtInstImportID();
4997
4998 //
4999 // Generate OpExtInst.
5000 //
5001
5002 // Ops[0] = Result Type ID
5003 // Ops[1] = Set ID (OpExtInstImport ID)
5004 // Ops[2] = Instruction Number (Literal Number)
5005 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5006 SPIRVOperandList Ops;
5007
David Neto862b7d82018-06-14 18:48:37 -04005008 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5009 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005010
David Neto22f144c2017-06-12 14:26:21 -04005011 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5012 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005013 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005014 }
5015
David Neto87846742018-04-11 17:36:22 -04005016 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5017 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005018 SPIRVInstList.insert(InsertPoint, ExtInst);
5019
David Neto3fbb4072017-10-16 11:28:14 -04005020 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5021 if (IndirectExtInst != kGlslExtInstBad) {
5022 // Generate one more instruction that uses the result of the extended
5023 // instruction. Its result id is one more than the id of the
5024 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005025 LLVMContext &Context =
5026 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005027
David Neto3fbb4072017-10-16 11:28:14 -04005028 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5029 &VMap, &SPIRVInstList, &InsertPoint](
5030 spv::Op opcode, Constant *constant) {
5031 //
5032 // Generate instruction like:
5033 // result = opcode constant <extinst-result>
5034 //
5035 // Ops[0] = Result Type ID
5036 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5037 // Ops[2] = Operand 1 ;; the result of the extended instruction
5038 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005039
David Neto3fbb4072017-10-16 11:28:14 -04005040 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005041 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005042
5043 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5044 constant = ConstantVector::getSplat(
5045 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5046 }
David Neto257c3892018-04-11 13:19:45 -04005047 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005048
5049 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005050 InsertPoint, new SPIRVInstruction(
5051 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005052 };
5053
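          // A sketch of the instruction pair produced for the indirect cases
          // (ids are illustrative):
          //   clz(x):    %msb = OpExtInst %ty %glsl FindUMsb %x
          //              %res = OpISub %ty %c31 %msb        ; 31 - msb
          //   acospi(x): %a   = OpExtInst %ty %glsl Acos %x
          //              %res = OpFMul %ty %c_1_over_pi %a  ; acos(x) * (1/pi)
          // where %res's id is one more than the OpExtInst's result id.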
5054 switch (IndirectExtInst) {
5055 case glsl::ExtInstFindUMsb: // Implementing clz
5056 generate_extra_inst(
5057 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5058 break;
5059 case glsl::ExtInstAcos: // Implementing acospi
5060 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005061 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005062 case glsl::ExtInstAtan2: // Implementing atan2pi
5063 generate_extra_inst(
5064 spv::OpFMul,
5065 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5066 break;
5067
5068 default:
5069 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005070 }
David Neto22f144c2017-06-12 14:26:21 -04005071 }
David Neto3fbb4072017-10-16 11:28:14 -04005072
alan-bakerb39c8262019-03-08 14:03:37 -05005073 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005074 //
5075 // Generate OpBitCount
5076 //
5077 // Ops[0] = Result Type ID
5078 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005079 SPIRVOperandList Ops;
5080 Ops << MkId(lookupType(Call->getType()))
5081 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005082
5083 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005084 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005085 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005086
David Neto862b7d82018-06-14 18:48:37 -04005087 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005088
5089 // Generate an OpCompositeConstruct
5090 SPIRVOperandList Ops;
5091
5092 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005093 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005094
5095 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005096 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005097 }
5098
5099 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005100 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5101 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005102
Alan Baker202c8c72018-08-13 13:47:44 -04005103 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5104
5105 // We have already mapped the call's result value to an ID.
5106 // Don't generate any code now.
5107
5108 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005109
5110 // We have already mapped the call's result value to an ID.
5111 // Don't generate any code now.
5112
David Neto22f144c2017-06-12 14:26:21 -04005113 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005114 if (Call->getType()->isPointerTy()) {
5115 // Functions returning pointers require variable pointers.
5116 setVariablePointersCapabilities(
5117 Call->getType()->getPointerAddressSpace());
5118 }
5119
David Neto22f144c2017-06-12 14:26:21 -04005120 //
5121 // Generate OpFunctionCall.
5122 //
5123
5124 // Ops[0] = Result Type ID
5125 // Ops[1] = Callee Function ID
5126 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5127 SPIRVOperandList Ops;
5128
David Neto862b7d82018-06-14 18:48:37 -04005129 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005130
5131 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005132 if (CalleeID == 0) {
5133 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005134 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005135 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5136 // causes an infinite loop. Instead, go ahead and generate
5137 // the bad function call. A validator will catch the 0-Id.
5138 // llvm_unreachable("Can't translate function call");
5139 }
David Neto22f144c2017-06-12 14:26:21 -04005140
David Neto257c3892018-04-11 13:19:45 -04005141 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005142
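        // Pointer arguments may force extra capabilities: passing a
        // storage-buffer pointer requires VariablePointersStorageBuffer,
        // while passing a workgroup pointer that is not a memory object
        // declaration (an OpVariable or function parameter) requires full
        // VariablePointers.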
David Neto22f144c2017-06-12 14:26:21 -04005143 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5144 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005145 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005146 auto *operand_type = operand->getType();
5147 // Images and samplers can be passed as function parameters without
5148 // variable pointers.
5149 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5150 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005151 auto sc =
5152 GetStorageClass(operand->getType()->getPointerAddressSpace());
5153 if (sc == spv::StorageClassStorageBuffer) {
5154 // Passing SSBO by reference requires variable pointers storage
5155 // buffer.
5156 setVariablePointersStorageBuffer(true);
5157 } else if (sc == spv::StorageClassWorkgroup) {
5158 // Workgroup references require variable pointers if they are not
5159 // memory object declarations.
5160 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5161 // Workgroup accessor represents a variable reference.
5162 if (!operand_call->getCalledFunction()->getName().startswith(
5163 clspv::WorkgroupAccessorFunction()))
5164 setVariablePointers(true);
5165 } else {
5166 // Arguments are function parameters.
5167 if (!isa<Argument>(operand))
5168 setVariablePointers(true);
5169 }
5170 }
5171 }
5172 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005173 }
5174
David Neto87846742018-04-11 17:36:22 -04005175 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5176 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005177 SPIRVInstList.insert(InsertPoint, CallInst);
5178 }
5179 }
5180 }
5181}
5182
David Neto1a1a0582017-07-07 12:01:44 -04005183void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005184 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005185 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005186 }
David Neto1a1a0582017-07-07 12:01:44 -04005187
5188 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005189
5190 // Find an iterator pointing just past the last decoration.
5191 bool seen_decorations = false;
5192 auto DecoInsertPoint =
5193 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5194 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5195 const bool is_decoration =
5196 Inst->getOpcode() == spv::OpDecorate ||
5197 Inst->getOpcode() == spv::OpMemberDecorate;
5198 if (is_decoration) {
5199 seen_decorations = true;
5200 return false;
5201 } else {
5202 return seen_decorations;
5203 }
5204 });
5205
David Netoc6f3ab22018-04-06 18:02:31 -04005206 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5207 // instructions we generated earlier.
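  // For example, a pointer to float used with OpPtrAccessChain would get
  //   OpDecorate %ptr_float ArrayStride 4
  // (ids illustrative), with the stride taken from the pointee's alloc size.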
David Neto85082642018-03-24 06:55:20 -07005208 for (auto *type : getTypesNeedingArrayStride()) {
5209 Type *elemTy = nullptr;
5210 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5211 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005212 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005213 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005214 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005215 elemTy = seqTy->getSequentialElementType();
5216 } else {
5217 errs() << "Unhandled strided type " << *type << "\n";
5218 llvm_unreachable("Unhandled strided type");
5219 }
David Neto1a1a0582017-07-07 12:01:44 -04005220
5221 // Ops[0] = Target ID
5222 // Ops[1] = Decoration (ArrayStride)
5223 // Ops[2] = Stride number (Literal Number)
5224 SPIRVOperandList Ops;
5225
David Neto85082642018-03-24 06:55:20 -07005226 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005227 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005228
5229 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5230 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005231
David Neto87846742018-04-11 17:36:22 -04005232 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005233 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5234 }
David Netoc6f3ab22018-04-06 18:02:31 -04005235
5236 // Emit SpecId decorations targeting the array size value.
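  // For example, a local (workgroup) pointer argument assigned spec id 3
  // yields (ids illustrative):
  //   OpDecorate %array_size SpecId 3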
Alan Baker202c8c72018-08-13 13:47:44 -04005237 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5238 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005239 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005240 SPIRVOperandList Ops;
5241 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5242 << MkNum(arg_info.spec_id);
5243 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005244 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005245 }
David Neto1a1a0582017-07-07 12:01:44 -04005246}
5247
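// The cases below match Itanium-mangled OpenCL builtin names: "_Z" is
// followed by the name length and the name, then the parameter types
// ("c" char, "s" short, "i" int, "l" long, "h"/"t"/"j"/"m" their unsigned
// counterparts, "f" float, and "Dv4_f" a 4-element float vector), so
// "_Z3absi" is abs(int) and "_Z5clampfff" is clamp(float, float, float).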
David Neto22f144c2017-06-12 14:26:21 -04005248glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5249 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005250 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5251 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5252 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5253 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005254 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5255 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5256 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5257 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005258 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5259 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5260 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5261 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005262 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5263 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5264 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5265 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005266 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5267 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5268 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5269 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5270 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5271 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5272 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5273 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005274 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5275 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5276 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5277 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5278 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5279 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5280 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5281 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005282 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5283 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5284 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5285 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5286 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5287 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5288 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5289 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005290 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5291 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5292 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5293 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5294 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5295 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5296 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5297 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005298 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5299 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5300 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5301 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005302 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5303 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5304 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5305 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5306 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5307 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5308 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5309 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005310 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5311 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5312 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5313 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5314 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5315 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5316 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5317 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005318 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5319 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5320 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5321 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5322 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5323 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5324 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5325 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005326 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5327 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5328 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5329 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5330 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5331 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5332 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5333 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005334 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5335 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5336 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5337 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5338 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005339 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5340 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5341 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5342 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5343 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5344 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5345 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5346 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005347 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5348 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5349 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5350 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5351 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5352 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5353 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5354 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005355 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5356 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5357 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5358 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5359 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5360 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5361 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5362 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005363 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5364 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5365 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5366 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5367 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5368 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5369 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5370 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005371 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5372 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5373 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5374 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5375 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5376 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5377 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5378 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5379 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5380 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5381 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5382 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5383 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5384 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5385 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5386 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5387 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5388 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5389 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5390 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5391 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5392 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5393 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5394 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5395 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5396 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5397 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5398 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5399 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5400 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5401 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5402 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5403 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5404 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5405 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5406 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5407 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5408 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5409 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5410 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5411 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005412 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005413 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5414 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5415 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5416 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5417 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5418 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5419 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5420 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5421 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5422 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5423 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5424 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5425 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5426 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5427 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5428 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5429 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005430 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005431 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005432 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005433 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005434 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005435 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5436 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005437 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005438 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5439 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5440 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005441 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5442 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5443 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5444 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005445 .Default(kGlslExtInstBad);
5446}
5447
5448glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5449 // Check indirect cases.
5450 return StringSwitch<glsl::ExtInst>(Name)
5451 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
 5452       // Use an exact match on the float arg because these need a multiply
 5453       // by a constant of the right floating point type.
5454 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5455 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5456 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5457 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5458 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5459 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5460 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5461 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005462 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5463 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5464 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5465 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005466 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5467 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5468 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5469 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5470 .Default(kGlslExtInstBad);
5471}
5472
alan-bakerb6b09dc2018-11-08 16:59:28 -05005473glsl::ExtInst
5474SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005475 auto direct = getExtInstEnum(Name);
5476 if (direct != kGlslExtInstBad)
5477 return direct;
5478 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005479}
5480
David Neto22f144c2017-06-12 14:26:21 -04005481void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005482 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005483}
5484
5485void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5486 WriteOneWord(Inst->getResultID());
5487}
5488
5489void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5490 // High 16 bit : Word Count
5491 // Low 16 bit : Opcode
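  // For example, an instruction with word count 4 and opcode 71 (OpDecorate)
  // is emitted as the single word 0x00040047.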
5492 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005493 const uint32_t count = Inst->getWordCount();
5494 if (count > 65535) {
5495 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5496 llvm_unreachable("Word count too high");
5497 }
David Neto22f144c2017-06-12 14:26:21 -04005498 Word |= Inst->getWordCount() << 16;
5499 WriteOneWord(Word);
5500}
5501
5502void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
5503 SPIRVOperandType OpTy = Op->getType();
5504 switch (OpTy) {
5505 default: {
5506 llvm_unreachable("Unsupported SPIRV Operand Type???");
5507 break;
5508 }
5509 case SPIRVOperandType::NUMBERID: {
5510 WriteOneWord(Op->getNumID());
5511 break;
5512 }
5513 case SPIRVOperandType::LITERAL_STRING: {
5514 std::string Str = Op->getLiteralStr();
5515 const char *Data = Str.c_str();
5516 size_t WordSize = Str.size() / 4;
5517 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5518 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5519 }
5520
5521 uint32_t Remainder = Str.size() % 4;
5522 uint32_t LastWord = 0;
5523 if (Remainder) {
5524 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5525 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5526 }
5527 }
5528
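    // The trailing word is always emitted: it packs any remaining bytes
    // little-endian and is zero-padded, doubling as the nul terminator
    // SPIR-V requires when the string length is a multiple of four.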
5529 WriteOneWord(LastWord);
5530 break;
5531 }
5532 case SPIRVOperandType::LITERAL_INTEGER:
5533 case SPIRVOperandType::LITERAL_FLOAT: {
5534 auto LiteralNum = Op->getLiteralNum();
 5535     // TODO: Handle LiteralNum carefully.
5536 for (auto Word : LiteralNum) {
5537 WriteOneWord(Word);
5538 }
5539 break;
5540 }
5541 }
5542}
5543
5544void SPIRVProducerPass::WriteSPIRVBinary() {
5545 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5546
5547 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005548 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005549 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5550
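    // The cases below fall into three encoding shapes: instructions with no
    // result id (operands only), type/label-like instructions whose result
    // id is written first, and value instructions whose result type id is
    // written before the result id.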
5551 switch (Opcode) {
5552 default: {
David Neto5c22a252018-03-15 16:07:41 -04005553 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005554 llvm_unreachable("Unsupported SPIRV instruction");
5555 break;
5556 }
5557 case spv::OpCapability:
5558 case spv::OpExtension:
5559 case spv::OpMemoryModel:
5560 case spv::OpEntryPoint:
5561 case spv::OpExecutionMode:
5562 case spv::OpSource:
5563 case spv::OpDecorate:
5564 case spv::OpMemberDecorate:
5565 case spv::OpBranch:
5566 case spv::OpBranchConditional:
5567 case spv::OpSelectionMerge:
5568 case spv::OpLoopMerge:
5569 case spv::OpStore:
5570 case spv::OpImageWrite:
5571 case spv::OpReturnValue:
5572 case spv::OpControlBarrier:
5573 case spv::OpMemoryBarrier:
5574 case spv::OpReturn:
5575 case spv::OpFunctionEnd:
5576 case spv::OpCopyMemory: {
5577 WriteWordCountAndOpcode(Inst);
5578 for (uint32_t i = 0; i < Ops.size(); i++) {
5579 WriteOperand(Ops[i]);
5580 }
5581 break;
5582 }
5583 case spv::OpTypeBool:
5584 case spv::OpTypeVoid:
5585 case spv::OpTypeSampler:
5586 case spv::OpLabel:
5587 case spv::OpExtInstImport:
5588 case spv::OpTypePointer:
5589 case spv::OpTypeRuntimeArray:
5590 case spv::OpTypeStruct:
5591 case spv::OpTypeImage:
5592 case spv::OpTypeSampledImage:
5593 case spv::OpTypeInt:
5594 case spv::OpTypeFloat:
5595 case spv::OpTypeArray:
5596 case spv::OpTypeVector:
5597 case spv::OpTypeFunction: {
5598 WriteWordCountAndOpcode(Inst);
5599 WriteResultID(Inst);
5600 for (uint32_t i = 0; i < Ops.size(); i++) {
5601 WriteOperand(Ops[i]);
5602 }
5603 break;
5604 }
5605 case spv::OpFunction:
5606 case spv::OpFunctionParameter:
5607 case spv::OpAccessChain:
5608 case spv::OpPtrAccessChain:
5609 case spv::OpInBoundsAccessChain:
5610 case spv::OpUConvert:
5611 case spv::OpSConvert:
5612 case spv::OpConvertFToU:
5613 case spv::OpConvertFToS:
5614 case spv::OpConvertUToF:
5615 case spv::OpConvertSToF:
5616 case spv::OpFConvert:
5617 case spv::OpConvertPtrToU:
5618 case spv::OpConvertUToPtr:
5619 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005620 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005621 case spv::OpIAdd:
5622 case spv::OpFAdd:
5623 case spv::OpISub:
5624 case spv::OpFSub:
5625 case spv::OpIMul:
5626 case spv::OpFMul:
5627 case spv::OpUDiv:
5628 case spv::OpSDiv:
5629 case spv::OpFDiv:
5630 case spv::OpUMod:
5631 case spv::OpSRem:
5632 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005633 case spv::OpUMulExtended:
5634 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005635 case spv::OpBitwiseOr:
5636 case spv::OpBitwiseXor:
5637 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005638 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005639 case spv::OpShiftLeftLogical:
5640 case spv::OpShiftRightLogical:
5641 case spv::OpShiftRightArithmetic:
5642 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005643 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005644 case spv::OpCompositeExtract:
5645 case spv::OpVectorExtractDynamic:
5646 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005647 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005648 case spv::OpVectorInsertDynamic:
5649 case spv::OpVectorShuffle:
5650 case spv::OpIEqual:
5651 case spv::OpINotEqual:
5652 case spv::OpUGreaterThan:
5653 case spv::OpUGreaterThanEqual:
5654 case spv::OpULessThan:
5655 case spv::OpULessThanEqual:
5656 case spv::OpSGreaterThan:
5657 case spv::OpSGreaterThanEqual:
5658 case spv::OpSLessThan:
5659 case spv::OpSLessThanEqual:
5660 case spv::OpFOrdEqual:
5661 case spv::OpFOrdGreaterThan:
5662 case spv::OpFOrdGreaterThanEqual:
5663 case spv::OpFOrdLessThan:
5664 case spv::OpFOrdLessThanEqual:
5665 case spv::OpFOrdNotEqual:
5666 case spv::OpFUnordEqual:
5667 case spv::OpFUnordGreaterThan:
5668 case spv::OpFUnordGreaterThanEqual:
5669 case spv::OpFUnordLessThan:
5670 case spv::OpFUnordLessThanEqual:
5671 case spv::OpFUnordNotEqual:
5672 case spv::OpExtInst:
5673 case spv::OpIsInf:
5674 case spv::OpIsNan:
5675 case spv::OpAny:
5676 case spv::OpAll:
5677 case spv::OpUndef:
5678 case spv::OpConstantNull:
5679 case spv::OpLogicalOr:
5680 case spv::OpLogicalAnd:
5681 case spv::OpLogicalNot:
5682 case spv::OpLogicalNotEqual:
5683 case spv::OpConstantComposite:
5684 case spv::OpSpecConstantComposite:
5685 case spv::OpConstantTrue:
5686 case spv::OpConstantFalse:
5687 case spv::OpConstant:
5688 case spv::OpSpecConstant:
5689 case spv::OpVariable:
5690 case spv::OpFunctionCall:
5691 case spv::OpSampledImage:
5692 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005693 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005694 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005695 case spv::OpSelect:
5696 case spv::OpPhi:
5697 case spv::OpLoad:
5698 case spv::OpAtomicIAdd:
5699 case spv::OpAtomicISub:
5700 case spv::OpAtomicExchange:
5701 case spv::OpAtomicIIncrement:
5702 case spv::OpAtomicIDecrement:
5703 case spv::OpAtomicCompareExchange:
5704 case spv::OpAtomicUMin:
5705 case spv::OpAtomicSMin:
5706 case spv::OpAtomicUMax:
5707 case spv::OpAtomicSMax:
5708 case spv::OpAtomicAnd:
5709 case spv::OpAtomicOr:
5710 case spv::OpAtomicXor:
5711 case spv::OpDot: {
5712 WriteWordCountAndOpcode(Inst);
5713 WriteOperand(Ops[0]);
5714 WriteResultID(Inst);
5715 for (uint32_t i = 1; i < Ops.size(); i++) {
5716 WriteOperand(Ops[i]);
5717 }
5718 break;
5719 }
5720 }
5721 }
5722}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005723
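// Returns true if |type| can be initialized with OpConstantNull: scalars,
// vectors, pointers, and arrays/structs built from them qualify, while
// opaque image and sampler types do not.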
alan-bakerb6b09dc2018-11-08 16:59:28 -05005724bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005725 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005726 case Type::HalfTyID:
5727 case Type::FloatTyID:
5728 case Type::DoubleTyID:
5729 case Type::IntegerTyID:
5730 case Type::VectorTyID:
5731 return true;
5732 case Type::PointerTyID: {
5733 const PointerType *pointer_type = cast<PointerType>(type);
5734 if (pointer_type->getPointerAddressSpace() !=
5735 AddressSpace::UniformConstant) {
5736 auto pointee_type = pointer_type->getPointerElementType();
5737 if (pointee_type->isStructTy() &&
5738 cast<StructType>(pointee_type)->isOpaque()) {
5739 // Images and samplers are not nullable.
5740 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005741 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005742 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005743 return true;
5744 }
5745 case Type::ArrayTyID:
5746 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5747 case Type::StructTyID: {
5748 const StructType *struct_type = cast<StructType>(type);
5749 // Images and samplers are not nullable.
5750 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005751 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005752 for (const auto element : struct_type->elements()) {
5753 if (!IsTypeNullable(element))
5754 return false;
5755 }
5756 return true;
5757 }
5758 default:
5759 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005760 }
5761}
Alan Bakerfcda9482018-10-02 17:09:59 -04005762
5763void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5764 if (auto *offsets_md =
5765 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
 5766     // Metadata is stored as key-value pair operands. The first element of each
5767 // operand is the type and the second is a vector of offsets.
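    // A sketch of one operand's expected shape (values illustrative):
    //   !{ <remapped type> undef, !{i32 0, i32 4, i32 16} }
    // where the second element lists the remapped byte offsets.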
5768 for (const auto *operand : offsets_md->operands()) {
5769 const auto *pair = cast<MDTuple>(operand);
5770 auto *type =
5771 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5772 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5773 std::vector<uint32_t> offsets;
5774 for (const Metadata *offset_md : offset_vector->operands()) {
5775 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005776 offsets.push_back(static_cast<uint32_t>(
5777 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005778 }
5779 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5780 }
5781 }
5782
5783 if (auto *sizes_md =
5784 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5785 // Metadata is stored as key-value pair operands. The first element of each
5786 // operand is the type and the second is a triple of sizes: type size in
5787 // bits, store size and alloc size.
5788 for (const auto *operand : sizes_md->operands()) {
5789 const auto *pair = cast<MDTuple>(operand);
5790 auto *type =
5791 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5792 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5793 uint64_t type_size_in_bits =
5794 cast<ConstantInt>(
5795 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5796 ->getZExtValue();
5797 uint64_t type_store_size =
5798 cast<ConstantInt>(
5799 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5800 ->getZExtValue();
5801 uint64_t type_alloc_size =
5802 cast<ConstantInt>(
5803 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5804 ->getZExtValue();
5805 RemappedUBOTypeSizes.insert(std::make_pair(
5806 type, std::make_tuple(type_size_in_bits, type_store_size,
5807 type_alloc_size)));
5808 }
5809 }
5810}
5811
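// The three helpers below prefer the sizes recorded for remapped UBO types
// and otherwise fall back to the DataLayout. As a reminder of the
// distinction, for an LLVM <3 x float> the DataLayout typically reports 96
// bits, a 12-byte store size, and a 16-byte alloc size.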
5812uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5813 const DataLayout &DL) {
5814 auto iter = RemappedUBOTypeSizes.find(type);
5815 if (iter != RemappedUBOTypeSizes.end()) {
5816 return std::get<0>(iter->second);
5817 }
5818
5819 return DL.getTypeSizeInBits(type);
5820}
5821
5822uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5823 auto iter = RemappedUBOTypeSizes.find(type);
5824 if (iter != RemappedUBOTypeSizes.end()) {
5825 return std::get<1>(iter->second);
5826 }
5827
5828 return DL.getTypeStoreSize(type);
5829}
5830
5831uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5832 auto iter = RemappedUBOTypeSizes.find(type);
5833 if (iter != RemappedUBOTypeSizes.end()) {
5834 return std::get<2>(iter->second);
5835 }
5836
5837 return DL.getTypeAllocSize(type);
5838}
alan-baker5b86ed72019-02-15 08:26:50 -05005839
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005840void SPIRVProducerPass::setVariablePointersCapabilities(
5841 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005842 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5843 setVariablePointersStorageBuffer(true);
5844 } else {
5845 setVariablePointers(true);
5846 }
5847}
5848
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005849Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005850 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5851 return GetBasePointer(gep->getPointerOperand());
5852 }
5853
5854 // Conservatively return |v|.
5855 return v;
5856}
5857
5858bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5859 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5860 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5861 if (lhs_call->getCalledFunction()->getName().startswith(
5862 clspv::ResourceAccessorFunction()) &&
5863 rhs_call->getCalledFunction()->getName().startswith(
5864 clspv::ResourceAccessorFunction())) {
5865 // For resource accessors, match descriptor set and binding.
5866 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5867 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5868 return true;
5869 } else if (lhs_call->getCalledFunction()->getName().startswith(
5870 clspv::WorkgroupAccessorFunction()) &&
5871 rhs_call->getCalledFunction()->getName().startswith(
5872 clspv::WorkgroupAccessorFunction())) {
5873 // For workgroup resources, match spec id.
5874 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5875 return true;
5876 }
5877 }
5878 }
5879
5880 return false;
5881}
5882
5883bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5884 assert(inst->getType()->isPointerTy());
5885 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5886 spv::StorageClassStorageBuffer);
5887 const bool hack_undef = clspv::Option::HackUndef();
5888 if (auto *select = dyn_cast<SelectInst>(inst)) {
5889 auto *true_base = GetBasePointer(select->getTrueValue());
5890 auto *false_base = GetBasePointer(select->getFalseValue());
5891
5892 if (true_base == false_base)
5893 return true;
5894
5895 // If either the true or false operand is a null, then we satisfy the same
5896 // object constraint.
5897 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5898 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5899 return true;
5900 }
5901
5902 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5903 if (false_cst->isNullValue() ||
5904 (hack_undef && isa<UndefValue>(false_base)))
5905 return true;
5906 }
5907
5908 if (sameResource(true_base, false_base))
5909 return true;
5910 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5911 Value *value = nullptr;
5912 bool ok = true;
5913 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5914 auto *base = GetBasePointer(phi->getIncomingValue(i));
 5915       // Null values satisfy the constraint of selecting from the same
 5916       // object.
5917 if (!value) {
5918 if (auto *cst = dyn_cast<Constant>(base)) {
5919 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5920 value = base;
5921 } else {
5922 value = base;
5923 }
5924 } else if (base != value) {
5925 if (auto *base_cst = dyn_cast<Constant>(base)) {
5926 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5927 continue;
5928 }
5929
5930 if (sameResource(value, base))
5931 continue;
5932
5933 // Values don't represent the same base.
5934 ok = false;
5935 }
5936 }
5937
5938 return ok;
5939 }
5940
5941 // Conservatively return false.
5942 return false;
5943}
alan-bakere9308012019-03-15 10:25:13 -04005944
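// Returns true if |Arg| (a global address space pointer parameter) can be
// traced, through the calls that reach it, back to a clspv resource
// accessor whose coherent operand is set.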
5945bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5946 if (!Arg.getType()->isPointerTy() ||
5947 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5948 // Only SSBOs need to be annotated as coherent.
5949 return false;
5950 }
5951
5952 DenseSet<Value *> visited;
5953 std::vector<Value *> stack;
5954 for (auto *U : Arg.getParent()->users()) {
5955 if (auto *call = dyn_cast<CallInst>(U)) {
5956 stack.push_back(call->getOperand(Arg.getArgNo()));
5957 }
5958 }
5959
5960 while (!stack.empty()) {
5961 Value *v = stack.back();
5962 stack.pop_back();
5963
5964 if (!visited.insert(v).second)
5965 continue;
5966
5967 auto *resource_call = dyn_cast<CallInst>(v);
5968 if (resource_call &&
5969 resource_call->getCalledFunction()->getName().startswith(
5970 clspv::ResourceAccessorFunction())) {
5971 // If this is a resource accessor function, check if the coherent operand
5972 // is set.
5973 const auto coherent =
5974 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5975 ->getZExtValue());
5976 if (coherent == 1)
5977 return true;
5978 } else if (auto *arg = dyn_cast<Argument>(v)) {
5979 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005980 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005981 if (auto *call = dyn_cast<CallInst>(U)) {
5982 stack.push_back(call->getOperand(arg->getArgNo()));
5983 }
5984 }
5985 } else if (auto *user = dyn_cast<User>(v)) {
5986 // If this is a user, traverse all operands that could lead to resource
5987 // variables.
5988 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5989 Value *operand = user->getOperand(i);
5990 if (operand->getType()->isPointerTy() &&
5991 operand->getType()->getPointerAddressSpace() ==
5992 clspv::AddressSpace::Global) {
5993 stack.push_back(operand);
5994 }
5995 }
5996 }
5997 }
5998
5999 // No coherent resource variables encountered.
6000 return false;
6001}
alan-baker06cad652019-12-03 17:56:47 -05006002
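// Precomputes the structured-CFG information used when emitting OpLoopMerge
// and OpSelectionMerge: for each loop header, its merge and continue blocks;
// for each conditional branch that is not a loop back-edge, the block to use
// as its selection merge.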
6003void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6004 // First, track loop merges and continues.
6005 DenseSet<BasicBlock *> LoopMergesAndContinues;
6006 for (auto &F : module) {
6007 if (F.isDeclaration())
6008 continue;
6009
6010 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6011 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6012 std::deque<BasicBlock *> order;
6013 DenseSet<BasicBlock *> visited;
6014 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6015
6016 for (auto BB : order) {
6017 auto terminator = BB->getTerminator();
6018 auto branch = dyn_cast<BranchInst>(terminator);
6019 if (LI.isLoopHeader(BB)) {
6020 auto L = LI.getLoopFor(BB);
6021 BasicBlock *ContinueBB = nullptr;
6022 BasicBlock *MergeBB = nullptr;
6023
6024 MergeBB = L->getExitBlock();
6025 if (!MergeBB) {
 6026           // The StructurizeCFG pass converts the CFG into a triangle shape
 6027           // whose regions have a single entry and exit, so a loop should not
 6028           // have multiple exits.
6029 llvm_unreachable("Loop has multiple exits???");
6030 }
6031
6032 if (L->isLoopLatch(BB)) {
6033 ContinueBB = BB;
6034 } else {
 6035           // From SPIR-V spec 2.11, the Continue Target must dominate the
 6036           // back-edge block.
6037 BasicBlock *Header = L->getHeader();
6038 BasicBlock *Latch = L->getLoopLatch();
6039 for (auto *loop_block : L->blocks()) {
6040 if (loop_block == Header) {
6041 continue;
6042 }
6043
6044 // Check whether block dominates block with back-edge.
6045 // The loop latch is the single block with a back-edge. If it was
6046 // possible, StructurizeCFG made the loop conform to this
6047 // requirement, otherwise |Latch| is a nullptr.
6048 if (DT.dominates(loop_block, Latch)) {
6049 ContinueBB = loop_block;
6050 }
6051 }
6052
6053 if (!ContinueBB) {
6054 llvm_unreachable("Wrong continue block from loop");
6055 }
6056 }
6057
6058 // Record the continue and merge blocks.
6059 MergeBlocks[BB] = MergeBB;
6060 ContinueBlocks[BB] = ContinueBB;
6061 LoopMergesAndContinues.insert(MergeBB);
6062 LoopMergesAndContinues.insert(ContinueBB);
6063 } else if (branch && branch->isConditional()) {
6064 auto L = LI.getLoopFor(BB);
6065 bool HasBackedge = false;
6066 while (L && !HasBackedge) {
6067 if (L->isLoopLatch(BB)) {
6068 HasBackedge = true;
6069 }
6070 L = L->getParentLoop();
6071 }
6072
6073 if (!HasBackedge) {
6074 // Only need a merge if the branch doesn't include a loop break or
6075 // continue.
6076 auto true_bb = branch->getSuccessor(0);
6077 auto false_bb = branch->getSuccessor(1);
6078 if (!LoopMergesAndContinues.count(true_bb) &&
6079 !LoopMergesAndContinues.count(false_bb)) {
 6080             // The StructurizeCFG pass has already restructured the CFG, so just
 6081             // use the false block of the branch instruction as the merge block.
6082 MergeBlocks[BB] = false_bb;
6083 }
6084 }
6085 }
6086 }
6087 }
6088}