// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() { return Type; };
  uint32_t getNumID() { return LiteralNum[0]; };
  std::string getLiteralStr() { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
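      // e.g. a 3-character string and its null fit in one word, while a
      // 4-character string needs a second word for the null.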
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  SPIRVOperandList(ArrayRef<SPIRVOperand *> init)
      : contents_(init.begin(), init.end()) {}
  operator ArrayRef<SPIRVOperand *>() { return contents_; }
  void push_back(SPIRVOperand *op) { contents_.push_back(op); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  SPIRVOperand *&operator[](size_t i) { return contents_[i]; }

  const SmallVector<SPIRVOperand *, 8> &getOperands() const {
    return contents_;
  }

private:
  SmallVector<SPIRVOperand *, 8> contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list, SPIRVOperand *elem) {
  list.push_back(elem);
  return list;
}

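// Convenience factories for operands; each allocates a new SPIRVOperand.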
SPIRVOperand *MkNum(uint32_t num) {
  return new SPIRVOperand(LITERAL_INTEGER, num);
}
SPIRVOperand *MkInteger(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_INTEGER, num_vec);
}
SPIRVOperand *MkFloat(ArrayRef<uint32_t> num_vec) {
  return new SPIRVOperand(LITERAL_FLOAT, num_vec);
}
SPIRVOperand *MkId(uint32_t id) { return new SPIRVOperand(NUMBERID, id); }
SPIRVOperand *MkString(StringRef str) {
  return new SPIRVOperand(LITERAL_STRING, str);
}

struct SPIRVInstruction {
  // Create an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, ArrayRef<SPIRVOperand *> Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
        Operands(Ops.begin(), Ops.end()) {
    for (auto *operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
  }
  // Create an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                            ArrayRef<SPIRVOperand *> Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
        Operands(Ops.begin(), Ops.end()) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    for (auto *operand : Ops) {
      WordCount += operand->GetNumWords();
    }
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<SPIRVOperand *> getOperands() const { return Operands; }

private:
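  // WordCount includes the leading word that packs the word count and opcode;
  // the constructor taking a result ID also counts the word holding that ID.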
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<SPIRVOperand *, 4> Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

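  // Returns the SPIR-V type ID for |Ty|. For a pointer outside the
  // UniformConstant address space whose pointee is an opaque struct, the
  // lookup is keyed on the pointee type instead.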
  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
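    // Reassemble the byte stream into 32-bit words, least-significant byte
    // first.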
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
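  // The upper 16 bits of the generator word identify the tool; the lower
  // 16 bits (the tool version) are left as zero here.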
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR for each function, such as global
  // variables for arguments, constants, and pointer types for argument
  // access. This information is artificial because we need Vulkan SPIR-V
  // output. It is executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.
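          // For SExt the selected constants are 0 and -1; for UIToFP they are
          // 0.0f and 1.0f.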

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components = dim;
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
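    // Register the scalar type used as the sampled (channel) type of any
    // image types declared in the module.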
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
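      // Each access call encodes, as constant operands: 0 = descriptor set,
      // 1 = binding, 2 = argument kind, 3 = argument index, 5 = coherent flag.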
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

bool SPIRVProducerPass::FindExtInst(Module &M) {
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
          // Check whether this call is for extended instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001071 auto callee_name = Callee->getName();
1072 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1073 const glsl::ExtInst IndirectEInst =
1074 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001075
David Neto3fbb4072017-10-16 11:28:14 -04001076 HasExtInst |=
1077 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1078
1079 if (IndirectEInst) {
1080 // Register extra constants if needed.
1081
1082 // Registers a type and constant for computing the result of the
1083 // given instruction. If the result of the instruction is a vector,
1084 // then make a splat vector constant with the same number of
1085 // elements.
1086 auto register_constant = [this, &I](Constant *constant) {
1087 FindType(constant->getType());
1088 FindConstant(constant);
1089 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1090 // Register the splat vector of the value with the same
1091 // width as the result of the instruction.
1092 auto *vec_constant = ConstantVector::getSplat(
1093 static_cast<unsigned>(vectorTy->getNumElements()),
1094 constant);
1095 FindConstant(vec_constant);
1096 FindType(vec_constant->getType());
1097 }
1098 };
1099 switch (IndirectEInst) {
1100 case glsl::ExtInstFindUMsb:
1101 // clz needs OpExtInst and OpISub with constant 31, or splat
1102 // vector of 31. Add it to the constant list here.
1103 register_constant(
1104 ConstantInt::get(Type::getInt32Ty(Context), 31));
1105 break;
1106 case glsl::ExtInstAcos:
1107 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001108 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001109 case glsl::ExtInstAtan2:
1110 // We need 1/pi for acospi, asinpi, atan2pi.
1111 register_constant(
1112 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1113 break;
1114 default:
1115 assert(false && "internally inconsistent");
1116 }
David Neto22f144c2017-06-12 14:26:21 -04001117 }
1118 }
1119 }
1120 }
1121 }
1122
1123 return HasExtInst;
1124}
1125
1126void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1127 // Investigate global variable's type.
1128 FindType(GV.getType());
1129}
1130
1131void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1132 // Investigate function's type.
1133 FunctionType *FTy = F.getFunctionType();
1134
1135 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1136 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001137 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001138 if (GlobalConstFuncTyMap.count(FTy)) {
1139 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1140 SmallVector<Type *, 4> NewFuncParamTys;
1141 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1142 Type *ParamTy = FTy->getParamType(i);
1143 if (i == GVCstArgIdx) {
1144 Type *EleTy = ParamTy->getPointerElementType();
1145 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1146 }
1147
1148 NewFuncParamTys.push_back(ParamTy);
1149 }
1150
1151 FunctionType *NewFTy =
1152 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1153 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1154 FTy = NewFTy;
1155 }
1156
1157 FindType(FTy);
1158 } else {
1159    // Kernel arguments are handled separately (as resource/workgroup variables),
1160    // so create a parameterless function type for the kernel and add it to the map.
1161 SmallVector<Type *, 4> NewFuncParamTys;
1162 FunctionType *NewFTy =
1163 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1164 FindType(NewFTy);
1165 }
1166
1167 // Investigate instructions' type in function body.
1168 for (BasicBlock &BB : F) {
1169 for (Instruction &I : BB) {
1170 if (isa<ShuffleVectorInst>(I)) {
1171 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1172          // Ignore the type of the shuffle vector instruction's mask operand.
1173 if (i == 2) {
1174 continue;
1175 }
1176
1177 Value *Op = I.getOperand(i);
1178 if (!isa<MetadataAsValue>(Op)) {
1179 FindType(Op->getType());
1180 }
1181 }
1182
1183 FindType(I.getType());
1184 continue;
1185 }
1186
David Neto862b7d82018-06-14 18:48:37 -04001187 CallInst *Call = dyn_cast<CallInst>(&I);
1188
1189 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001190 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001191 // This is a fake call representing access to a resource variable.
1192 // We handle that elsewhere.
1193 continue;
1194 }
1195
Alan Baker202c8c72018-08-13 13:47:44 -04001196 if (Call && Call->getCalledFunction()->getName().startswith(
1197 clspv::WorkgroupAccessorFunction())) {
1198 // This is a fake call representing access to a workgroup variable.
1199 // We handle that elsewhere.
1200 continue;
1201 }
1202
David Neto22f144c2017-06-12 14:26:21 -04001203 // Work through the operands of the instruction.
1204 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1205 Value *const Op = I.getOperand(i);
1206 // If any of the operands is a constant, find the type!
1207 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1208 FindType(Op->getType());
1209 }
1210 }
1211
1212 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001213 if (isa<CallInst>(&I)) {
1214          // Don't check the operand types of call instructions.
1215 break;
1216 }
Alan Baker202c8c72018-08-13 13:47:44 -04001217 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1218 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1219 clspv::WorkgroupAccessorFunction())) {
1220 // This is a fake call representing access to a workgroup variable.
1221 // We handle that elsewhere.
1222 continue;
1223 }
1224 }
David Neto22f144c2017-06-12 14:26:21 -04001225 if (!isa<MetadataAsValue>(&Op)) {
1226 FindType(Op->getType());
1227 continue;
1228 }
1229 }
1230
David Neto22f144c2017-06-12 14:26:21 -04001231 // We don't want to track the type of this call as we are going to replace
1232 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001233 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001234 Call->getCalledFunction()->getName())) {
1235 continue;
1236 }
1237
1238 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1239 // If gep's base operand has ModuleScopePrivate address space, make gep
1240 // return ModuleScopePrivate address space.
1241 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1242 // Add pointer type with private address space for global constant to
1243 // type list.
1244 Type *EleTy = I.getType()->getPointerElementType();
1245 Type *NewPTy =
1246 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1247
1248 FindType(NewPTy);
1249 continue;
1250 }
1251 }
1252
1253 FindType(I.getType());
1254 }
1255 }
1256}
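// Illustrative sketch of the rewrite above: if a helper function's type is
//   void (float addrspace(<constant>)* %p)
// and GlobalConstFuncTyMap says parameter 0 points at a module-scope constant,
// the recorded type becomes
//   void (float addrspace(<ModuleScopePrivate>)* %p)
// (address-space names here are symbolic; the numeric values come from
// clspv::AddressSpace).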
1257
1258 void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1259 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001260 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001261 0 < getSamplerMap().size()) {
1262 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1263 if (!SamplerStructTy) {
1264 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1265 }
1266
1267 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1268
1269 FindType(SamplerTy);
1270 }
1271}
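// Note: the sampler type recorded above is the opaque "opencl.sampler_t"
// struct behind a UniformConstant pointer; GenerateSPIRVTypes later turns
// these into OpTypeSampler and an OpTypePointer UniformConstant respectively.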
1272
1273void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1274 // Record types so they are generated.
1275 TypesNeedingLayout.reset();
1276 StructTypesNeedingBlock.reset();
1277
1278 // To match older clspv codegen, generate the float type first if required
1279 // for images.
1280 for (const auto *info : ModuleOrderedResourceVars) {
1281 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1282 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001283 if (IsIntImageType(info->var_fn->getReturnType())) {
1284 // Nothing for now...
1285 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1286 FindType(Type::getInt32Ty(M.getContext()));
1287 }
1288
1289 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001290 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001291 }
1292 }
1293
1294 for (const auto *info : ModuleOrderedResourceVars) {
1295 Type *type = info->var_fn->getReturnType();
1296
1297 switch (info->arg_kind) {
1298 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001299 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001300 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1301 StructTypesNeedingBlock.insert(sty);
1302 } else {
1303 errs() << *type << "\n";
1304 llvm_unreachable("Buffer arguments must map to structures!");
1305 }
1306 break;
1307 case clspv::ArgKind::Pod:
1308 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1309 StructTypesNeedingBlock.insert(sty);
1310 } else {
1311 errs() << *type << "\n";
1312 llvm_unreachable("POD arguments must map to structures!");
1313 }
1314 break;
1315 case clspv::ArgKind::ReadOnlyImage:
1316 case clspv::ArgKind::WriteOnlyImage:
1317 case clspv::ArgKind::Sampler:
1318 // Sampler and image types map to the pointee type but
1319 // in the uniform constant address space.
1320 type = PointerType::get(type->getPointerElementType(),
1321 clspv::AddressSpace::UniformConstant);
1322 break;
1323 default:
1324 break;
1325 }
1326
1327 // The converted type is the type of the OpVariable we will generate.
1328 // If the pointee type is an array of size zero, FindType will convert it
1329 // to a runtime array.
1330 FindType(type);
1331 }
1332
alan-bakerdcd97412019-09-16 15:32:30 -04001333 // If module constants are clustered in a storage buffer then that struct
1334 // needs layout decorations.
1335 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1336 for (GlobalVariable &GV : M.globals()) {
1337 PointerType *PTy = cast<PointerType>(GV.getType());
1338 const auto AS = PTy->getAddressSpace();
1339 const bool module_scope_constant_external_init =
1340 (AS == AddressSpace::Constant) && GV.hasInitializer();
1341 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1342 if (module_scope_constant_external_init &&
1343 spv::BuiltInMax == BuiltinType) {
1344 StructTypesNeedingBlock.insert(
1345 cast<StructType>(PTy->getPointerElementType()));
1346 }
1347 }
1348 }
1349
David Neto862b7d82018-06-14 18:48:37 -04001350 // Traverse the arrays and structures underneath each Block, and
1351 // mark them as needing layout.
1352 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1353 StructTypesNeedingBlock.end());
1354 while (!work_list.empty()) {
1355 Type *type = work_list.back();
1356 work_list.pop_back();
1357 TypesNeedingLayout.insert(type);
1358 switch (type->getTypeID()) {
1359 case Type::ArrayTyID:
1360 work_list.push_back(type->getArrayElementType());
1361 if (!Hack_generate_runtime_array_stride_early) {
1362 // Remember this array type for deferred decoration.
1363 TypesNeedingArrayStride.insert(type);
1364 }
1365 break;
1366 case Type::StructTyID:
1367 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1368 work_list.push_back(elem_ty);
1369 }
1370 default:
1371 // This type and its contained types don't get layout.
1372 break;
1373 }
1374 }
1375}
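// Worked example (hypothetical buffer argument): a kernel argument described
// by a pointer to  { [0 x float] }  has its outer struct added to
// StructTypesNeedingBlock (it will carry the Block decoration), and the
// work-list traversal above adds both the struct and its zero-sized array to
// TypesNeedingLayout so member offsets and ArrayStride get emitted.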
1376
1377 void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1378 // The SpecId assignment for pointer-to-local arguments is recorded in
1379 // module-level metadata. Translate that information into local argument
1380 // information.
1381 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001382 if (!nmd)
1383 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001384 for (auto operand : nmd->operands()) {
1385 MDTuple *tuple = cast<MDTuple>(operand);
1386 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1387 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001388 ConstantAsMetadata *arg_index_md =
1389 cast<ConstantAsMetadata>(tuple->getOperand(1));
1390 int arg_index = static_cast<int>(
1391 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1392 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001393
1394 ConstantAsMetadata *spec_id_md =
1395 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001396 int spec_id = static_cast<int>(
1397 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001398
1399 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1400 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001401 if (LocalSpecIdInfoMap.count(spec_id))
1402 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001403
1404 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1405 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1406 nextID + 1, nextID + 2,
1407 nextID + 3, spec_id};
1408 LocalSpecIdInfoMap[spec_id] = info;
1409 nextID += 4;
1410
1411 // Ensure the types necessary for this argument get generated.
1412 Type *IdxTy = Type::getInt32Ty(M.getContext());
1413 FindConstant(ConstantInt::get(IdxTy, 0));
1414 FindType(IdxTy);
1415 FindType(arg->getType());
1416 }
1417}
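// The metadata walked above is expected to have this shape (illustrative; the
// real node name comes from clspv::LocalSpecIdMetadataName()):
//   !named = !{ !{ void (...)* @kernel, i32 <arg index>, i32 <spec id> }, ... }
// one tuple per pointer-to-local kernel argument. For each new SpecId, four
// fresh ids are reserved (see LocalArgInfo): the array-size OpSpecConstant,
// the array and pointer-to-array types, and the workgroup variable itself,
// which are emitted later in GenerateSPIRVTypes.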
1418
1419 void SPIRVProducerPass::FindType(Type *Ty) {
1420 TypeList &TyList = getTypeList();
1421
1422 if (0 != TyList.idFor(Ty)) {
1423 return;
1424 }
1425
1426 if (Ty->isPointerTy()) {
1427 auto AddrSpace = Ty->getPointerAddressSpace();
1428 if ((AddressSpace::Constant == AddrSpace) ||
1429 (AddressSpace::Global == AddrSpace)) {
1430 auto PointeeTy = Ty->getPointerElementType();
1431
1432 if (PointeeTy->isStructTy() &&
1433 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1434 FindType(PointeeTy);
1435 auto ActualPointerTy =
1436 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1437 FindType(ActualPointerTy);
1438 return;
1439 }
1440 }
1441 }
1442
David Neto862b7d82018-06-14 18:48:37 -04001443 // By convention, LLVM array type with 0 elements will map to
1444 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1445 // has a constant number of elements. We need to support type of the
1446 // constant.
1447 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1448 if (arrayTy->getNumElements() > 0) {
1449 LLVMContext &Context = Ty->getContext();
1450 FindType(Type::getInt32Ty(Context));
1451 }
David Neto22f144c2017-06-12 14:26:21 -04001452 }
1453
1454 for (Type *SubTy : Ty->subtypes()) {
1455 FindType(SubTy);
1456 }
1457
1458 TyList.insert(Ty);
1459}
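// Examples of the array convention noted above (names illustrative):
//   [0 x float]  ->  OpTypeRuntimeArray %float     (no length constant)
//   [4 x float]  ->  OpTypeArray %float %uint_4    (the length needs an i32
//                                                   constant, hence the
//                                                   FindType of i32 above)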
1460
1461void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1462 // If the global variable has a (non undef) initializer.
1463 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001464 // Generate the constant if it's not the initializer to a module scope
1465 // constant that we will expect in a storage buffer.
1466 const bool module_scope_constant_external_init =
1467 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1468 clspv::Option::ModuleConstantsInStorageBuffer();
1469 if (!module_scope_constant_external_init) {
1470 FindConstant(GV.getInitializer());
1471 }
David Neto22f144c2017-06-12 14:26:21 -04001472 }
1473}
1474
1475void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1476 // Investigate constants in function body.
1477 for (BasicBlock &BB : F) {
1478 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001479 if (auto *call = dyn_cast<CallInst>(&I)) {
1480 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001481 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001482 // We've handled these constants elsewhere, so skip it.
1483 continue;
1484 }
Alan Baker202c8c72018-08-13 13:47:44 -04001485 if (name.startswith(clspv::ResourceAccessorFunction())) {
1486 continue;
1487 }
1488 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001489 continue;
1490 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001491 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1492 // Skip the first operand that has the SPIR-V Opcode
1493 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1494 if (isa<Constant>(I.getOperand(i)) &&
1495 !isa<GlobalValue>(I.getOperand(i))) {
1496 FindConstant(I.getOperand(i));
1497 }
1498 }
1499 continue;
1500 }
David Neto22f144c2017-06-12 14:26:21 -04001501 }
1502
1503 if (isa<AllocaInst>(I)) {
1504        // An alloca instruction has a constant for its number of elements. Ignore it.
1505 continue;
1506 } else if (isa<ShuffleVectorInst>(I)) {
1507 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1508 // Ignore constant for mask of shuffle vector instruction.
1509 if (i == 2) {
1510 continue;
1511 }
1512
1513 if (isa<Constant>(I.getOperand(i)) &&
1514 !isa<GlobalValue>(I.getOperand(i))) {
1515 FindConstant(I.getOperand(i));
1516 }
1517 }
1518
1519 continue;
1520 } else if (isa<InsertElementInst>(I)) {
1521 // Handle InsertElement with <4 x i8> specially.
1522 Type *CompositeTy = I.getOperand(0)->getType();
1523 if (is4xi8vec(CompositeTy)) {
1524 LLVMContext &Context = CompositeTy->getContext();
1525 if (isa<Constant>(I.getOperand(0))) {
1526 FindConstant(I.getOperand(0));
1527 }
1528
1529 if (isa<Constant>(I.getOperand(1))) {
1530 FindConstant(I.getOperand(1));
1531 }
1532
1533 // Add mask constant 0xFF.
1534 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1535 FindConstant(CstFF);
1536
1537 // Add shift amount constant.
1538 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1539 uint64_t Idx = CI->getZExtValue();
1540 Constant *CstShiftAmount =
1541 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1542 FindConstant(CstShiftAmount);
1543 }
1544
1545 continue;
1546 }
1547
1548 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1549 // Ignore constant for index of InsertElement instruction.
1550 if (i == 2) {
1551 continue;
1552 }
1553
1554 if (isa<Constant>(I.getOperand(i)) &&
1555 !isa<GlobalValue>(I.getOperand(i))) {
1556 FindConstant(I.getOperand(i));
1557 }
1558 }
1559
1560 continue;
1561 } else if (isa<ExtractElementInst>(I)) {
1562 // Handle ExtractElement with <4 x i8> specially.
1563 Type *CompositeTy = I.getOperand(0)->getType();
1564 if (is4xi8vec(CompositeTy)) {
1565 LLVMContext &Context = CompositeTy->getContext();
1566 if (isa<Constant>(I.getOperand(0))) {
1567 FindConstant(I.getOperand(0));
1568 }
1569
1570 // Add mask constant 0xFF.
1571 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1572 FindConstant(CstFF);
1573
1574 // Add shift amount constant.
1575 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1576 uint64_t Idx = CI->getZExtValue();
1577 Constant *CstShiftAmount =
1578 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1579 FindConstant(CstShiftAmount);
1580 } else {
1581 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1582 FindConstant(Cst8);
1583 }
1584
1585 continue;
1586 }
1587
1588 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1589 // Ignore constant for index of ExtractElement instruction.
1590 if (i == 1) {
1591 continue;
1592 }
1593
1594 if (isa<Constant>(I.getOperand(i)) &&
1595 !isa<GlobalValue>(I.getOperand(i))) {
1596 FindConstant(I.getOperand(i));
1597 }
1598 }
1599
1600 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001601 } else if ((Instruction::Xor == I.getOpcode()) &&
1602 I.getType()->isIntegerTy(1)) {
1603        // Special case: an i1 Xor with a constant-true operand maps to
1604        // OpLogicalNot in SPIR-V, so the true constant itself does not
1605        // need to be emitted.
David Neto22f144c2017-06-12 14:26:21 -04001606 bool foundConstantTrue = false;
1607 for (Use &Op : I.operands()) {
1608 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1609 auto CI = cast<ConstantInt>(Op);
1610
1611 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001612 // If we already found the true constant, we might (probably only
1613 // on -O0) have an OpLogicalNot which is taking a constant
1614 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001615 FindConstant(Op);
1616 } else {
1617 foundConstantTrue = true;
1618 }
1619 }
1620 }
1621
1622 continue;
David Netod2de94a2017-08-28 17:27:47 -04001623 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001624 // Special case if i8 is not generally handled.
1625 if (!clspv::Option::Int8Support()) {
1626 // For truncation to i8 we mask against 255.
1627 Type *ToTy = I.getType();
1628 if (8u == ToTy->getPrimitiveSizeInBits()) {
1629 LLVMContext &Context = ToTy->getContext();
1630 Constant *Cst255 =
1631 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1632 FindConstant(Cst255);
1633 }
David Netod2de94a2017-08-28 17:27:47 -04001634 }
Neil Henning39672102017-09-29 14:33:13 +01001635 } else if (isa<AtomicRMWInst>(I)) {
1636 LLVMContext &Context = I.getContext();
1637
1638 FindConstant(
1639 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1640 FindConstant(ConstantInt::get(
1641 Type::getInt32Ty(Context),
1642 spv::MemorySemanticsUniformMemoryMask |
1643 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001644 }
1645
1646 for (Use &Op : I.operands()) {
1647 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1648 FindConstant(Op);
1649 }
1650 }
1651 }
1652 }
1653}
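// Example of the <4 x i8> handling above (operands hypothetical): for
//   insertelement <4 x i8> %v, i8 %b, i32 2
// the pass pre-registers the i32 mask 0xFF and the shift amount 16 (index*8),
// because such vectors are emitted as a packed 32-bit integer (see the
// matching constant packing in GenerateSPIRVConstants).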
1654
1655void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001656 ValueList &CstList = getConstantList();
1657
David Netofb9a7972017-08-25 17:08:24 -04001658 // If V is already tracked, ignore it.
1659 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001660 return;
1661 }
1662
David Neto862b7d82018-06-14 18:48:37 -04001663 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1664 return;
1665 }
1666
David Neto22f144c2017-06-12 14:26:21 -04001667 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001668 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001669
1670 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001671 if (is4xi8vec(CstTy)) {
1672 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001673 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001674 }
1675 }
1676
1677 if (Cst->getNumOperands()) {
1678 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1679 ++I) {
1680 FindConstant(*I);
1681 }
1682
David Netofb9a7972017-08-25 17:08:24 -04001683 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001684 return;
1685 } else if (const ConstantDataSequential *CDS =
1686 dyn_cast<ConstantDataSequential>(Cst)) {
1687 // Add constants for each element to constant list.
1688 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1689 Constant *EleCst = CDS->getElementAsConstant(i);
1690 FindConstant(EleCst);
1691 }
1692 }
1693
1694 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001695 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001696 }
1697}
1698
1699spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1700 switch (AddrSpace) {
1701 default:
1702 llvm_unreachable("Unsupported OpenCL address space");
1703 case AddressSpace::Private:
1704 return spv::StorageClassFunction;
1705 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001706 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001707 case AddressSpace::Constant:
1708 return clspv::Option::ConstantArgsInUniformBuffer()
1709 ? spv::StorageClassUniform
1710 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001711 case AddressSpace::Input:
1712 return spv::StorageClassInput;
1713 case AddressSpace::Local:
1714 return spv::StorageClassWorkgroup;
1715 case AddressSpace::UniformConstant:
1716 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001717 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001718 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001719 case AddressSpace::ModuleScopePrivate:
1720 return spv::StorageClassPrivate;
1721 }
1722}
1723
1724 spv::StorageClass
1725SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1726 switch (arg_kind) {
1727 case clspv::ArgKind::Buffer:
1728 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001729 case clspv::ArgKind::BufferUBO:
1730 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001731 case clspv::ArgKind::Pod:
1732 return clspv::Option::PodArgsInUniformBuffer()
1733 ? spv::StorageClassUniform
1734 : spv::StorageClassStorageBuffer;
1735 case clspv::ArgKind::Local:
1736 return spv::StorageClassWorkgroup;
1737 case clspv::ArgKind::ReadOnlyImage:
1738 case clspv::ArgKind::WriteOnlyImage:
1739 case clspv::ArgKind::Sampler:
1740 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001741 default:
1742 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001743 }
1744}
1745
1746 spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1747 return StringSwitch<spv::BuiltIn>(Name)
1748 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1749 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1750 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1751 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1752 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1753 .Default(spv::BuiltInMax);
1754}
1755
1756void SPIRVProducerPass::GenerateExtInstImport() {
1757 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1758 uint32_t &ExtInstImportID = getOpExtInstImportID();
1759
1760 //
1761 // Generate OpExtInstImport.
1762 //
1763 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001764 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001765 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1766 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001767}
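// The single instruction emitted above is simply:
//   %<ExtInstImportID> = OpExtInstImport "GLSL.std.450"
// and every OpExtInst generated later references that id.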
1768
1769 void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1770 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001771 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1772 ValueMapType &VMap = getValueMap();
1773 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001774 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001775
1776 // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
1777 // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
1778 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1779
1780 for (Type *Ty : getTypeList()) {
1781 // Update TypeMap with nextID for reference later.
1782 TypeMap[Ty] = nextID;
1783
1784 switch (Ty->getTypeID()) {
1785 default: {
1786 Ty->print(errs());
1787 llvm_unreachable("Unsupported type???");
1788 break;
1789 }
1790 case Type::MetadataTyID:
1791 case Type::LabelTyID: {
1792 // Ignore these types.
1793 break;
1794 }
1795 case Type::PointerTyID: {
1796 PointerType *PTy = cast<PointerType>(Ty);
1797 unsigned AddrSpace = PTy->getAddressSpace();
1798
1799 // For the purposes of our Vulkan SPIR-V type system, constant and global
1800 // are conflated.
1801 bool UseExistingOpTypePointer = false;
1802 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001803 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1804 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001805 // Check to see if we already created this type (for instance, if we
1806 // had a constant <type>* and a global <type>*, the type would be
1807 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001808 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1809 if (0 < TypeMap.count(GlobalTy)) {
1810 TypeMap[PTy] = TypeMap[GlobalTy];
1811 UseExistingOpTypePointer = true;
1812 break;
1813 }
David Neto22f144c2017-06-12 14:26:21 -04001814 }
1815 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001816 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1817 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001818
alan-bakerb6b09dc2018-11-08 16:59:28 -05001819 // Check to see if we already created this type (for instance, if we
1820 // had a constant <type>* and a global <type>*, the type would be
1821 // created by one of these types, and shared by both).
1822 auto ConstantTy =
1823 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001824 if (0 < TypeMap.count(ConstantTy)) {
1825 TypeMap[PTy] = TypeMap[ConstantTy];
1826 UseExistingOpTypePointer = true;
1827 }
David Neto22f144c2017-06-12 14:26:21 -04001828 }
1829 }
1830
David Neto862b7d82018-06-14 18:48:37 -04001831 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001832
David Neto862b7d82018-06-14 18:48:37 -04001833 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001834 //
1835 // Generate OpTypePointer.
1836 //
1837
1838 // OpTypePointer
1839 // Ops[0] = Storage Class
1840 // Ops[1] = Element Type ID
1841 SPIRVOperandList Ops;
1842
David Neto257c3892018-04-11 13:19:45 -04001843 Ops << MkNum(GetStorageClass(AddrSpace))
1844 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001845
David Neto87846742018-04-11 17:36:22 -04001846 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001847 SPIRVInstList.push_back(Inst);
1848 }
David Neto22f144c2017-06-12 14:26:21 -04001849 break;
1850 }
1851 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001852 StructType *STy = cast<StructType>(Ty);
1853
1854 // Handle sampler type.
1855 if (STy->isOpaque()) {
1856 if (STy->getName().equals("opencl.sampler_t")) {
1857 //
1858 // Generate OpTypeSampler
1859 //
1860 // Empty Ops.
1861 SPIRVOperandList Ops;
1862
David Neto87846742018-04-11 17:36:22 -04001863 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001864 SPIRVInstList.push_back(Inst);
1865 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001866 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1867 STy->getName().startswith("opencl.image1d_wo_t") ||
1868 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001869 STy->getName().startswith("opencl.image2d_wo_t") ||
1870 STy->getName().startswith("opencl.image3d_ro_t") ||
1871 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001872 //
1873 // Generate OpTypeImage
1874 //
1875 // Ops[0] = Sampled Type ID
1876 // Ops[1] = Dim ID
1877 // Ops[2] = Depth (Literal Number)
1878 // Ops[3] = Arrayed (Literal Number)
1879 // Ops[4] = MS (Literal Number)
1880 // Ops[5] = Sampled (Literal Number)
1881 // Ops[6] = Image Format ID
1882 //
1883 SPIRVOperandList Ops;
1884
alan-bakerf67468c2019-11-25 15:51:49 -05001885 uint32_t ImageTyID = nextID++;
1886 uint32_t SampledTyID = 0;
1887 if (STy->getName().contains(".float")) {
1888 SampledTyID = lookupType(Type::getFloatTy(Context));
1889 } else if (STy->getName().contains(".uint")) {
1890 SampledTyID = lookupType(Type::getInt32Ty(Context));
1891 } else if (STy->getName().contains(".int")) {
1892 // Generate a signed 32-bit integer if necessary.
1893 if (int32ID == 0) {
1894 int32ID = nextID++;
1895 SPIRVOperandList intOps;
1896 intOps << MkNum(32);
1897 intOps << MkNum(1);
1898 auto signed_int =
1899 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1900 SPIRVInstList.push_back(signed_int);
1901 }
1902 SampledTyID = int32ID;
1903
1904 // Generate a vec4 of the signed int if necessary.
1905 if (v4int32ID == 0) {
1906 v4int32ID = nextID++;
1907 SPIRVOperandList vecOps;
1908 vecOps << MkId(int32ID);
1909 vecOps << MkNum(4);
1910 auto int_vec =
1911 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
1912 SPIRVInstList.push_back(int_vec);
1913 }
1914 } else {
1915 // This was likely an UndefValue.
1916 SampledTyID = lookupType(Type::getFloatTy(Context));
1917 }
David Neto257c3892018-04-11 13:19:45 -04001918 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001919
1920 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05001921 if (STy->getName().startswith("opencl.image1d_ro_t") ||
1922 STy->getName().startswith("opencl.image1d_wo_t")) {
1923 DimID = spv::Dim1D;
1924 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
1925 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001926 DimID = spv::Dim3D;
1927 }
David Neto257c3892018-04-11 13:19:45 -04001928 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001929
1930 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001931 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001932
1933 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001934 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001935
1936 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001937 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001938
1939 // TODO: Set up Sampled.
1940 //
1941 // From Spec
1942 //
1943 // 0 indicates this is only known at run time, not at compile time
1944 // 1 indicates will be used with sampler
1945 // 2 indicates will be used without a sampler (a storage image)
1946 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05001947 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04001948 Sampled = 2;
1949 }
David Neto257c3892018-04-11 13:19:45 -04001950 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04001951
1952 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04001953 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04001954
alan-bakerf67468c2019-11-25 15:51:49 -05001955 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001956 SPIRVInstList.push_back(Inst);
1957 break;
1958 }
1959 }
1960
1961 //
1962 // Generate OpTypeStruct
1963 //
1964 // Ops[0] ... Ops[n] = Member IDs
1965 SPIRVOperandList Ops;
1966
1967 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04001968 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04001969 }
1970
David Neto22f144c2017-06-12 14:26:21 -04001971 uint32_t STyID = nextID;
1972
alan-bakerb6b09dc2018-11-08 16:59:28 -05001973 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001974 SPIRVInstList.push_back(Inst);
1975
1976 // Generate OpMemberDecorate.
1977 auto DecoInsertPoint =
1978 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
1979 [](SPIRVInstruction *Inst) -> bool {
1980 return Inst->getOpcode() != spv::OpDecorate &&
1981 Inst->getOpcode() != spv::OpMemberDecorate &&
1982 Inst->getOpcode() != spv::OpExtInstImport;
1983 });
1984
David Netoc463b372017-08-10 15:32:21 -04001985 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04001986 // Search for the correct offsets if this type was remapped.
1987 std::vector<uint32_t> *offsets = nullptr;
1988 auto iter = RemappedUBOTypeOffsets.find(STy);
1989 if (iter != RemappedUBOTypeOffsets.end()) {
1990 offsets = &iter->second;
1991 }
David Netoc463b372017-08-10 15:32:21 -04001992
David Neto862b7d82018-06-14 18:48:37 -04001993 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04001994 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1995 MemberIdx++) {
1996 // Ops[0] = Structure Type ID
1997 // Ops[1] = Member Index(Literal Number)
1998 // Ops[2] = Decoration (Offset)
1999 // Ops[3] = Byte Offset (Literal Number)
2000 Ops.clear();
2001
David Neto257c3892018-04-11 13:19:45 -04002002 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002003
alan-bakerb6b09dc2018-11-08 16:59:28 -05002004 auto ByteOffset =
2005 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04002006 if (offsets) {
2007 ByteOffset = (*offsets)[MemberIdx];
2008 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05002009 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04002010 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04002011 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04002012
David Neto87846742018-04-11 17:36:22 -04002013 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002014 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002015 }
2016
2017 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002018 if (StructTypesNeedingBlock.idFor(STy)) {
2019 Ops.clear();
2020 // Use Block decorations with StorageBuffer storage class.
2021 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002022
David Neto862b7d82018-06-14 18:48:37 -04002023 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2024 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002025 }
2026 break;
2027 }
2028 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002029 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002030
2031 if (BitWidth == 1) {
David Neto87846742018-04-11 17:36:22 -04002032 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002033 SPIRVInstList.push_back(Inst);
2034 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002035 if (!clspv::Option::Int8Support()) {
2036 // i8 is added to TypeMap as i32.
2037 // No matter what LLVM type is requested first, always alias the
2038 // second one's SPIR-V type to be the same as the one we generated
2039 // first.
2040 unsigned aliasToWidth = 0;
2041 if (BitWidth == 8) {
2042 aliasToWidth = 32;
2043 BitWidth = 32;
2044 } else if (BitWidth == 32) {
2045 aliasToWidth = 8;
2046 }
2047 if (aliasToWidth) {
2048 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2049 auto where = TypeMap.find(otherType);
2050 if (where == TypeMap.end()) {
2051 // Go ahead and make it, but also map the other type to it.
2052 TypeMap[otherType] = nextID;
2053 } else {
2054            // Alias this SPIR-V type to the existing type.
2055 TypeMap[Ty] = where->second;
2056 break;
2057 }
David Neto391aeb12017-08-26 15:51:58 -04002058 }
David Neto22f144c2017-06-12 14:26:21 -04002059 }
2060
David Neto257c3892018-04-11 13:19:45 -04002061 SPIRVOperandList Ops;
2062 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002063
2064 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002065 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002066 }
2067 break;
2068 }
2069 case Type::HalfTyID:
2070 case Type::FloatTyID:
2071 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002072 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2073 SPIRVOperand *WidthOp =
2074 new SPIRVOperand(SPIRVOperandType::LITERAL_INTEGER, BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002075
2076 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002077 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04002078 break;
2079 }
2080 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002081 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002082 const uint64_t Length = ArrTy->getArrayNumElements();
2083 if (Length == 0) {
2084 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002085
David Neto862b7d82018-06-14 18:48:37 -04002086 // Only generate the type once.
2087 // TODO(dneto): Can it ever be generated more than once?
2088 // Doesn't LLVM type uniqueness guarantee we'll only see this
2089 // once?
2090 Type *EleTy = ArrTy->getArrayElementType();
2091 if (OpRuntimeTyMap.count(EleTy) == 0) {
2092 uint32_t OpTypeRuntimeArrayID = nextID;
2093 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002094
David Neto862b7d82018-06-14 18:48:37 -04002095 //
2096 // Generate OpTypeRuntimeArray.
2097 //
David Neto22f144c2017-06-12 14:26:21 -04002098
David Neto862b7d82018-06-14 18:48:37 -04002099 // OpTypeRuntimeArray
2100 // Ops[0] = Element Type ID
2101 SPIRVOperandList Ops;
2102 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002103
David Neto862b7d82018-06-14 18:48:37 -04002104 SPIRVInstList.push_back(
2105 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002106
David Neto862b7d82018-06-14 18:48:37 -04002107 if (Hack_generate_runtime_array_stride_early) {
2108 // Generate OpDecorate.
2109 auto DecoInsertPoint = std::find_if(
2110 SPIRVInstList.begin(), SPIRVInstList.end(),
2111 [](SPIRVInstruction *Inst) -> bool {
2112 return Inst->getOpcode() != spv::OpDecorate &&
2113 Inst->getOpcode() != spv::OpMemberDecorate &&
2114 Inst->getOpcode() != spv::OpExtInstImport;
2115 });
David Neto22f144c2017-06-12 14:26:21 -04002116
David Neto862b7d82018-06-14 18:48:37 -04002117 // Ops[0] = Target ID
2118 // Ops[1] = Decoration (ArrayStride)
2119 // Ops[2] = Stride Number(Literal Number)
2120 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002121
David Neto862b7d82018-06-14 18:48:37 -04002122 Ops << MkId(OpTypeRuntimeArrayID)
2123 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002124 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002125
David Neto862b7d82018-06-14 18:48:37 -04002126 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2127 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2128 }
2129 }
David Neto22f144c2017-06-12 14:26:21 -04002130
David Neto862b7d82018-06-14 18:48:37 -04002131 } else {
David Neto22f144c2017-06-12 14:26:21 -04002132
David Neto862b7d82018-06-14 18:48:37 -04002133 //
2134 // Generate OpConstant and OpTypeArray.
2135 //
2136
2137 //
2138 // Generate OpConstant for array length.
2139 //
2140 // Ops[0] = Result Type ID
2141 // Ops[1] .. Ops[n] = Values LiteralNumber
2142 SPIRVOperandList Ops;
2143
2144 Type *LengthTy = Type::getInt32Ty(Context);
2145 uint32_t ResTyID = lookupType(LengthTy);
2146 Ops << MkId(ResTyID);
2147
2148 assert(Length < UINT32_MAX);
2149 Ops << MkNum(static_cast<uint32_t>(Length));
2150
2151 // Add constant for length to constant list.
2152 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2153 AllocatedVMap[CstLength] = nextID;
2154 VMap[CstLength] = nextID;
2155 uint32_t LengthID = nextID;
2156
2157 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2158 SPIRVInstList.push_back(CstInst);
2159
2160 // Remember to generate ArrayStride later
2161 getTypesNeedingArrayStride().insert(Ty);
2162
2163 //
2164 // Generate OpTypeArray.
2165 //
2166 // Ops[0] = Element Type ID
2167 // Ops[1] = Array Length Constant ID
2168 Ops.clear();
2169
2170 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2171 Ops << MkId(EleTyID) << MkId(LengthID);
2172
2173 // Update TypeMap with nextID.
2174 TypeMap[Ty] = nextID;
2175
2176 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2177 SPIRVInstList.push_back(ArrayInst);
2178 }
David Neto22f144c2017-06-12 14:26:21 -04002179 break;
2180 }
2181 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002182 // <4 x i8> is changed to i32 if i8 is not generally supported.
2183 if (!clspv::Option::Int8Support() &&
2184 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002185 if (Ty->getVectorNumElements() == 4) {
2186 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2187 break;
2188 } else {
2189 Ty->print(errs());
2190 llvm_unreachable("Support above i8 vector type");
2191 }
2192 }
2193
2194 // Ops[0] = Component Type ID
2195 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002196 SPIRVOperandList Ops;
2197 Ops << MkId(lookupType(Ty->getVectorElementType()))
2198 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002199
alan-bakerb6b09dc2018-11-08 16:59:28 -05002200 SPIRVInstruction *inst =
2201 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002202 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002203 break;
2204 }
2205 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002206 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002207 SPIRVInstList.push_back(Inst);
2208 break;
2209 }
2210 case Type::FunctionTyID: {
2211 // Generate SPIRV instruction for function type.
2212 FunctionType *FTy = cast<FunctionType>(Ty);
2213
2214 // Ops[0] = Return Type ID
2215 // Ops[1] ... Ops[n] = Parameter Type IDs
2216 SPIRVOperandList Ops;
2217
2218 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002219 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002220
2221 // Find SPIRV instructions for parameter types
2222 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2223 // Find SPIRV instruction for parameter type.
2224 auto ParamTy = FTy->getParamType(k);
2225 if (ParamTy->isPointerTy()) {
2226 auto PointeeTy = ParamTy->getPointerElementType();
2227 if (PointeeTy->isStructTy() &&
2228 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2229 ParamTy = PointeeTy;
2230 }
2231 }
2232
David Netoc6f3ab22018-04-06 18:02:31 -04002233 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002234 }
2235
David Neto87846742018-04-11 17:36:22 -04002236 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002237 SPIRVInstList.push_back(Inst);
2238 break;
2239 }
2240 }
2241 }
2242
2243 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002244 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002245 //
2246 // Generate OpTypeSampledImage.
2247 //
2248 // Ops[0] = Image Type ID
2249 //
2250 SPIRVOperandList Ops;
2251
David Netoc6f3ab22018-04-06 18:02:31 -04002252 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002253
alan-bakerabd82722019-12-03 17:14:51 -05002254 // Update the image type map.
2255 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002256
David Neto87846742018-04-11 17:36:22 -04002257 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002258 SPIRVInstList.push_back(Inst);
2259 }
David Netoc6f3ab22018-04-06 18:02:31 -04002260
2261 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002262 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2263 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002264 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002265
2266 // Generate the spec constant.
2267 SPIRVOperandList Ops;
2268 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002269 SPIRVInstList.push_back(
2270 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002271
2272 // Generate the array type.
2273 Ops.clear();
2274 // The element type must have been created.
2275 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2276 assert(elem_ty_id);
2277 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2278
2279 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002280 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002281
2282 Ops.clear();
2283 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002284 SPIRVInstList.push_back(new SPIRVInstruction(
2285 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002286 }
David Neto22f144c2017-06-12 14:26:21 -04002287}
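// Sketch of what the pointer-to-local loop above emits per SpecId (ids are
// illustrative; the SpecId decoration itself is attached elsewhere):
//   %size  = OpSpecConstant %uint 1
//   %arr   = OpTypeArray %elem_ty %size
//   %p_arr = OpTypePointer Workgroup %arr
// The real array length is supplied through the specialization constant at
// pipeline-creation time.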
2288
2289void SPIRVProducerPass::GenerateSPIRVConstants() {
2290 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2291 ValueMapType &VMap = getValueMap();
2292 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2293 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002294 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002295
2296 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002297 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002298 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002299
2300 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002301 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002302 continue;
2303 }
2304
David Netofb9a7972017-08-25 17:08:24 -04002305 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002306 VMap[Cst] = nextID;
2307
2308 //
2309 // Generate OpConstant.
2310 //
2311
2312 // Ops[0] = Result Type ID
2313 // Ops[1] .. Ops[n] = Values LiteralNumber
2314 SPIRVOperandList Ops;
2315
David Neto257c3892018-04-11 13:19:45 -04002316 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002317
2318 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002319 spv::Op Opcode = spv::OpNop;
2320
2321 if (isa<UndefValue>(Cst)) {
2322 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002323 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002324 if (hack_undef && IsTypeNullable(Cst->getType())) {
2325 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002326 }
David Neto22f144c2017-06-12 14:26:21 -04002327 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2328 unsigned BitWidth = CI->getBitWidth();
2329 if (BitWidth == 1) {
2330 // If the bitwidth of constant is 1, generate OpConstantTrue or
2331 // OpConstantFalse.
2332 if (CI->getZExtValue()) {
2333 // Ops[0] = Result Type ID
2334 Opcode = spv::OpConstantTrue;
2335 } else {
2336 // Ops[0] = Result Type ID
2337 Opcode = spv::OpConstantFalse;
2338 }
David Neto22f144c2017-06-12 14:26:21 -04002339 } else {
2340 auto V = CI->getZExtValue();
2341 LiteralNum.push_back(V & 0xFFFFFFFF);
2342
2343 if (BitWidth > 32) {
2344 LiteralNum.push_back(V >> 32);
2345 }
2346
2347 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002348
David Neto257c3892018-04-11 13:19:45 -04002349 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002350 }
2351 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2352 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2353 Type *CFPTy = CFP->getType();
2354 if (CFPTy->isFloatTy()) {
2355 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002356 } else if (CFPTy->isDoubleTy()) {
2357 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2358 LiteralNum.push_back(FPVal >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002359 } else {
2360 CFPTy->print(errs());
2361 llvm_unreachable("Implement this ConstantFP Type");
2362 }
2363
2364 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002365
David Neto257c3892018-04-11 13:19:45 -04002366 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002367 } else if (isa<ConstantDataSequential>(Cst) &&
2368 cast<ConstantDataSequential>(Cst)->isString()) {
2369 Cst->print(errs());
2370 llvm_unreachable("Implement this Constant");
2371
2372 } else if (const ConstantDataSequential *CDS =
2373 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002374 // Let's convert <4 x i8> constant to int constant specially.
2375 // This case occurs when all the values are specified as constant
2376 // ints.
2377 Type *CstTy = Cst->getType();
2378 if (is4xi8vec(CstTy)) {
2379 LLVMContext &Context = CstTy->getContext();
2380
2381 //
2382 // Generate OpConstant with OpTypeInt 32 0.
2383 //
Neil Henning39672102017-09-29 14:33:13 +01002384 uint32_t IntValue = 0;
2385 for (unsigned k = 0; k < 4; k++) {
2386 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002387 IntValue = (IntValue << 8) | (Val & 0xffu);
2388 }
2389
2390 Type *i32 = Type::getInt32Ty(Context);
2391 Constant *CstInt = ConstantInt::get(i32, IntValue);
2392 // If this constant is already registered on VMap, use it.
2393 if (VMap.count(CstInt)) {
2394 uint32_t CstID = VMap[CstInt];
2395 VMap[Cst] = CstID;
2396 continue;
2397 }
2398
David Neto257c3892018-04-11 13:19:45 -04002399 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002400
David Neto87846742018-04-11 17:36:22 -04002401 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002402 SPIRVInstList.push_back(CstInst);
2403
2404 continue;
2405 }
2406
2407 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002408 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2409 Constant *EleCst = CDS->getElementAsConstant(k);
2410 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002411 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002412 }
2413
2414 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002415 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2416 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002417 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002418 Type *CstTy = Cst->getType();
2419 if (is4xi8vec(CstTy)) {
2420 LLVMContext &Context = CstTy->getContext();
2421
2422 //
2423 // Generate OpConstant with OpTypeInt 32 0.
2424 //
Neil Henning39672102017-09-29 14:33:13 +01002425 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002426 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2427 I != E; ++I) {
2428 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002429 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002430 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2431 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002432 }
David Neto49351ac2017-08-26 17:32:20 -04002433 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002434 }
2435
David Neto49351ac2017-08-26 17:32:20 -04002436 Type *i32 = Type::getInt32Ty(Context);
2437 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002438 // If this constant is already registered on VMap, use it.
2439 if (VMap.count(CstInt)) {
2440 uint32_t CstID = VMap[CstInt];
2441 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002442 continue;
David Neto22f144c2017-06-12 14:26:21 -04002443 }
2444
David Neto257c3892018-04-11 13:19:45 -04002445 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002446
David Neto87846742018-04-11 17:36:22 -04002447 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002448 SPIRVInstList.push_back(CstInst);
2449
David Neto19a1bad2017-08-25 15:01:41 -04002450 continue;
David Neto22f144c2017-06-12 14:26:21 -04002451 }
2452
2453 // We use a constant composite in SPIR-V for our constant aggregate in
2454 // LLVM.
2455 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002456
2457 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2458 // Look up the ID of the element of this aggregate (which we will
2459 // previously have created a constant for).
2460 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2461
2462 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002463 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002464 }
2465 } else if (Cst->isNullValue()) {
2466 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002467 } else {
2468 Cst->print(errs());
2469 llvm_unreachable("Unsupported Constant???");
2470 }
2471
alan-baker5b86ed72019-02-15 08:26:50 -05002472 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2473 // Null pointer requires variable pointers.
2474 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2475 }
2476
David Neto87846742018-04-11 17:36:22 -04002477 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002478 SPIRVInstList.push_back(CstInst);
2479 }
2480}
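// Worked example of the <4 x i8> packing above (constant is hypothetical):
//   <i8 1, i8 2, i8 3, i8 4> -> (((1 << 8 | 2) << 8 | 3) << 8 | 4) = 0x01020304
// which is emitted as one OpConstant of the 32-bit integer type, and the
// original vector constant is aliased to that id in the value map.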
2481
2482void SPIRVProducerPass::GenerateSamplers(Module &M) {
2483 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002484
alan-bakerb6b09dc2018-11-08 16:59:28 -05002485 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002486 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002487 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2488 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002489
David Neto862b7d82018-06-14 18:48:37 -04002490 // We might have samplers in the sampler map that are not used
2491 // in the translation unit. We need to allocate variables
2492 // for them and bindings too.
2493 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002494
Kévin Petitdf71de32019-04-09 14:09:50 +01002495 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002496 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002497 if (!var_fn)
2498 return;
alan-baker09cb9802019-12-10 13:16:27 -05002499
David Neto862b7d82018-06-14 18:48:37 -04002500 for (auto user : var_fn->users()) {
2501 // Populate SamplerLiteralToDescriptorSetMap and
2502 // SamplerLiteralToBindingMap.
2503 //
2504 // Look for calls like
2505 // call %opencl.sampler_t addrspace(2)*
2506 // @clspv.sampler.var.literal(
2507 // i32 descriptor,
2508 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002509 // i32 (index-into-sampler-map|sampler_mask))
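    //
    // For example (illustrative IR only; the third operand is an index into
    // the sampler map when clspv::Option::UseSamplerMap() is on, otherwise a
    // literal sampler mask):
    //   call %opencl.sampler_t addrspace(2)*
    //       @clspv.sampler.var.literal(i32 0, i32 2, i32 5)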
alan-bakerb6b09dc2018-11-08 16:59:28 -05002510 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002511 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002512 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002513 auto sampler_value = third_param;
2514 if (clspv::Option::UseSamplerMap()) {
2515 if (third_param >= sampler_map.size()) {
2516 errs() << "Out of bounds index to sampler map: " << third_param;
2517 llvm_unreachable("bad sampler init: out of bounds");
2518 }
2519 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002520 }
2521
David Neto862b7d82018-06-14 18:48:37 -04002522 const auto descriptor_set = static_cast<unsigned>(
2523 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2524 const auto binding = static_cast<unsigned>(
2525 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2526
2527 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2528 SamplerLiteralToBindingMap[sampler_value] = binding;
2529 used_bindings.insert(binding);
2530 }
2531 }
2532
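  // Second pass over the users: allocate one OpVariable per distinct sampler
  // value and decorate it. Each literal sampler ends up roughly as (sketch;
  // names are illustrative):
  //   %sampler = OpVariable %ptr_UniformConstant_sampler UniformConstant
  //   OpDecorate %sampler DescriptorSet <set>
  //   OpDecorate %sampler Binding <binding>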
alan-baker09cb9802019-12-10 13:16:27 -05002533 DenseSet<size_t> seen;
2534 for (auto user : var_fn->users()) {
2535 if (!isa<CallInst>(user))
2536 continue;
2537
2538 auto call = cast<CallInst>(user);
2539 const unsigned third_param = static_cast<unsigned>(
2540 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2541
2542 // Already allocated a variable for this value.
2543 if (!seen.insert(third_param).second)
2544 continue;
2545
2546 auto sampler_value = third_param;
2547 if (clspv::Option::UseSamplerMap()) {
2548 sampler_value = sampler_map[third_param].first;
2549 }
2550
David Neto22f144c2017-06-12 14:26:21 -04002551 // Generate OpVariable.
2552 //
2553 // Ops[0] : Result Type ID
2554 // Ops[1] : Storage Class
2555 SPIRVOperandList Ops;
2556
David Neto257c3892018-04-11 13:19:45 -04002557 Ops << MkId(lookupType(SamplerTy))
2558 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002559
David Neto862b7d82018-06-14 18:48:37 -04002560 auto sampler_var_id = nextID++;
2561 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002562 SPIRVInstList.push_back(Inst);
2563
alan-baker09cb9802019-12-10 13:16:27 -05002564 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002565
2566 // Find Insert Point for OpDecorate.
2567 auto DecoInsertPoint =
2568 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2569 [](SPIRVInstruction *Inst) -> bool {
2570 return Inst->getOpcode() != spv::OpDecorate &&
2571 Inst->getOpcode() != spv::OpMemberDecorate &&
2572 Inst->getOpcode() != spv::OpExtInstImport;
2573 });
2574
2575 // Ops[0] = Target ID
2576 // Ops[1] = Decoration (DescriptorSet)
2577 // Ops[2] = LiteralNumber according to Decoration
2578 Ops.clear();
2579
David Neto862b7d82018-06-14 18:48:37 -04002580 unsigned descriptor_set;
2581 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002582 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002583 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002584 // This sampler is not actually used. Assign it the next unused binding.
2585 for (binding = 0; used_bindings.count(binding); binding++)
2586 ;
2587 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2588 used_bindings.insert(binding);
2589 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002590 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2591 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002592
alan-baker09cb9802019-12-10 13:16:27 -05002593 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002594 descriptorMapEntries->emplace_back(std::move(sampler_data),
2595 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002596 }
2597
2598 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2599 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002600
David Neto87846742018-04-11 17:36:22 -04002601 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002602 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2603
2604 // Ops[0] = Target ID
2605 // Ops[1] = Decoration (Binding)
2606 // Ops[2] = LiteralNumber according to Decoration
2607 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002608 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2609 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002610
David Neto87846742018-04-11 17:36:22 -04002611 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002612 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2613 }
David Neto862b7d82018-06-14 18:48:37 -04002614}
David Neto22f144c2017-06-12 14:26:21 -04002615
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002616void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002617 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2618 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002619
David Neto862b7d82018-06-14 18:48:37 -04002620 // Generate variables. Make one for each resource var info object.
2621 for (auto *info : ModuleOrderedResourceVars) {
2622 Type *type = info->var_fn->getReturnType();
2623 // Remap the address space for opaque types.
2624 switch (info->arg_kind) {
2625 case clspv::ArgKind::Sampler:
2626 case clspv::ArgKind::ReadOnlyImage:
2627 case clspv::ArgKind::WriteOnlyImage:
2628 type = PointerType::get(type->getPointerElementType(),
2629 clspv::AddressSpace::UniformConstant);
2630 break;
2631 default:
2632 break;
2633 }
David Neto22f144c2017-06-12 14:26:21 -04002634
David Neto862b7d82018-06-14 18:48:37 -04002635 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002636
David Neto862b7d82018-06-14 18:48:37 -04002637 const auto type_id = lookupType(type);
2638 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2639 SPIRVOperandList Ops;
2640 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002641
David Neto862b7d82018-06-14 18:48:37 -04002642 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2643 SPIRVInstList.push_back(Inst);
2644
2645 // Map calls to the variable-builtin-function.
2646 for (auto &U : info->var_fn->uses()) {
2647 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2648 const auto set = unsigned(
2649 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2650 const auto binding = unsigned(
2651 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2652 if (set == info->descriptor_set && binding == info->binding) {
2653 switch (info->arg_kind) {
2654 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002655 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002656 case clspv::ArgKind::Pod:
2657 // The call maps to the variable directly.
2658 VMap[call] = info->var_id;
2659 break;
2660 case clspv::ArgKind::Sampler:
2661 case clspv::ArgKind::ReadOnlyImage:
2662 case clspv::ArgKind::WriteOnlyImage:
2663 // The call maps to a load we generate later.
2664 ResourceVarDeferredLoadCalls[call] = info->var_id;
2665 break;
2666 default:
2667 llvm_unreachable("Unhandled arg kind");
2668 }
2669 }
David Neto22f144c2017-06-12 14:26:21 -04002670 }
David Neto862b7d82018-06-14 18:48:37 -04002671 }
2672 }
David Neto22f144c2017-06-12 14:26:21 -04002673
David Neto862b7d82018-06-14 18:48:37 -04002674 // Generate associated decorations.
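  // For each resource variable this emits, roughly:
  //   OpDecorate %var DescriptorSet <set>
  //   OpDecorate %var Binding <binding>
  // plus Coherent, NonWritable, or NonReadable decorations where applicable.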
David Neto22f144c2017-06-12 14:26:21 -04002675
David Neto862b7d82018-06-14 18:48:37 -04002676 // Find Insert Point for OpDecorate.
2677 auto DecoInsertPoint =
2678 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2679 [](SPIRVInstruction *Inst) -> bool {
2680 return Inst->getOpcode() != spv::OpDecorate &&
2681 Inst->getOpcode() != spv::OpMemberDecorate &&
2682 Inst->getOpcode() != spv::OpExtInstImport;
2683 });
2684
2685 SPIRVOperandList Ops;
2686 for (auto *info : ModuleOrderedResourceVars) {
2687 // Decorate with DescriptorSet and Binding.
2688 Ops.clear();
2689 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2690 << MkNum(info->descriptor_set);
2691 SPIRVInstList.insert(DecoInsertPoint,
2692 new SPIRVInstruction(spv::OpDecorate, Ops));
2693
2694 Ops.clear();
2695 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2696 << MkNum(info->binding);
2697 SPIRVInstList.insert(DecoInsertPoint,
2698 new SPIRVInstruction(spv::OpDecorate, Ops));
2699
alan-bakere9308012019-03-15 10:25:13 -04002700 if (info->coherent) {
2701 // Decorate with Coherent if required for the variable.
2702 Ops.clear();
2703 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2704 SPIRVInstList.insert(DecoInsertPoint,
2705 new SPIRVInstruction(spv::OpDecorate, Ops));
2706 }
2707
David Neto862b7d82018-06-14 18:48:37 -04002708 // Generate NonWritable and NonReadable
2709 switch (info->arg_kind) {
2710 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002711 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002712 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2713 clspv::AddressSpace::Constant) {
2714 Ops.clear();
2715 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2716 SPIRVInstList.insert(DecoInsertPoint,
2717 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002718 }
David Neto862b7d82018-06-14 18:48:37 -04002719 break;
David Neto862b7d82018-06-14 18:48:37 -04002720 case clspv::ArgKind::WriteOnlyImage:
2721 Ops.clear();
2722 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2723 SPIRVInstList.insert(DecoInsertPoint,
2724 new SPIRVInstruction(spv::OpDecorate, Ops));
2725 break;
2726 default:
2727 break;
David Neto22f144c2017-06-12 14:26:21 -04002728 }
2729 }
2730}
2731
2732void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002733 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002734 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2735 ValueMapType &VMap = getValueMap();
2736 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002737 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002738
2739 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2740 Type *Ty = GV.getType();
2741 PointerType *PTy = cast<PointerType>(Ty);
2742
2743 uint32_t InitializerID = 0;
2744
2745 // Workgroup size is handled differently (it goes into a constant)
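  // If every kernel carries the same reqd_work_group_size(X, Y, Z) metadata,
  // the sizes are folded into an OpConstantComposite. Otherwise three
  // OpSpecConstants (decorated with SpecIds 0/1/2 in GenerateModuleInfo) are
  // combined with OpSpecConstantComposite. Sketch of the spec-constant case:
  //   %x = OpSpecConstant %uint 1
  //   %y = OpSpecConstant %uint 1
  //   %z = OpSpecConstant %uint 1
  //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z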
2746 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2747 std::vector<bool> HasMDVec;
2748 uint32_t PrevXDimCst = 0xFFFFFFFF;
2749 uint32_t PrevYDimCst = 0xFFFFFFFF;
2750 uint32_t PrevZDimCst = 0xFFFFFFFF;
2751 for (Function &Func : *GV.getParent()) {
2752 if (Func.isDeclaration()) {
2753 continue;
2754 }
2755
2756 // We only need to check kernels.
2757 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2758 continue;
2759 }
2760
2761 if (const MDNode *MD =
2762 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2763 uint32_t CurXDimCst = static_cast<uint32_t>(
2764 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2765 uint32_t CurYDimCst = static_cast<uint32_t>(
2766 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2767 uint32_t CurZDimCst = static_cast<uint32_t>(
2768 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2769
2770 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2771 PrevZDimCst == 0xFFFFFFFF) {
2772 PrevXDimCst = CurXDimCst;
2773 PrevYDimCst = CurYDimCst;
2774 PrevZDimCst = CurZDimCst;
2775 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2776 CurZDimCst != PrevZDimCst) {
2777 llvm_unreachable(
2778 "reqd_work_group_size must be the same across all kernels");
2779 } else {
2780 continue;
2781 }
2782
2783 //
2784 // Generate OpConstantComposite.
2785 //
2786 // Ops[0] : Result Type ID
2787 // Ops[1] : Constant size for x dimension.
2788 // Ops[2] : Constant size for y dimension.
2789 // Ops[3] : Constant size for z dimension.
2790 SPIRVOperandList Ops;
2791
2792 uint32_t XDimCstID =
2793 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2794 uint32_t YDimCstID =
2795 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2796 uint32_t ZDimCstID =
2797 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2798
2799 InitializerID = nextID;
2800
David Neto257c3892018-04-11 13:19:45 -04002801 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2802 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002803
David Neto87846742018-04-11 17:36:22 -04002804 auto *Inst =
2805 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002806 SPIRVInstList.push_back(Inst);
2807
2808 HasMDVec.push_back(true);
2809 } else {
2810 HasMDVec.push_back(false);
2811 }
2812 }
2813
2814 // Check all kernels have same definitions for work_group_size.
2815 bool HasMD = false;
2816 if (!HasMDVec.empty()) {
2817 HasMD = HasMDVec[0];
2818 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2819 if (HasMD != HasMDVec[i]) {
2820 llvm_unreachable(
2821 "Kernels should have consistent work group size definition");
2822 }
2823 }
2824 }
2825
2826 // If the kernels do not have reqd_work_group_size metadata, generate
2827 // OpSpecConstants for the x/y/z dimensions.
2828 if (!HasMD) {
2829 //
2830 // Generate OpSpecConstants for x/y/z dimension.
2831 //
2832 // Ops[0] : Result Type ID
2833 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2834 uint32_t XDimCstID = 0;
2835 uint32_t YDimCstID = 0;
2836 uint32_t ZDimCstID = 0;
2837
David Neto22f144c2017-06-12 14:26:21 -04002838 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002839 uint32_t result_type_id =
2840 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002841
David Neto257c3892018-04-11 13:19:45 -04002842 // X Dimension
2843 Ops << MkId(result_type_id) << MkNum(1);
2844 XDimCstID = nextID++;
2845 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002846 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002847
2848 // Y Dimension
2849 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002850 Ops << MkId(result_type_id) << MkNum(1);
2851 YDimCstID = nextID++;
2852 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002853 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002854
2855 // Z Dimension
2856 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002857 Ops << MkId(result_type_id) << MkNum(1);
2858 ZDimCstID = nextID++;
2859 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002860 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002861
David Neto257c3892018-04-11 13:19:45 -04002862 BuiltinDimVec.push_back(XDimCstID);
2863 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002864 BuiltinDimVec.push_back(ZDimCstID);
2865
David Neto22f144c2017-06-12 14:26:21 -04002866 //
2867 // Generate OpSpecConstantComposite.
2868 //
2869 // Ops[0] : Result Type ID
2870 // Ops[1] : Constant size for x dimension.
2871 // Ops[2] : Constant size for y dimension.
2872 // Ops[3] : Constant size for z dimension.
2873 InitializerID = nextID;
2874
2875 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002876 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2877 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002878
David Neto87846742018-04-11 17:36:22 -04002879 auto *Inst =
2880 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002881 SPIRVInstList.push_back(Inst);
2882 }
2883 }
2884
David Neto22f144c2017-06-12 14:26:21 -04002885 VMap[&GV] = nextID;
2886
2887 //
2888 // Generate OpVariable.
2889 //
2890 // Ops[0] : Result Type ID
2891 // Ops[1] : Storage Class
2892 SPIRVOperandList Ops;
2893
David Neto85082642018-03-24 06:55:20 -07002894 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002895 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002896
David Neto85082642018-03-24 06:55:20 -07002897 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002898 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002899 clspv::Option::ModuleConstantsInStorageBuffer();
2900
Kévin Petit23d5f182019-08-13 16:21:29 +01002901 if (GV.hasInitializer()) {
2902 auto GVInit = GV.getInitializer();
2903 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2904 assert(VMap.count(GVInit) == 1);
2905 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002906 }
2907 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002908
2909 if (0 != InitializerID) {
2910 // Emit the ID of the initializer as part of the variable definition.
2911 Ops << MkId(InitializerID);
2912 }
David Neto85082642018-03-24 06:55:20 -07002913 const uint32_t var_id = nextID++;
2914
David Neto87846742018-04-11 17:36:22 -04002915 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002916 SPIRVInstList.push_back(Inst);
2917
2918 // If we have a builtin.
2919 if (spv::BuiltInMax != BuiltinType) {
2920 // Find Insert Point for OpDecorate.
2921 auto DecoInsertPoint =
2922 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2923 [](SPIRVInstruction *Inst) -> bool {
2924 return Inst->getOpcode() != spv::OpDecorate &&
2925 Inst->getOpcode() != spv::OpMemberDecorate &&
2926 Inst->getOpcode() != spv::OpExtInstImport;
2927 });
2928 //
2929 // Generate OpDecorate.
2930 //
2931 // DOps[0] = Target ID
2932 // DOps[1] = Decoration (Builtin)
2933 // DOps[2] = BuiltIn ID
2934 uint32_t ResultID;
2935
2936 // WorkgroupSize is different: we decorate the constant composite that
2937 // holds its value, rather than the variable we use to access the value.
2938 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2939 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002940 // Save both the value and variable IDs for later.
2941 WorkgroupSizeValueID = InitializerID;
2942 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002943 } else {
2944 ResultID = VMap[&GV];
2945 }
2946
2947 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002948 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2949 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002950
David Neto87846742018-04-11 17:36:22 -04002951 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002952 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002953 } else if (module_scope_constant_external_init) {
2954 // This module scope constant is initialized from a storage buffer with data
2955 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002956 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002957
David Neto862b7d82018-06-14 18:48:37 -04002958 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002959 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2960 // that later to other types, like uniform buffer.
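    // The emitted entry is roughly of the form (sketch; the exact
    // serialization is owned by DescriptorMapEntry):
    //   constant,descriptorSet,<set>,binding,0,kind,buffer,hexbytes,<hex...>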
alan-bakerf5e5f692018-11-27 08:33:24 -05002961 std::string hexbytes;
2962 llvm::raw_string_ostream str(hexbytes);
2963 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002964 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2965 str.str()};
2966 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2967 0);
David Neto85082642018-03-24 06:55:20 -07002968
2969 // Find Insert Point for OpDecorate.
2970 auto DecoInsertPoint =
2971 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2972 [](SPIRVInstruction *Inst) -> bool {
2973 return Inst->getOpcode() != spv::OpDecorate &&
2974 Inst->getOpcode() != spv::OpMemberDecorate &&
2975 Inst->getOpcode() != spv::OpExtInstImport;
2976 });
2977
David Neto257c3892018-04-11 13:19:45 -04002978 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002979 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002980 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2981 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002982 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002983
2984 // OpDecorate %var DescriptorSet <descriptor_set>
2985 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002986 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2987 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002988 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04002989 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04002990 }
2991}
2992
David Netoc6f3ab22018-04-06 18:02:31 -04002993void SPIRVProducerPass::GenerateWorkgroupVars() {
2994 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04002995 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2996 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002997 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002998
2999 // Generate OpVariable.
3000 //
3001 // Ops[0] : Result Type ID
3002 // Ops[1] : Storage Class
3003 SPIRVOperandList Ops;
3004 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3005
3006 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003007 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003008 }
3009}
3010
David Neto862b7d82018-06-14 18:48:37 -04003011void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3012 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003013 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3014 return;
3015 }
David Neto862b7d82018-06-14 18:48:37 -04003016 // Gather the list of resources that are used by this function's arguments.
3017 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3018
alan-bakerf5e5f692018-11-27 08:33:24 -05003019 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3020 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003021 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003022 std::string kind =
3023 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3024 ? "pod_ubo"
3025 : argKind;
3026 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003027 };
3028
3029 auto *fty = F.getType()->getPointerElementType();
3030 auto *func_ty = dyn_cast<FunctionType>(fty);
3031
alan-baker038e9242019-04-19 22:14:41 -04003032 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003033 // If an argument maps to a resource variable, then get descriptor set and
3034 // binding from the resource variable. Other info comes from the metadata.
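  // Each kernel_arg_map entry has 7 operands, read below in this order:
  // name, old index, remapped index, offset, size, arg kind, spec id.
  // An illustrative entry (values are examples only):
  //   !{!"dst", i32 0, i32 0, i32 0, i32 4, !"buffer", i32 -1}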
3035 const auto *arg_map = F.getMetadata("kernel_arg_map");
3036 if (arg_map) {
3037 for (const auto &arg : arg_map->operands()) {
3038 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003039 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003040 const auto name =
3041 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3042 const auto old_index =
3043 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3044 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003045 const size_t new_index = static_cast<size_t>(
3046 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003047 const auto offset =
3048 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003049 const auto arg_size =
3050 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003051 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003052 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003053 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003054 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003055
3056 uint32_t descriptor_set = 0;
3057 uint32_t binding = 0;
3058 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003059 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003060 static_cast<uint32_t>(spec_id),
3061 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003062 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003063 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003064 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3065 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3066 DL));
David Neto862b7d82018-06-14 18:48:37 -04003067 } else {
3068 auto *info = resource_var_at_index[new_index];
3069 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003070 descriptor_set = info->descriptor_set;
3071 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003072 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003073 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3074 binding);
David Neto862b7d82018-06-14 18:48:37 -04003075 }
3076 } else {
3077 // There is no argument map.
3078 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003079 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003080
3081 SmallVector<Argument *, 4> arguments;
3082 for (auto &arg : F.args()) {
3083 arguments.push_back(&arg);
3084 }
3085
3086 unsigned arg_index = 0;
3087 for (auto *info : resource_var_at_index) {
3088 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003089 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003090 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003091 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003092 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003093 }
3094
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003095 // Local pointer arguments are unused in this case. Offset is always
3096 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003097 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3098 F.getName(), arg->getName(),
3099 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3100 0, 0,
3101 0, arg_size};
3102 descriptorMapEntries->emplace_back(std::move(kernel_data),
3103 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003104 }
3105 arg_index++;
3106 }
3107 // Generate mappings for pointer-to-local arguments.
3108 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3109 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003110 auto where = LocalArgSpecIds.find(arg);
3111 if (where != LocalArgSpecIds.end()) {
3112 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003113 // Pod argument members are unused in this case.
3114 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3115 F.getName(),
3116 arg->getName(),
3117 arg_index,
3118 ArgKind::Local,
3119 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003120 static_cast<uint32_t>(
3121 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003122 0,
3123 0};
3124 // Pointer-to-local arguments do not utilize descriptor set and binding.
3125 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003126 }
3127 }
3128 }
3129}
3130
David Neto22f144c2017-06-12 14:26:21 -04003131void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3132 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3133 ValueMapType &VMap = getValueMap();
3134 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003135 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3136 auto &GlobalConstArgSet = getGlobalConstArgSet();
3137
3138 FunctionType *FTy = F.getFunctionType();
3139
3140 //
David Neto22f144c2017-06-12 14:26:21 -04003141 // Generate OpFunction.
3142 //
3143
3144 // FOps[0] : Result Type ID
3145 // FOps[1] : Function Control
3146 // FOps[2] : Function Type ID
3147 SPIRVOperandList FOps;
3148
3149 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003150 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003151
3152 // Check function attributes for SPIRV Function Control.
3153 uint32_t FuncControl = spv::FunctionControlMaskNone;
3154 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3155 FuncControl |= spv::FunctionControlInlineMask;
3156 }
3157 if (F.hasFnAttribute(Attribute::NoInline)) {
3158 FuncControl |= spv::FunctionControlDontInlineMask;
3159 }
3160 // TODO: Check llvm attribute for Function Control Pure.
3161 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3162 FuncControl |= spv::FunctionControlPureMask;
3163 }
3164 // TODO: Check llvm attribute for Function Control Const.
3165 if (F.hasFnAttribute(Attribute::ReadNone)) {
3166 FuncControl |= spv::FunctionControlConstMask;
3167 }
3168
David Neto257c3892018-04-11 13:19:45 -04003169 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003170
3171 uint32_t FTyID;
3172 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3173 SmallVector<Type *, 4> NewFuncParamTys;
3174 FunctionType *NewFTy =
3175 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3176 FTyID = lookupType(NewFTy);
3177 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003178 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003179 if (GlobalConstFuncTyMap.count(FTy)) {
3180 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3181 } else {
3182 FTyID = lookupType(FTy);
3183 }
3184 }
3185
David Neto257c3892018-04-11 13:19:45 -04003186 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003187
3188 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3189 EntryPoints.push_back(std::make_pair(&F, nextID));
3190 }
3191
3192 VMap[&F] = nextID;
3193
David Neto482550a2018-03-24 05:21:07 -07003194 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003195 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3196 }
David Neto22f144c2017-06-12 14:26:21 -04003197 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003198 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003199 SPIRVInstList.push_back(FuncInst);
3200
3201 //
3202 // Generate OpFunctionParameter for Normal function.
3203 //
3204
3205 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003206
3207 // Find Insert Point for OpDecorate.
3208 auto DecoInsertPoint =
3209 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3210 [](SPIRVInstruction *Inst) -> bool {
3211 return Inst->getOpcode() != spv::OpDecorate &&
3212 Inst->getOpcode() != spv::OpMemberDecorate &&
3213 Inst->getOpcode() != spv::OpExtInstImport;
3214 });
3215
David Neto22f144c2017-06-12 14:26:21 -04003216 // Iterate over the Arguments for their names, rather than over the function type's parameter types.
3217 unsigned ArgIdx = 0;
3218 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003219 uint32_t param_id = nextID++;
3220 VMap[&Arg] = param_id;
3221
3222 if (CalledWithCoherentResource(Arg)) {
3223 // If the arg is passed a coherent resource ever, then decorate this
3224 // parameter with Coherent too.
3225 SPIRVOperandList decoration_ops;
3226 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003227 SPIRVInstList.insert(
3228 DecoInsertPoint,
3229 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003230 }
David Neto22f144c2017-06-12 14:26:21 -04003231
3232 // ParamOps[0] : Result Type ID
3233 SPIRVOperandList ParamOps;
3234
3235 // Find SPIRV instruction for parameter type.
3236 uint32_t ParamTyID = lookupType(Arg.getType());
3237 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3238 if (GlobalConstFuncTyMap.count(FTy)) {
3239 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3240 Type *EleTy = PTy->getPointerElementType();
3241 Type *ArgTy =
3242 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3243 ParamTyID = lookupType(ArgTy);
3244 GlobalConstArgSet.insert(&Arg);
3245 }
3246 }
3247 }
David Neto257c3892018-04-11 13:19:45 -04003248 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003249
3250 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003251 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003252 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003253 SPIRVInstList.push_back(ParamInst);
3254
3255 ArgIdx++;
3256 }
3257 }
3258}
3259
alan-bakerb6b09dc2018-11-08 16:59:28 -05003260void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003261 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3262 EntryPointVecType &EntryPoints = getEntryPointVec();
3263 ValueMapType &VMap = getValueMap();
3264 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3265 uint32_t &ExtInstImportID = getOpExtInstImportID();
3266 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3267
3268 // Set up insert point.
3269 auto InsertPoint = SPIRVInstList.begin();
3270
3271 //
3272 // Generate OpCapability
3273 //
3274 // TODO: Which llvm information is mapped to SPIRV Capapbility?
3275
3276 // Ops[0] = Capability
3277 SPIRVOperandList Ops;
3278
David Neto87846742018-04-11 17:36:22 -04003279 auto *CapInst =
3280 new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
David Neto22f144c2017-06-12 14:26:21 -04003281 SPIRVInstList.insert(InsertPoint, CapInst);
3282
alan-bakerf906d2b2019-12-10 11:26:23 -05003283 bool write_without_format = false;
3284 bool sampled_1d = false;
3285 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003286 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003287 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3288 // Generate OpCapability for i8 type.
3289 SPIRVInstList.insert(InsertPoint,
3290 new SPIRVInstruction(spv::OpCapability,
3291 {MkNum(spv::CapabilityInt8)}));
3292 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003293 // Generate OpCapability for i16 type.
David Neto87846742018-04-11 17:36:22 -04003294 SPIRVInstList.insert(InsertPoint,
3295 new SPIRVInstruction(spv::OpCapability,
3296 {MkNum(spv::CapabilityInt16)}));
David Neto22f144c2017-06-12 14:26:21 -04003297 } else if (Ty->isIntegerTy(64)) {
3298 // Generate OpCapability for i64 type.
David Neto87846742018-04-11 17:36:22 -04003299 SPIRVInstList.insert(InsertPoint,
3300 new SPIRVInstruction(spv::OpCapability,
3301 {MkNum(spv::CapabilityInt64)}));
David Neto22f144c2017-06-12 14:26:21 -04003302 } else if (Ty->isHalfTy()) {
3303 // Generate OpCapability for half type.
3304 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003305 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3306 {MkNum(spv::CapabilityFloat16)}));
David Neto22f144c2017-06-12 14:26:21 -04003307 } else if (Ty->isDoubleTy()) {
3308 // Generate OpCapability for double type.
3309 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003310 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3311 {MkNum(spv::CapabilityFloat64)}));
David Neto22f144c2017-06-12 14:26:21 -04003312 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3313 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003314 if (STy->getName().startswith("opencl.image1d_wo_t") ||
3315 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003316 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003317 write_without_format = true;
3318 }
3319 if (STy->getName().startswith("opencl.image1d_ro_t") ||
3320 STy->getName().startswith("opencl.image1d_wo_t")) {
3321 if (STy->getName().contains(".sampled"))
3322 sampled_1d = true;
3323 else
3324 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003325 }
3326 }
3327 }
3328 }
3329
alan-bakerf906d2b2019-12-10 11:26:23 -05003330 if (write_without_format) {
3331 // Generate OpCapability for write only image type.
3332 SPIRVInstList.insert(
3333 InsertPoint,
3334 new SPIRVInstruction(
3335 spv::OpCapability,
3336 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3337 }
3338 if (image_1d) {
3339 // Generate OpCapability for unsampled 1D image type.
3340 SPIRVInstList.insert(InsertPoint,
3341 new SPIRVInstruction(spv::OpCapability,
3342 {MkNum(spv::CapabilityImage1D)}));
3343 } else if (sampled_1d) {
3344 // Generate OpCapability for sampled 1D image type.
3345 SPIRVInstList.insert(
3346 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3347 {MkNum(spv::CapabilitySampled1D)}));
3348 }
3349
David Neto5c22a252018-03-15 16:07:41 -04003350 { // OpCapability ImageQuery
3351 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003352 for (const auto &SymVal : module.getValueSymbolTable()) {
3353 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003354 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003355 hasImageQuery = true;
3356 break;
3357 }
David Neto5c22a252018-03-15 16:07:41 -04003358 }
3359 }
alan-bakerf67468c2019-11-25 15:51:49 -05003360
David Neto5c22a252018-03-15 16:07:41 -04003361 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003362 auto *ImageQueryCapInst = new SPIRVInstruction(
3363 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003364 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3365 }
3366 }
3367
David Neto22f144c2017-06-12 14:26:21 -04003368 if (hasVariablePointers()) {
3369 //
David Neto22f144c2017-06-12 14:26:21 -04003370 // Generate OpCapability.
3371 //
3372 // Ops[0] = Capability
3373 //
3374 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003375 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003376
David Neto87846742018-04-11 17:36:22 -04003377 SPIRVInstList.insert(InsertPoint,
3378 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003379 } else if (hasVariablePointersStorageBuffer()) {
3380 //
3381 // Generate OpCapability.
3382 //
3383 // Ops[0] = Capability
3384 //
3385 Ops.clear();
3386 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003387
alan-baker5b86ed72019-02-15 08:26:50 -05003388 SPIRVInstList.insert(InsertPoint,
3389 new SPIRVInstruction(spv::OpCapability, Ops));
3390 }
3391
3392 // Always add the storage buffer extension
3393 {
David Neto22f144c2017-06-12 14:26:21 -04003394 //
3395 // Generate OpExtension.
3396 //
3397 // Ops[0] = Name (Literal String)
3398 //
alan-baker5b86ed72019-02-15 08:26:50 -05003399 auto *ExtensionInst = new SPIRVInstruction(
3400 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3401 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3402 }
David Neto22f144c2017-06-12 14:26:21 -04003403
alan-baker5b86ed72019-02-15 08:26:50 -05003404 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3405 //
3406 // Generate OpExtension.
3407 //
3408 // Ops[0] = Name (Literal String)
3409 //
3410 auto *ExtensionInst = new SPIRVInstruction(
3411 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3412 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003413 }
3414
3415 if (ExtInstImportID) {
3416 ++InsertPoint;
3417 }
3418
3419 //
3420 // Generate OpMemoryModel
3421 //
3422 // Memory model for Vulkan will always be GLSL450.
3423
3424 // Ops[0] = Addressing Model
3425 // Ops[1] = Memory Model
3426 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003427 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003428
David Neto87846742018-04-11 17:36:22 -04003429 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003430 SPIRVInstList.insert(InsertPoint, MemModelInst);
3431
3432 //
3433 // Generate OpEntryPoint
3434 //
3435 for (auto EntryPoint : EntryPoints) {
3436 // Ops[0] = Execution Model
3437 // Ops[1] = EntryPoint ID
3438 // Ops[2] = Name (Literal String)
3439 // ...
3440 //
3441 // TODO: Do we need to consider Interface ID for forward references???
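    //
    // e.g. (sketch): OpEntryPoint GLCompute %kernel "my_kernel" %interface...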
3442 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003443 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003444 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3445 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003446
David Neto22f144c2017-06-12 14:26:21 -04003447 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003448 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003449 }
3450
David Neto87846742018-04-11 17:36:22 -04003451 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003452 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3453 }
3454
3455 for (auto EntryPoint : EntryPoints) {
3456 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3457 ->getMetadata("reqd_work_group_size")) {
3458
3459 if (!BuiltinDimVec.empty()) {
3460 llvm_unreachable(
3461 "Kernels should have consistent work group size definition");
3462 }
3463
3464 //
3465 // Generate OpExecutionMode
3466 //
3467
3468 // Ops[0] = Entry Point ID
3469 // Ops[1] = Execution Mode
3470 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
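      // For reqd_work_group_size(8, 4, 1) this emits, roughly:
      //   OpExecutionMode %kernel LocalSize 8 4 1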
3471 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003472 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003473
3474 uint32_t XDim = static_cast<uint32_t>(
3475 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3476 uint32_t YDim = static_cast<uint32_t>(
3477 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3478 uint32_t ZDim = static_cast<uint32_t>(
3479 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3480
David Neto257c3892018-04-11 13:19:45 -04003481 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003482
David Neto87846742018-04-11 17:36:22 -04003483 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003484 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3485 }
3486 }
3487
3488 //
3489 // Generate OpSource.
3490 //
3491 // Ops[0] = SourceLanguage ID
3492 // Ops[1] = Version (LiteralNum)
3493 //
3494 Ops.clear();
Kévin Petit0fc88042019-04-09 23:25:02 +01003495 if (clspv::Option::CPlusPlus()) {
3496 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3497 } else {
3498 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
3499 }
David Neto22f144c2017-06-12 14:26:21 -04003500
David Neto87846742018-04-11 17:36:22 -04003501 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003502 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3503
3504 if (!BuiltinDimVec.empty()) {
3505 //
3506 // Generate OpDecorates for x/y/z dimension.
3507 //
3508 // Ops[0] = Target ID
3509 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003510 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003511
3512 // X Dimension
3513 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003514 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003515 SPIRVInstList.insert(InsertPoint,
3516 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003517
3518 // Y Dimension
3519 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003520 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003521 SPIRVInstList.insert(InsertPoint,
3522 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003523
3524 // Z Dimension
3525 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003526 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003527 SPIRVInstList.insert(InsertPoint,
3528 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003529 }
3530}
3531
David Netob6e2e062018-04-25 10:32:06 -04003532void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3533 // Work around a driver bug. Initializers on Private variables might not
3534 // work. So the start of the kernel should store the initializer value to the
3535 // variables. Yes, *every* entry point pays this cost if *any* entry point
3536 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3537 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003538 // TODO(dneto): Remove this at some point once fixed drivers are widely
3539 // available.
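  // The workaround itself is a single store at the start of each kernel,
  // roughly: OpStore %workgroup_size_var %workgroup_size_value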
David Netob6e2e062018-04-25 10:32:06 -04003540 if (WorkgroupSizeVarID) {
3541 assert(WorkgroupSizeValueID);
3542
3543 SPIRVOperandList Ops;
3544 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3545
3546 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3547 getSPIRVInstList().push_back(Inst);
3548 }
3549}
3550
David Neto22f144c2017-06-12 14:26:21 -04003551void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3552 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3553 ValueMapType &VMap = getValueMap();
3554
David Netob6e2e062018-04-25 10:32:06 -04003555 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003556
3557 for (BasicBlock &BB : F) {
3558 // Register BasicBlock to ValueMap.
3559 VMap[&BB] = nextID;
3560
3561 //
3562 // Generate OpLabel for Basic Block.
3563 //
3564 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003565 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003566 SPIRVInstList.push_back(Inst);
3567
David Neto6dcd4712017-06-23 11:06:47 -04003568 // OpVariable instructions must come first.
3569 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003570 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3571 // Allocating a pointer requires variable pointers.
3572 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003573 setVariablePointersCapabilities(
3574 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003575 }
David Neto6dcd4712017-06-23 11:06:47 -04003576 GenerateInstruction(I);
3577 }
3578 }
3579
David Neto22f144c2017-06-12 14:26:21 -04003580 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003581 if (clspv::Option::HackInitializers()) {
3582 GenerateEntryPointInitialStores();
3583 }
David Neto22f144c2017-06-12 14:26:21 -04003584 }
3585
3586 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003587 if (!isa<AllocaInst>(I)) {
3588 GenerateInstruction(I);
3589 }
David Neto22f144c2017-06-12 14:26:21 -04003590 }
3591 }
3592}
3593
3594spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3595 const std::map<CmpInst::Predicate, spv::Op> Map = {
3596 {CmpInst::ICMP_EQ, spv::OpIEqual},
3597 {CmpInst::ICMP_NE, spv::OpINotEqual},
3598 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3599 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3600 {CmpInst::ICMP_ULT, spv::OpULessThan},
3601 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3602 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3603 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3604 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3605 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3606 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3607 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3608 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3609 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3610 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3611 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3612 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3613 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3614 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3615 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3616 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3617 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3618
3619 assert(0 != Map.count(I->getPredicate()));
3620
3621 return Map.at(I->getPredicate());
3622}
3623
3624spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3625 const std::map<unsigned, spv::Op> Map{
3626 {Instruction::Trunc, spv::OpUConvert},
3627 {Instruction::ZExt, spv::OpUConvert},
3628 {Instruction::SExt, spv::OpSConvert},
3629 {Instruction::FPToUI, spv::OpConvertFToU},
3630 {Instruction::FPToSI, spv::OpConvertFToS},
3631 {Instruction::UIToFP, spv::OpConvertUToF},
3632 {Instruction::SIToFP, spv::OpConvertSToF},
3633 {Instruction::FPTrunc, spv::OpFConvert},
3634 {Instruction::FPExt, spv::OpFConvert},
3635 {Instruction::BitCast, spv::OpBitcast}};
3636
3637 assert(0 != Map.count(I.getOpcode()));
3638
3639 return Map.at(I.getOpcode());
3640}
3641
3642spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003643 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003644 switch (I.getOpcode()) {
3645 default:
3646 break;
3647 case Instruction::Or:
3648 return spv::OpLogicalOr;
3649 case Instruction::And:
3650 return spv::OpLogicalAnd;
3651 case Instruction::Xor:
3652 return spv::OpLogicalNotEqual;
3653 }
3654 }
3655
alan-bakerb6b09dc2018-11-08 16:59:28 -05003656 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003657 {Instruction::Add, spv::OpIAdd},
3658 {Instruction::FAdd, spv::OpFAdd},
3659 {Instruction::Sub, spv::OpISub},
3660 {Instruction::FSub, spv::OpFSub},
3661 {Instruction::Mul, spv::OpIMul},
3662 {Instruction::FMul, spv::OpFMul},
3663 {Instruction::UDiv, spv::OpUDiv},
3664 {Instruction::SDiv, spv::OpSDiv},
3665 {Instruction::FDiv, spv::OpFDiv},
3666 {Instruction::URem, spv::OpUMod},
3667 {Instruction::SRem, spv::OpSRem},
3668 {Instruction::FRem, spv::OpFRem},
3669 {Instruction::Or, spv::OpBitwiseOr},
3670 {Instruction::Xor, spv::OpBitwiseXor},
3671 {Instruction::And, spv::OpBitwiseAnd},
3672 {Instruction::Shl, spv::OpShiftLeftLogical},
3673 {Instruction::LShr, spv::OpShiftRightLogical},
3674 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3675
3676 assert(0 != Map.count(I.getOpcode()));
3677
3678 return Map.at(I.getOpcode());
3679}
3680
3681void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3682 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3683 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003684 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3685 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3686
3687 // Register Instruction to ValueMap.
3688 if (0 == VMap[&I]) {
3689 VMap[&I] = nextID;
3690 }
3691
3692 switch (I.getOpcode()) {
3693 default: {
3694 if (Instruction::isCast(I.getOpcode())) {
3695 //
3696 // Generate SPIRV instructions for cast operators.
3697 //
3698
David Netod2de94a2017-08-28 17:27:47 -04003699 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003700 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003701 auto toI8 = Ty == Type::getInt8Ty(Context);
3702 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003703 // Handle zext, sext and uitofp with i1 type specially.
3704 if ((I.getOpcode() == Instruction::ZExt ||
3705 I.getOpcode() == Instruction::SExt ||
3706 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003707 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003708 //
3709 // Generate OpSelect.
3710 //
3711
3712 // Ops[0] = Result Type ID
3713 // Ops[1] = Condition ID
3714 // Ops[2] = True Constant ID
3715 // Ops[3] = False Constant ID
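        //
        // e.g. "%r = zext i1 %c to i32" becomes, roughly:
        //   %r = OpSelect %uint %c %uint_1 %uint_0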
3716 SPIRVOperandList Ops;
3717
David Neto257c3892018-04-11 13:19:45 -04003718 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003719
David Neto22f144c2017-06-12 14:26:21 -04003720 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003721 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003722
3723 uint32_t TrueID = 0;
3724 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003725 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003726 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003727 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003728 } else {
3729 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3730 }
David Neto257c3892018-04-11 13:19:45 -04003731 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003732
3733 uint32_t FalseID = 0;
3734 if (I.getOpcode() == Instruction::ZExt) {
3735 FalseID = VMap[Constant::getNullValue(I.getType())];
3736 } else if (I.getOpcode() == Instruction::SExt) {
3737 FalseID = VMap[Constant::getNullValue(I.getType())];
3738 } else {
3739 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3740 }
David Neto257c3892018-04-11 13:19:45 -04003741 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003742
David Neto87846742018-04-11 17:36:22 -04003743 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003744 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003745 } else if (!clspv::Option::Int8Support() &&
3746 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003747 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3748 // 8 bits.
3749 // Before:
3750 // %result = trunc i32 %a to i8
3751 // After
3752 // %result = OpBitwiseAnd %uint %a %uint_255
3753
3754 SPIRVOperandList Ops;
3755
David Neto257c3892018-04-11 13:19:45 -04003756 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003757
3758 Type *UintTy = Type::getInt32Ty(Context);
3759 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003760 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003761
David Neto87846742018-04-11 17:36:22 -04003762 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003763 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003764 } else {
3765 // Ops[0] = Result Type ID
3766 // Ops[1] = Source Value ID
3767 SPIRVOperandList Ops;
3768
David Neto257c3892018-04-11 13:19:45 -04003769 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003770
David Neto87846742018-04-11 17:36:22 -04003771 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003772 SPIRVInstList.push_back(Inst);
3773 }
3774 } else if (isa<BinaryOperator>(I)) {
3775 //
3776 // Generate SPIRV instructions for binary operators.
3777 //
3778
3779 // Handle xor with i1 type specially.
3780 if (I.getOpcode() == Instruction::Xor &&
3781 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003782 ((isa<ConstantInt>(I.getOperand(0)) &&
3783 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3784 (isa<ConstantInt>(I.getOperand(1)) &&
3785 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003786 //
3787 // Generate OpLogicalNot.
3788 //
3789 // Ops[0] = Result Type ID
3790 // Ops[1] = Operand
3791 SPIRVOperandList Ops;
3792
David Neto257c3892018-04-11 13:19:45 -04003793 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003794
3795 Value *CondV = I.getOperand(0);
3796 if (isa<Constant>(I.getOperand(0))) {
3797 CondV = I.getOperand(1);
3798 }
David Neto257c3892018-04-11 13:19:45 -04003799 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003800
David Neto87846742018-04-11 17:36:22 -04003801 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003802 SPIRVInstList.push_back(Inst);
3803 } else {
3804 // Ops[0] = Result Type ID
3805 // Ops[1] = Operand 0
3806 // Ops[2] = Operand 1
3807 SPIRVOperandList Ops;
3808
David Neto257c3892018-04-11 13:19:45 -04003809 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3810 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003811
David Neto87846742018-04-11 17:36:22 -04003812 auto *Inst =
3813 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003814 SPIRVInstList.push_back(Inst);
3815 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003816 } else if (I.getOpcode() == Instruction::FNeg) {
3817 // The only unary operator.
3818 //
3819 // Ops[0] = Result Type ID
3820 // Ops[1] = Operand 0
3821 SPIRVOperandList ops;
3822
3823 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3824 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3825 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003826 } else {
3827 I.print(errs());
3828 llvm_unreachable("Unsupported instruction???");
3829 }
3830 break;
3831 }
3832 case Instruction::GetElementPtr: {
3833 auto &GlobalConstArgSet = getGlobalConstArgSet();
3834
3835 //
3836 // Generate OpAccessChain.
3837 //
3838 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3839
3840 //
3841 // Generate OpAccessChain.
3842 //
3843
3844 // Ops[0] = Result Type ID
3845 // Ops[1] = Base ID
3846 // Ops[2] ... Ops[n] = Indexes ID
3847 SPIRVOperandList Ops;
3848
alan-bakerb6b09dc2018-11-08 16:59:28 -05003849 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003850 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3851 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3852 // Use pointer type with private address space for global constant.
3853 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003854 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003855 }
David Neto257c3892018-04-11 13:19:45 -04003856
3857 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003858
David Neto862b7d82018-06-14 18:48:37 -04003859 // Generate the base pointer.
3860 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003861
David Neto862b7d82018-06-14 18:48:37 -04003862 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003863
3864 //
3865 // Follows below rules for gep.
3866 //
David Neto862b7d82018-06-14 18:48:37 -04003867 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3868 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003869 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3870 // first index.
3871 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3872 // use gep's first index.
3873 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
3874 // gep's first index.
3875 //
3876 spv::Op Opcode = spv::OpAccessChain;
3877 unsigned offset = 0;
3878 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003879 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003880 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003881 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003882 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003883 }
David Neto862b7d82018-06-14 18:48:37 -04003884 } else {
David Neto22f144c2017-06-12 14:26:21 -04003885 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003886 }
3887
3888 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003889 // Do we need to generate ArrayStride? Check against the GEP result type
3890 // rather than the pointer type of the base because when indexing into
3891 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3892 // for something else in the SPIR-V.
3893 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003894 auto address_space = ResultType->getAddressSpace();
3895 setVariablePointersCapabilities(address_space);
3896 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003897 case spv::StorageClassStorageBuffer:
3898 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003899 // Save the need to generate an ArrayStride decoration. But defer
3900 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003901 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003902 break;
3903 default:
3904 break;
David Neto1a1a0582017-07-07 12:01:44 -04003905 }
David Neto22f144c2017-06-12 14:26:21 -04003906 }
3907
3908 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003909 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003910 }
3911
David Neto87846742018-04-11 17:36:22 -04003912 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003913 SPIRVInstList.push_back(Inst);
3914 break;
3915 }
3916 case Instruction::ExtractValue: {
3917 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3918 // Ops[0] = Result Type ID
3919 // Ops[1] = Composite ID
3920 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3921 SPIRVOperandList Ops;
3922
David Neto257c3892018-04-11 13:19:45 -04003923 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003924
3925 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003926 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003927
3928 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003929 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003930 }
3931
David Neto87846742018-04-11 17:36:22 -04003932 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003933 SPIRVInstList.push_back(Inst);
3934 break;
3935 }
3936 case Instruction::InsertValue: {
3937 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3938 // Ops[0] = Result Type ID
3939 // Ops[1] = Object ID
3940 // Ops[2] = Composite ID
3941 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3942 SPIRVOperandList Ops;
3943
3944 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003945 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003946
3947 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003948 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003949
3950 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003951 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003952
3953 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003954 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003955 }
3956
David Neto87846742018-04-11 17:36:22 -04003957 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003958 SPIRVInstList.push_back(Inst);
3959 break;
3960 }
3961 case Instruction::Select: {
3962 //
3963 // Generate OpSelect.
3964 //
3965
3966 // Ops[0] = Result Type ID
3967 // Ops[1] = Condition ID
3968 // Ops[2] = True Constant ID
3969 // Ops[3] = False Constant ID
3970 SPIRVOperandList Ops;
3971
3972 // Find SPIRV instruction for parameter type.
3973 auto Ty = I.getType();
3974 if (Ty->isPointerTy()) {
3975 auto PointeeTy = Ty->getPointerElementType();
3976 if (PointeeTy->isStructTy() &&
3977 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3978 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003979 } else {
3980 // Selecting between pointers requires variable pointers.
3981 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3982 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3983 setVariablePointers(true);
3984 }
David Neto22f144c2017-06-12 14:26:21 -04003985 }
3986 }
3987
David Neto257c3892018-04-11 13:19:45 -04003988 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
3989 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04003990
David Neto87846742018-04-11 17:36:22 -04003991 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003992 SPIRVInstList.push_back(Inst);
3993 break;
3994 }
3995 case Instruction::ExtractElement: {
3996 // Handle <4 x i8> type manually.
3997 Type *CompositeTy = I.getOperand(0)->getType();
3998 if (is4xi8vec(CompositeTy)) {
3999 //
4000 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4001 // <4 x i8>.
4002 //
4003
4004 //
4005 // Generate OpShiftRightLogical
4006 //
4007 // Ops[0] = Result Type ID
4008 // Ops[1] = Operand 0
4009 // Ops[2] = Operand 1
4010 //
4011 SPIRVOperandList Ops;
4012
David Neto257c3892018-04-11 13:19:45 -04004013 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004014
4015 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004016 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004017
4018 uint32_t Op1ID = 0;
4019 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4020 // Handle constant index.
4021 uint64_t Idx = CI->getZExtValue();
4022 Value *ShiftAmount =
4023 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4024 Op1ID = VMap[ShiftAmount];
4025 } else {
4026 // Handle variable index.
4027 SPIRVOperandList TmpOps;
4028
David Neto257c3892018-04-11 13:19:45 -04004029 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4030 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004031
4032 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004033 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004034
4035 Op1ID = nextID;
4036
David Neto87846742018-04-11 17:36:22 -04004037 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004038 SPIRVInstList.push_back(TmpInst);
4039 }
David Neto257c3892018-04-11 13:19:45 -04004040 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004041
4042 uint32_t ShiftID = nextID;
4043
David Neto87846742018-04-11 17:36:22 -04004044 auto *Inst =
4045 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004046 SPIRVInstList.push_back(Inst);
4047
4048 //
4049 // Generate OpBitwiseAnd
4050 //
4051 // Ops[0] = Result Type ID
4052 // Ops[1] = Operand 0
4053 // Ops[2] = Operand 1
4054 //
4055 Ops.clear();
4056
David Neto257c3892018-04-11 13:19:45 -04004057 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004058
4059 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004060 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004061
David Neto9b2d6252017-09-06 15:47:37 -04004062 // Reset mapping for this value to the result of the bitwise and.
4063 VMap[&I] = nextID;
4064
David Neto87846742018-04-11 17:36:22 -04004065 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004066 SPIRVInstList.push_back(Inst);
4067 break;
4068 }
4069
4070 // Ops[0] = Result Type ID
4071 // Ops[1] = Composite ID
4072 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4073 SPIRVOperandList Ops;
4074
David Neto257c3892018-04-11 13:19:45 -04004075 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004076
4077 spv::Op Opcode = spv::OpCompositeExtract;
4078 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004079 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004080 } else {
David Neto257c3892018-04-11 13:19:45 -04004081 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004082 Opcode = spv::OpVectorExtractDynamic;
4083 }
4084
David Neto87846742018-04-11 17:36:22 -04004085 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004086 SPIRVInstList.push_back(Inst);
4087 break;
4088 }
4089 case Instruction::InsertElement: {
4090 // Handle <4 x i8> type manually.
4091 Type *CompositeTy = I.getOperand(0)->getType();
4092 if (is4xi8vec(CompositeTy)) {
4093 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4094 uint32_t CstFFID = VMap[CstFF];
4095
4096 uint32_t ShiftAmountID = 0;
4097 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4098 // Handle constant index.
4099 uint64_t Idx = CI->getZExtValue();
4100 Value *ShiftAmount =
4101 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4102 ShiftAmountID = VMap[ShiftAmount];
4103 } else {
4104 // Handle variable index.
4105 SPIRVOperandList TmpOps;
4106
David Neto257c3892018-04-11 13:19:45 -04004107 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4108 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004109
4110 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004111 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004112
4113 ShiftAmountID = nextID;
4114
David Neto87846742018-04-11 17:36:22 -04004115 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004116 SPIRVInstList.push_back(TmpInst);
4117 }
4118
4119 //
4120 // Generate mask operations.
4121 //
4122
4123 // ShiftLeft mask according to index of insertelement.
4124 SPIRVOperandList Ops;
4125
David Neto257c3892018-04-11 13:19:45 -04004126 const uint32_t ResTyID = lookupType(CompositeTy);
4127 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004128
4129 uint32_t MaskID = nextID;
4130
David Neto87846742018-04-11 17:36:22 -04004131 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004132 SPIRVInstList.push_back(Inst);
4133
4134 // Inverse mask.
4135 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004136 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004137
4138 uint32_t InvMaskID = nextID;
4139
David Neto87846742018-04-11 17:36:22 -04004140 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004141 SPIRVInstList.push_back(Inst);
4142
4143 // Apply mask.
4144 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004145 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004146
4147 uint32_t OrgValID = nextID;
4148
David Neto87846742018-04-11 17:36:22 -04004149 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004150 SPIRVInstList.push_back(Inst);
4151
4152 // Create correct value according to index of insertelement.
4153 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004154 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4155 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004156
4157 uint32_t InsertValID = nextID;
4158
David Neto87846742018-04-11 17:36:22 -04004159 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004160 SPIRVInstList.push_back(Inst);
4161
4162 // Insert value to original value.
4163 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004164 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004165
David Netoa394f392017-08-26 20:45:29 -04004166 VMap[&I] = nextID;
4167
David Neto87846742018-04-11 17:36:22 -04004168 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004169 SPIRVInstList.push_back(Inst);
4170
4171 break;
4172 }
4173
David Neto22f144c2017-06-12 14:26:21 -04004174 SPIRVOperandList Ops;
4175
James Priced26efea2018-06-09 23:28:32 +01004176 // Ops[0] = Result Type ID
4177 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004178
4179 spv::Op Opcode = spv::OpCompositeInsert;
4180 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004181 const auto value = CI->getZExtValue();
4182 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004183 // Ops[1] = Object ID
4184 // Ops[2] = Composite ID
4185 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004186 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004187 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004188 } else {
James Priced26efea2018-06-09 23:28:32 +01004189 // Ops[1] = Composite ID
4190 // Ops[2] = Object ID
4191 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004192 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004193 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004194 Opcode = spv::OpVectorInsertDynamic;
4195 }
4196
David Neto87846742018-04-11 17:36:22 -04004197 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004198 SPIRVInstList.push_back(Inst);
4199 break;
4200 }
4201 case Instruction::ShuffleVector: {
4202 // Ops[0] = Result Type ID
4203 // Ops[1] = Vector 1 ID
4204 // Ops[2] = Vector 2 ID
4205 // Ops[3] ... Ops[n] = Components (Literal Number)
4206 SPIRVOperandList Ops;
4207
David Neto257c3892018-04-11 13:19:45 -04004208 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4209 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004210
4211 uint64_t NumElements = 0;
4212 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4213 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4214
4215 if (Cst->isNullValue()) {
4216 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004217 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004218 }
4219 } else if (const ConstantDataSequential *CDS =
4220 dyn_cast<ConstantDataSequential>(Cst)) {
4221 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4222 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004223 const auto value = CDS->getElementAsInteger(i);
4224 assert(value <= UINT32_MAX);
4225 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004226 }
4227 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4228 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4229 auto Op = CV->getOperand(i);
4230
4231 uint32_t literal = 0;
4232
4233 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4234 literal = static_cast<uint32_t>(CI->getZExtValue());
4235 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4236 literal = 0xFFFFFFFFu;
4237 } else {
4238 Op->print(errs());
4239 llvm_unreachable("Unsupported element in ConstantVector!");
4240 }
4241
David Neto257c3892018-04-11 13:19:45 -04004242 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004243 }
4244 } else {
4245 Cst->print(errs());
4246 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4247 }
4248 }
4249
David Neto87846742018-04-11 17:36:22 -04004250 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004251 SPIRVInstList.push_back(Inst);
4252 break;
4253 }
4254 case Instruction::ICmp:
4255 case Instruction::FCmp: {
4256 CmpInst *CmpI = cast<CmpInst>(&I);
4257
David Netod4ca2e62017-07-06 18:47:35 -04004258 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004259 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004260 if (isa<PointerType>(ArgTy)) {
4261 CmpI->print(errs());
4262 std::string name = I.getParent()->getParent()->getName();
4263 errs()
4264 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4265 << "in function " << name << "\n";
4266 llvm_unreachable("Pointer equality check is invalid");
4267 break;
4268 }
4269
David Neto257c3892018-04-11 13:19:45 -04004270 // Ops[0] = Result Type ID
4271 // Ops[1] = Operand 1 ID
4272 // Ops[2] = Operand 2 ID
4273 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004274
David Neto257c3892018-04-11 13:19:45 -04004275 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4276 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004277
4278 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004279 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004280 SPIRVInstList.push_back(Inst);
4281 break;
4282 }
4283 case Instruction::Br: {
4284 // Branch instrucion is deferred because it needs label's ID. Record slot's
4285 // location on SPIRVInstructionList.
4286 DeferredInsts.push_back(
4287 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4288 break;
4289 }
4290 case Instruction::Switch: {
4291 I.print(errs());
4292 llvm_unreachable("Unsupported instruction???");
4293 break;
4294 }
4295 case Instruction::IndirectBr: {
4296 I.print(errs());
4297 llvm_unreachable("Unsupported instruction???");
4298 break;
4299 }
4300 case Instruction::PHI: {
4301 // Branch instrucion is deferred because it needs label's ID. Record slot's
4302 // location on SPIRVInstructionList.
4303 DeferredInsts.push_back(
4304 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4305 break;
4306 }
4307 case Instruction::Alloca: {
4308 //
4309 // Generate OpVariable.
4310 //
4311 // Ops[0] : Result Type ID
4312 // Ops[1] : Storage Class
4313 SPIRVOperandList Ops;
4314
David Neto257c3892018-04-11 13:19:45 -04004315 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004316
David Neto87846742018-04-11 17:36:22 -04004317 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004318 SPIRVInstList.push_back(Inst);
4319 break;
4320 }
4321 case Instruction::Load: {
4322 LoadInst *LD = cast<LoadInst>(&I);
4323 //
4324 // Generate OpLoad.
4325 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004326
alan-baker5b86ed72019-02-15 08:26:50 -05004327 if (LD->getType()->isPointerTy()) {
4328 // Loading a pointer requires variable pointers.
4329 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4330 }
David Neto22f144c2017-06-12 14:26:21 -04004331
David Neto0a2f98d2017-09-15 19:38:40 -04004332 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004333 uint32_t PointerID = VMap[LD->getPointerOperand()];
4334
4335 // This is a hack to work around what looks like a driver bug.
4336 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004337 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4338 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004339 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004340 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004341 // Generate a bitwise-and of the original value with itself.
4342 // We should have been able to get away with just an OpCopyObject,
4343 // but we need something more complex to get past certain driver bugs.
4344 // This is ridiculous, but necessary.
4345 // TODO(dneto): Revisit this once drivers fix their bugs.
4346
4347 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004348 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4349 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004350
David Neto87846742018-04-11 17:36:22 -04004351 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004352 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004353 break;
4354 }
4355
4356 // This is the normal path. Generate a load.
4357
David Neto22f144c2017-06-12 14:26:21 -04004358 // Ops[0] = Result Type ID
4359 // Ops[1] = Pointer ID
4360 // Ops[2] ... Ops[n] = Optional Memory Access
4361 //
4362 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004363
David Neto22f144c2017-06-12 14:26:21 -04004364 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004365 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004366
David Neto87846742018-04-11 17:36:22 -04004367 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004368 SPIRVInstList.push_back(Inst);
4369 break;
4370 }
4371 case Instruction::Store: {
4372 StoreInst *ST = cast<StoreInst>(&I);
4373 //
4374 // Generate OpStore.
4375 //
4376
alan-baker5b86ed72019-02-15 08:26:50 -05004377 if (ST->getValueOperand()->getType()->isPointerTy()) {
4378 // Storing a pointer requires variable pointers.
4379 setVariablePointersCapabilities(
4380 ST->getValueOperand()->getType()->getPointerAddressSpace());
4381 }
4382
David Neto22f144c2017-06-12 14:26:21 -04004383 // Ops[0] = Pointer ID
4384 // Ops[1] = Object ID
4385 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4386 //
4387 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004388 SPIRVOperandList Ops;
4389 Ops << MkId(VMap[ST->getPointerOperand()])
4390 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004391
David Neto87846742018-04-11 17:36:22 -04004392 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004393 SPIRVInstList.push_back(Inst);
4394 break;
4395 }
4396 case Instruction::AtomicCmpXchg: {
4397 I.print(errs());
4398 llvm_unreachable("Unsupported instruction???");
4399 break;
4400 }
4401 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004402 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4403
4404 spv::Op opcode;
4405
4406 switch (AtomicRMW->getOperation()) {
4407 default:
4408 I.print(errs());
4409 llvm_unreachable("Unsupported instruction???");
4410 case llvm::AtomicRMWInst::Add:
4411 opcode = spv::OpAtomicIAdd;
4412 break;
4413 case llvm::AtomicRMWInst::Sub:
4414 opcode = spv::OpAtomicISub;
4415 break;
4416 case llvm::AtomicRMWInst::Xchg:
4417 opcode = spv::OpAtomicExchange;
4418 break;
4419 case llvm::AtomicRMWInst::Min:
4420 opcode = spv::OpAtomicSMin;
4421 break;
4422 case llvm::AtomicRMWInst::Max:
4423 opcode = spv::OpAtomicSMax;
4424 break;
4425 case llvm::AtomicRMWInst::UMin:
4426 opcode = spv::OpAtomicUMin;
4427 break;
4428 case llvm::AtomicRMWInst::UMax:
4429 opcode = spv::OpAtomicUMax;
4430 break;
4431 case llvm::AtomicRMWInst::And:
4432 opcode = spv::OpAtomicAnd;
4433 break;
4434 case llvm::AtomicRMWInst::Or:
4435 opcode = spv::OpAtomicOr;
4436 break;
4437 case llvm::AtomicRMWInst::Xor:
4438 opcode = spv::OpAtomicXor;
4439 break;
4440 }
4441
4442 //
4443 // Generate OpAtomic*.
4444 //
4445 SPIRVOperandList Ops;
4446
David Neto257c3892018-04-11 13:19:45 -04004447 Ops << MkId(lookupType(I.getType()))
4448 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004449
4450 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004451 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004452 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004453
4454 const auto ConstantMemorySemantics = ConstantInt::get(
4455 IntTy, spv::MemorySemanticsUniformMemoryMask |
4456 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004457 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004458
David Neto257c3892018-04-11 13:19:45 -04004459 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004460
4461 VMap[&I] = nextID;
4462
David Neto87846742018-04-11 17:36:22 -04004463 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004464 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004465 break;
4466 }
4467 case Instruction::Fence: {
4468 I.print(errs());
4469 llvm_unreachable("Unsupported instruction???");
4470 break;
4471 }
4472 case Instruction::Call: {
4473 CallInst *Call = dyn_cast<CallInst>(&I);
4474 Function *Callee = Call->getCalledFunction();
4475
Alan Baker202c8c72018-08-13 13:47:44 -04004476 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004477 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4478 // Generate an OpLoad
4479 SPIRVOperandList Ops;
4480 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004481
David Neto862b7d82018-06-14 18:48:37 -04004482 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4483 << MkId(ResourceVarDeferredLoadCalls[Call]);
4484
4485 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4486 SPIRVInstList.push_back(Inst);
4487 VMap[Call] = load_id;
4488 break;
4489
4490 } else {
4491 // This maps to an OpVariable we've already generated.
4492 // No code is generated for the call.
4493 }
4494 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004495 } else if (Callee->getName().startswith(
4496 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004497 // Don't codegen an instruction here, but instead map this call directly
4498 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004499 int spec_id = static_cast<int>(
4500 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004501 const auto &info = LocalSpecIdInfoMap[spec_id];
4502 VMap[Call] = info.variable_id;
4503 break;
David Neto862b7d82018-06-14 18:48:37 -04004504 }
4505
4506 // Sampler initializers become a load of the corresponding sampler.
4507
Kévin Petitdf71de32019-04-09 14:09:50 +01004508 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004509 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004510 const auto third_param = static_cast<unsigned>(
4511 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4512 auto sampler_value = third_param;
4513 if (clspv::Option::UseSamplerMap()) {
4514 sampler_value = getSamplerMap()[third_param].first;
4515 }
David Neto862b7d82018-06-14 18:48:37 -04004516
4517 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004518 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004519 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004520
David Neto257c3892018-04-11 13:19:45 -04004521 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004522 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004523
David Neto862b7d82018-06-14 18:48:37 -04004524 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004525 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004526 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004527 break;
4528 }
4529
Kévin Petit349c9502019-03-28 17:24:14 +00004530 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004531 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4532 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4533 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004534
Kévin Petit617a76d2019-04-04 13:54:16 +01004535 // If the switch above didn't have an entry maybe the intrinsic
4536 // is using the name mangling logic.
4537 bool usesMangler = false;
4538 if (opcode == spv::OpNop) {
4539 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4540 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4541 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4542 usesMangler = true;
4543 }
4544 }
4545
Kévin Petit349c9502019-03-28 17:24:14 +00004546 if (opcode != spv::OpNop) {
4547
David Neto22f144c2017-06-12 14:26:21 -04004548 SPIRVOperandList Ops;
4549
Kévin Petit349c9502019-03-28 17:24:14 +00004550 if (!I.getType()->isVoidTy()) {
4551 Ops << MkId(lookupType(I.getType()));
4552 }
David Neto22f144c2017-06-12 14:26:21 -04004553
Kévin Petit617a76d2019-04-04 13:54:16 +01004554 unsigned firstOperand = usesMangler ? 1 : 0;
4555 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004556 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004557 }
4558
Kévin Petit349c9502019-03-28 17:24:14 +00004559 if (!I.getType()->isVoidTy()) {
4560 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004561 }
4562
Kévin Petit349c9502019-03-28 17:24:14 +00004563 SPIRVInstruction *Inst;
4564 if (!I.getType()->isVoidTy()) {
4565 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4566 } else {
4567 Inst = new SPIRVInstruction(opcode, Ops);
4568 }
Kévin Petit8a560882019-03-21 15:24:34 +00004569 SPIRVInstList.push_back(Inst);
4570 break;
4571 }
4572
David Neto22f144c2017-06-12 14:26:21 -04004573 // spirv.copy_memory.* intrinsics become OpMemoryMemory's.
4574 if (Callee->getName().startswith("spirv.copy_memory")) {
4575 //
4576 // Generate OpCopyMemory.
4577 //
4578
4579 // Ops[0] = Dst ID
4580 // Ops[1] = Src ID
4581 // Ops[2] = Memory Access
4582 // Ops[3] = Alignment
4583
4584 auto IsVolatile =
4585 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4586
4587 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4588 : spv::MemoryAccessMaskNone;
4589
4590 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4591
4592 auto Alignment =
4593 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4594
David Neto257c3892018-04-11 13:19:45 -04004595 SPIRVOperandList Ops;
4596 Ops << MkId(VMap[Call->getArgOperand(0)])
4597 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4598 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004599
David Neto87846742018-04-11 17:36:22 -04004600 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004601
4602 SPIRVInstList.push_back(Inst);
4603
4604 break;
4605 }
4606
David Neto22f144c2017-06-12 14:26:21 -04004607 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4608 // Additionally, OpTypeSampledImage is generated.
alan-bakerf67468c2019-11-25 15:51:49 -05004609 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004610 //
4611 // Generate OpSampledImage.
4612 //
4613 // Ops[0] = Result Type ID
4614 // Ops[1] = Image ID
4615 // Ops[2] = Sampler ID
4616 //
4617 SPIRVOperandList Ops;
4618
4619 Value *Image = Call->getArgOperand(0);
4620 Value *Sampler = Call->getArgOperand(1);
4621 Value *Coordinate = Call->getArgOperand(2);
4622
4623 TypeMapType &OpImageTypeMap = getImageTypeMap();
4624 Type *ImageTy = Image->getType()->getPointerElementType();
4625 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004626 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004627 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004628
4629 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004630
4631 uint32_t SampledImageID = nextID;
4632
David Neto87846742018-04-11 17:36:22 -04004633 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004634 SPIRVInstList.push_back(Inst);
4635
4636 //
4637 // Generate OpImageSampleExplicitLod.
4638 //
4639 // Ops[0] = Result Type ID
4640 // Ops[1] = Sampled Image ID
4641 // Ops[2] = Coordinate ID
4642 // Ops[3] = Image Operands Type ID
4643 // Ops[4] ... Ops[n] = Operands ID
4644 //
4645 Ops.clear();
4646
alan-bakerf67468c2019-11-25 15:51:49 -05004647 const bool is_int_image = IsIntImageType(Image->getType());
4648 uint32_t result_type = 0;
4649 if (is_int_image) {
4650 result_type = v4int32ID;
4651 } else {
4652 result_type = lookupType(Call->getType());
4653 }
4654
4655 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4656 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004657
4658 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004659 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004660
alan-bakerf67468c2019-11-25 15:51:49 -05004661 uint32_t final_id = nextID++;
4662 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004663
alan-bakerf67468c2019-11-25 15:51:49 -05004664 uint32_t image_id = final_id;
4665 if (is_int_image) {
4666 // Int image requires a bitcast from v4int to v4uint.
4667 image_id = nextID++;
4668 }
4669
4670 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004671 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004672
4673 if (is_int_image) {
4674 // Generate the bitcast.
4675 Ops.clear();
4676 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4677 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4678 SPIRVInstList.push_back(Inst);
4679 }
David Neto22f144c2017-06-12 14:26:21 -04004680 break;
4681 }
4682
alan-bakerf67468c2019-11-25 15:51:49 -05004683 // write_image is mapped to OpImageWrite.
4684 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004685 //
4686 // Generate OpImageWrite.
4687 //
4688 // Ops[0] = Image ID
4689 // Ops[1] = Coordinate ID
4690 // Ops[2] = Texel ID
4691 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4692 // Ops[4] ... Ops[n] = (Optional) Operands ID
4693 //
4694 SPIRVOperandList Ops;
4695
4696 Value *Image = Call->getArgOperand(0);
4697 Value *Coordinate = Call->getArgOperand(1);
4698 Value *Texel = Call->getArgOperand(2);
4699
4700 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004701 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004702 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004703
4704 const bool is_int_image = IsIntImageType(Image->getType());
4705 if (is_int_image) {
4706 // Generate a bitcast to v4int and use it as the texel value.
4707 uint32_t castID = nextID++;
4708 Ops << MkId(v4int32ID) << MkId(TexelID);
4709 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4710 SPIRVInstList.push_back(cast);
4711 Ops.clear();
4712 TexelID = castID;
4713 }
David Neto257c3892018-04-11 13:19:45 -04004714 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004715
David Neto87846742018-04-11 17:36:22 -04004716 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004717 SPIRVInstList.push_back(Inst);
4718 break;
4719 }
4720
alan-bakerce179f12019-12-06 19:02:22 -05004721 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4722 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004723 //
alan-bakerce179f12019-12-06 19:02:22 -05004724 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004725 //
4726 // Ops[0] = Image ID
4727 //
alan-bakerce179f12019-12-06 19:02:22 -05004728 // Result type has components equal to the dimensionality of the image,
4729 // plus 1 if the image is arrayed.
4730 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004731 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04004732 SPIRVOperandList Ops;
4733
4734 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004735 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4736 uint32_t SizesTypeID = 0;
4737
David Neto5c22a252018-03-15 16:07:41 -04004738 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004739 const uint32_t dim = ImageDimensionality(Image->getType());
alan-bakerf906d2b2019-12-10 11:26:23 -05004740 // TODO(alan-baker): fix component calculation when arrayed images are
4741 // supported.
alan-bakerce179f12019-12-06 19:02:22 -05004742 const uint32_t components = dim;
4743 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004744 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4745 } else {
4746 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4747 }
David Neto5c22a252018-03-15 16:07:41 -04004748 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004749 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004750 spv::Op query_opcode = spv::OpImageQuerySize;
4751 if (clspv::IsSampledImageType(Image->getType())) {
4752 query_opcode = spv::OpImageQuerySizeLod;
4753 // Need explicit 0 for Lod operand.
4754 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4755 Ops << MkId(VMap[CstInt0]);
4756 }
David Neto5c22a252018-03-15 16:07:41 -04004757
4758 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004759 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004760 SPIRVInstList.push_back(QueryInst);
4761
alan-bakerce179f12019-12-06 19:02:22 -05004762 // May require an extra instruction to create the appropriate result of
4763 // the builtin function.
4764 if (clspv::IsGetImageDim(Callee)) {
4765 if (dim == 3) {
4766 // get_image_dim returns an int4 for 3D images.
4767 //
4768 // Reset value map entry since we generated an intermediate
4769 // instruction.
4770 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004771
alan-bakerce179f12019-12-06 19:02:22 -05004772 // Implement:
4773 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4774 Ops.clear();
4775 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4776 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004777
alan-bakerce179f12019-12-06 19:02:22 -05004778 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4779 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004780
alan-bakerce179f12019-12-06 19:02:22 -05004781 auto *Inst =
4782 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4783 SPIRVInstList.push_back(Inst);
4784 } else if (dim != components) {
4785 // get_image_dim return an int2 regardless of the arrayedness of the
4786 // image. If the image is arrayed an element must be dropped from the
4787 // query result.
4788 //
4789 // Reset value map entry since we generated an intermediate
4790 // instruction.
4791 VMap[&I] = nextID;
4792
4793 // Implement:
4794 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4795 Ops.clear();
4796 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4797 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4798
4799 auto *Inst =
4800 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4801 SPIRVInstList.push_back(Inst);
4802 }
4803 } else if (components > 1) {
4804 // Reset value map entry since we generated an intermediate instruction.
4805 VMap[&I] = nextID;
4806
4807 // Implement:
4808 // %result = OpCompositeExtract %uint %sizes <component number>
4809 Ops.clear();
4810 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4811
4812 uint32_t component = 0;
4813 if (IsGetImageHeight(Callee))
4814 component = 1;
4815 else if (IsGetImageDepth(Callee))
4816 component = 2;
4817 Ops << MkNum(component);
4818
4819 auto *Inst =
4820 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4821 SPIRVInstList.push_back(Inst);
4822 }
David Neto5c22a252018-03-15 16:07:41 -04004823 break;
4824 }
4825
David Neto22f144c2017-06-12 14:26:21 -04004826 // Call instrucion is deferred because it needs function's ID. Record
4827 // slot's location on SPIRVInstructionList.
4828 DeferredInsts.push_back(
4829 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4830
David Neto3fbb4072017-10-16 11:28:14 -04004831 // Check whether the implementation of this call uses an extended
4832 // instruction plus one more value-producing instruction. If so, then
4833 // reserve the id for the extra value-producing slot.
4834 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4835 if (EInst != kGlslExtInstBad) {
4836 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004837 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004838 VMap[&I] = nextID;
4839 nextID++;
4840 }
4841 break;
4842 }
4843 case Instruction::Ret: {
4844 unsigned NumOps = I.getNumOperands();
4845 if (NumOps == 0) {
4846 //
4847 // Generate OpReturn.
4848 //
David Neto87846742018-04-11 17:36:22 -04004849 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004850 } else {
4851 //
4852 // Generate OpReturnValue.
4853 //
4854
4855 // Ops[0] = Return Value ID
4856 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004857
4858 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004859
David Neto87846742018-04-11 17:36:22 -04004860 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004861 SPIRVInstList.push_back(Inst);
4862 break;
4863 }
4864 break;
4865 }
4866 }
4867}
4868
4869void SPIRVProducerPass::GenerateFuncEpilogue() {
4870 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4871
4872 //
4873 // Generate OpFunctionEnd
4874 //
4875
David Neto87846742018-04-11 17:36:22 -04004876 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004877 SPIRVInstList.push_back(Inst);
4878}
4879
4880bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004881 // Don't specialize <4 x i8> if i8 is generally supported.
4882 if (clspv::Option::Int8Support())
4883 return false;
4884
David Neto22f144c2017-06-12 14:26:21 -04004885 LLVMContext &Context = Ty->getContext();
4886 if (Ty->isVectorTy()) {
4887 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4888 Ty->getVectorNumElements() == 4) {
4889 return true;
4890 }
4891 }
4892
4893 return false;
4894}
4895
4896void SPIRVProducerPass::HandleDeferredInstruction() {
4897 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4898 ValueMapType &VMap = getValueMap();
4899 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4900
4901 for (auto DeferredInst = DeferredInsts.rbegin();
4902 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4903 Value *Inst = std::get<0>(*DeferredInst);
4904 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4905 if (InsertPoint != SPIRVInstList.end()) {
4906 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4907 ++InsertPoint;
4908 }
4909 }
4910
4911 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004912 // Check whether this branch needs to be preceeded by merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004913 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004914 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004915 //
4916 // Generate OpLoopMerge.
4917 //
4918 // Ops[0] = Merge Block ID
4919 // Ops[1] = Continue Target ID
4920 // Ops[2] = Selection Control
4921 SPIRVOperandList Ops;
4922
alan-baker06cad652019-12-03 17:56:47 -05004923 auto MergeBB = MergeBlocks[BrBB];
4924 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04004925 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004926 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004927 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004928 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004929
David Neto87846742018-04-11 17:36:22 -04004930 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004931 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05004932 } else if (MergeBlocks.count(BrBB)) {
4933 //
4934 // Generate OpSelectionMerge.
4935 //
4936 // Ops[0] = Merge Block ID
4937 // Ops[1] = Selection Control
4938 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004939
alan-baker06cad652019-12-03 17:56:47 -05004940 auto MergeBB = MergeBlocks[BrBB];
4941 uint32_t MergeBBID = VMap[MergeBB];
4942 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004943
alan-baker06cad652019-12-03 17:56:47 -05004944 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
4945 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04004946 }
4947
4948 if (Br->isConditional()) {
4949 //
4950 // Generate OpBranchConditional.
4951 //
4952 // Ops[0] = Condition ID
4953 // Ops[1] = True Label ID
4954 // Ops[2] = False Label ID
4955 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4956 SPIRVOperandList Ops;
4957
4958 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004959 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004960 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004961
4962 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004963
David Neto87846742018-04-11 17:36:22 -04004964 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004965 SPIRVInstList.insert(InsertPoint, BrInst);
4966 } else {
4967 //
4968 // Generate OpBranch.
4969 //
4970 // Ops[0] = Target Label ID
4971 SPIRVOperandList Ops;
4972
4973 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004974 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004975
David Neto87846742018-04-11 17:36:22 -04004976 SPIRVInstList.insert(InsertPoint,
4977 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004978 }
4979 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004980 if (PHI->getType()->isPointerTy()) {
4981 // OpPhi on pointers requires variable pointers.
4982 setVariablePointersCapabilities(
4983 PHI->getType()->getPointerAddressSpace());
4984 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4985 setVariablePointers(true);
4986 }
4987 }
4988
David Neto22f144c2017-06-12 14:26:21 -04004989 //
4990 // Generate OpPhi.
4991 //
4992 // Ops[0] = Result Type ID
4993 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
4994 SPIRVOperandList Ops;
4995
David Neto257c3892018-04-11 13:19:45 -04004996 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004997
David Neto22f144c2017-06-12 14:26:21 -04004998 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
4999 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005000 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005001 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005002 }
5003
5004 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005005 InsertPoint,
5006 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005007 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5008 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005009 auto callee_name = Callee->getName();
5010 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005011
5012 if (EInst) {
5013 uint32_t &ExtInstImportID = getOpExtInstImportID();
5014
5015 //
5016 // Generate OpExtInst.
5017 //
5018
5019 // Ops[0] = Result Type ID
5020 // Ops[1] = Set ID (OpExtInstImport ID)
5021 // Ops[2] = Instruction Number (Literal Number)
5022 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5023 SPIRVOperandList Ops;
5024
David Neto862b7d82018-06-14 18:48:37 -04005025 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5026 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005027
David Neto22f144c2017-06-12 14:26:21 -04005028 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5029 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005030 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005031 }
5032
David Neto87846742018-04-11 17:36:22 -04005033 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5034 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005035 SPIRVInstList.insert(InsertPoint, ExtInst);
5036
David Neto3fbb4072017-10-16 11:28:14 -04005037 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5038 if (IndirectExtInst != kGlslExtInstBad) {
5039 // Generate one more instruction that uses the result of the extended
5040 // instruction. Its result id is one more than the id of the
5041 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005042 LLVMContext &Context =
5043 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005044
David Neto3fbb4072017-10-16 11:28:14 -04005045 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5046 &VMap, &SPIRVInstList, &InsertPoint](
5047 spv::Op opcode, Constant *constant) {
5048 //
5049 // Generate instruction like:
5050 // result = opcode constant <extinst-result>
5051 //
5052 // Ops[0] = Result Type ID
5053 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5054 // Ops[2] = Operand 1 ;; the result of the extended instruction
5055 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005056
David Neto3fbb4072017-10-16 11:28:14 -04005057 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005058 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005059
5060 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5061 constant = ConstantVector::getSplat(
5062 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5063 }
David Neto257c3892018-04-11 13:19:45 -04005064 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005065
5066 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005067 InsertPoint, new SPIRVInstruction(
5068 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005069 };
5070
5071 switch (IndirectExtInst) {
5072 case glsl::ExtInstFindUMsb: // Implementing clz
5073 generate_extra_inst(
5074 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5075 break;
5076 case glsl::ExtInstAcos: // Implementing acospi
5077 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005078 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005079 case glsl::ExtInstAtan2: // Implementing atan2pi
5080 generate_extra_inst(
5081 spv::OpFMul,
5082 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5083 break;
5084
5085 default:
5086 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005087 }
David Neto22f144c2017-06-12 14:26:21 -04005088 }
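        // A sketch of the indirect lowerings above: clz(x) on a 32-bit value
        // becomes FindUMsb followed by OpISub (31 - msb), while acospi,
        // asinpi, atanpi and atan2pi become the corresponding GLSL extended
        // instruction followed by an OpFMul with 1/pi.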
David Neto3fbb4072017-10-16 11:28:14 -04005089
alan-bakerb39c8262019-03-08 14:03:37 -05005090 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005091 //
5092 // Generate OpBitCount
5093 //
5094 // Ops[0] = Result Type ID
5095 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005096 SPIRVOperandList Ops;
5097 Ops << MkId(lookupType(Call->getType()))
5098 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005099
5100 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005101 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005102 std::get<2>(*DeferredInst), Ops));
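        // For example, OpenCL popcount(x) with a 32-bit x becomes roughly
        //   %result = OpBitCount %uint %x
        // and the vector overloads map the same way, element-wise.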
David Netoab03f432017-11-03 17:00:44 -04005103
David Neto862b7d82018-06-14 18:48:37 -04005104 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005105
5106 // Generate an OpCompositeConstruct
5107 SPIRVOperandList Ops;
5108
5109 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005110 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005111
5112 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005113 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005114 }
5115
5116 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005117 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5118 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005119
Alan Baker202c8c72018-08-13 13:47:44 -04005120 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5121
5122 // We have already mapped the call's result value to an ID.
5123 // Don't generate any code now.
5124
5125 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005126
5127 // We have already mapped the call's result value to an ID.
5128 // Don't generate any code now.
5129
David Neto22f144c2017-06-12 14:26:21 -04005130 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005131 if (Call->getType()->isPointerTy()) {
5132 // Functions returning pointers require variable pointers.
5133 setVariablePointersCapabilities(
5134 Call->getType()->getPointerAddressSpace());
5135 }
5136
David Neto22f144c2017-06-12 14:26:21 -04005137 //
5138 // Generate OpFunctionCall.
5139 //
5140
5141 // Ops[0] = Result Type ID
5142 // Ops[1] = Callee Function ID
5143 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5144 SPIRVOperandList Ops;
5145
David Neto862b7d82018-06-14 18:48:37 -04005146 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005147
5148 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005149 if (CalleeID == 0) {
5150 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005151 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005152 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5153 // causes an infinite loop. Instead, go ahead and generate
5154 // the bad function call. A validator will catch the 0-Id.
5155 // llvm_unreachable("Can't translate function call");
5156 }
David Neto22f144c2017-06-12 14:26:21 -04005157
David Neto257c3892018-04-11 13:19:45 -04005158 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005159
David Neto22f144c2017-06-12 14:26:21 -04005160 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5161 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005162 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005163 auto *operand_type = operand->getType();
5164 // Images and samplers can be passed as function parameters without
5165 // variable pointers.
5166 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5167 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005168 auto sc =
5169 GetStorageClass(operand->getType()->getPointerAddressSpace());
5170 if (sc == spv::StorageClassStorageBuffer) {
5171 // Passing SSBO by reference requires variable pointers storage
5172 // buffer.
5173 setVariablePointersStorageBuffer(true);
5174 } else if (sc == spv::StorageClassWorkgroup) {
5175 // Workgroup references require variable pointers if they are not
5176 // memory object declarations.
5177 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5178 // Workgroup accessor represents a variable reference.
5179 if (!operand_call->getCalledFunction()->getName().startswith(
5180 clspv::WorkgroupAccessorFunction()))
5181 setVariablePointers(true);
5182 } else {
5183 // Arguments are function parameters.
5184 if (!isa<Argument>(operand))
5185 setVariablePointers(true);
5186 }
5187 }
5188 }
5189 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005190 }
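        // A sketch of the effect: passing a pointer into an SSBO (for
        // example &buf[i]) as a call argument requires the storage-buffer
        // flavor of variable pointers, while a workgroup pointer requires
        // full VariablePointers unless it comes directly from a workgroup
        // accessor call or an enclosing function argument.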
5191
David Neto87846742018-04-11 17:36:22 -04005192 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5193 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005194 SPIRVInstList.insert(InsertPoint, CallInst);
5195 }
5196 }
5197 }
5198}
5199
David Neto1a1a0582017-07-07 12:01:44 -04005200void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005201 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005202 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005203 }
David Neto1a1a0582017-07-07 12:01:44 -04005204
5205 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005206
5207 // Find an iterator pointing just past the last decoration.
5208 bool seen_decorations = false;
5209 auto DecoInsertPoint =
5210 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5211 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5212 const bool is_decoration =
5213 Inst->getOpcode() == spv::OpDecorate ||
5214 Inst->getOpcode() == spv::OpMemberDecorate;
5215 if (is_decoration) {
5216 seen_decorations = true;
5217 return false;
5218 } else {
5219 return seen_decorations;
5220 }
5221 });
5222
David Netoc6f3ab22018-04-06 18:02:31 -04005223 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5224 // instructions we generated earlier.
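  // For example, a pointer used to index a buffer of 32-bit floats would get
  //   OpDecorate %ptr ArrayStride 4
  // where the stride is the alloc size of the element type computed below.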
David Neto85082642018-03-24 06:55:20 -07005225 for (auto *type : getTypesNeedingArrayStride()) {
5226 Type *elemTy = nullptr;
5227 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5228 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005229 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005230 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005231 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005232 elemTy = seqTy->getSequentialElementType();
5233 } else {
5234 errs() << "Unhandled strided type " << *type << "\n";
5235 llvm_unreachable("Unhandled strided type");
5236 }
David Neto1a1a0582017-07-07 12:01:44 -04005237
5238 // Ops[0] = Target ID
5239 // Ops[1] = Decoration (ArrayStride)
5240 // Ops[2] = Stride number (Literal Number)
5241 SPIRVOperandList Ops;
5242
David Neto85082642018-03-24 06:55:20 -07005243 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005244 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005245
5246 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5247 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005248
David Neto87846742018-04-11 17:36:22 -04005249 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005250 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5251 }
David Netoc6f3ab22018-04-06 18:02:31 -04005252
5253 // Emit SpecId decorations targeting the array size value.
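  // For example, if a __local kernel argument was assigned spec id 3, this
  // emits "OpDecorate %array_size_id SpecId 3" so the host can specialize
  // the workgroup array length without recompiling the module.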
Alan Baker202c8c72018-08-13 13:47:44 -04005254 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5255 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005256 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005257 SPIRVOperandList Ops;
5258 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5259 << MkNum(arg_info.spec_id);
5260 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005261 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005262 }
David Neto1a1a0582017-07-07 12:01:44 -04005263}
5264
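// Maps an Itanium-mangled OpenCL builtin name to a GLSL.std.450 extended
// instruction, or kGlslExtInstBad if there is no direct mapping. As an
// approximate reading aid for the mangled names below: 'c', 's', 'i', 'l'
// are signed char/short/int/long; 'h', 't', 'j', 'm' are their unsigned
// counterparts; 'f' is float; "Dv<N>_" prefixes a vector of N elements; and
// "S_" is a substitution repeating an earlier parameter type.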
David Neto22f144c2017-06-12 14:26:21 -04005265glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5266 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005267 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5268 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5269 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5270 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005271 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5272 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5273 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5274 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005275 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5276 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5277 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5278 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005279 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5280 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5281 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5282 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005283 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5284 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5285 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5286 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5287 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5288 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5289 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5290 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005291 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5292 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5293 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5294 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5295 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5296 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5297 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5298 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005299 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5300 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5301 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5302 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5303 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5304 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5305 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5306 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005307 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5308 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5309 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5310 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5311 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5312 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5313 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5314 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005315 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5316 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5317 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5318 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005319 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5320 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5321 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5322 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5323 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5324 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5325 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5326 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005327 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5328 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5329 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5330 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5331 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5332 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5333 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5334 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005335 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5336 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5337 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5338 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5339 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5340 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5341 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5342 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005343 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5344 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5345 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5346 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5347 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5348 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5349 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5350 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005351 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5352 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5353 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5354 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5355 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005356 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5357 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5358 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5359 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5360 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5361 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5362 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5363 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005364 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5365 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5366 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5367 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5368 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5369 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5370 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5371 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005372 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5373 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5374 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5375 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5376 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5377 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5378 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5379 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005380 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5381 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5382 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5383 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5384 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5385 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5386 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5387 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005388 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5389 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5390 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5391 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5392 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5393 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5394 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5395 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5396 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5397 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5398 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5399 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5400 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5401 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5402 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5403 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5404 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5405 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5406 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5407 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5408 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5409 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5410 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5411 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5412 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5413 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5414 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5415 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5416 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5417 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5418 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5419 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5420 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5421 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5422 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5423 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5424 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5425 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5426 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5427 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5428 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005429 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005430 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5431 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5432 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5433 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5434 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5435 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5436 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5437 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5438 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5439 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5440 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5441 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5442 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5443 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5444 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5445 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5446 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005447 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005448 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005449 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005450 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005451 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005452 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5453 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005454 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005455 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5456 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5457 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005458 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5459 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5460 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5461 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005462 .Default(kGlslExtInstBad);
5463}
5464
5465glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5466 // Check indirect cases.
5467 return StringSwitch<glsl::ExtInst>(Name)
5468 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5469 // Use exact match on float arg because these need a multiply
5470 // of a constant of the right floating point type.
5471 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5472 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5473 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5474 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5475 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5476 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5477 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5478 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005479 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5480 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5481 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5482 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005483 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5484 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5485 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5486 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5487 .Default(kGlslExtInstBad);
5488}
5489
alan-bakerb6b09dc2018-11-08 16:59:28 -05005490glsl::ExtInst
5491SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005492 auto direct = getExtInstEnum(Name);
5493 if (direct != kGlslExtInstBad)
5494 return direct;
5495 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005496}
5497
David Neto22f144c2017-06-12 14:26:21 -04005498void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005499 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005500}
5501
5502void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5503 WriteOneWord(Inst->getResultID());
5504}
5505
5506void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5507 // High 16 bit : Word Count
5508 // Low 16 bit : Opcode
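  // For example, a 32-bit float type declaration (%f32 = OpTypeFloat 32)
  // occupies 3 words and has opcode 22, so its first word is
  // (3 << 16) | 22 = 0x00030016.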
5509 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005510 const uint32_t count = Inst->getWordCount();
5511 if (count > 65535) {
5512 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5513 llvm_unreachable("Word count too high");
5514 }
David Neto22f144c2017-06-12 14:26:21 -04005515 Word |= Inst->getWordCount() << 16;
5516 WriteOneWord(Word);
5517}
5518
5519void SPIRVProducerPass::WriteOperand(SPIRVOperand *Op) {
5520 SPIRVOperandType OpTy = Op->getType();
5521 switch (OpTy) {
5522 default: {
5523 llvm_unreachable("Unsupported SPIRV Operand Type???");
5524 break;
5525 }
5526 case SPIRVOperandType::NUMBERID: {
5527 WriteOneWord(Op->getNumID());
5528 break;
5529 }
5530 case SPIRVOperandType::LITERAL_STRING: {
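    // SPIR-V literal strings are nul-terminated UTF-8, packed little-endian
    // four bytes per word and zero-padded to a word boundary. For example,
    // "abc" becomes the single word 0x00636261, while a string whose length
    // is a multiple of four gets an extra all-zero word for the terminator
    // (the unconditional WriteOneWord(LastWord) below covers both cases).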
5531 std::string Str = Op->getLiteralStr();
5532 const char *Data = Str.c_str();
5533 size_t WordSize = Str.size() / 4;
5534 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5535 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5536 }
5537
5538 uint32_t Remainder = Str.size() % 4;
5539 uint32_t LastWord = 0;
5540 if (Remainder) {
5541 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5542 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5543 }
5544 }
5545
5546 WriteOneWord(LastWord);
5547 break;
5548 }
5549 case SPIRVOperandType::LITERAL_INTEGER:
5550 case SPIRVOperandType::LITERAL_FLOAT: {
5551 auto LiteralNum = Op->getLiteralNum();
    // TODO: Handle LiteralNum carefully.
5553 for (auto Word : LiteralNum) {
5554 WriteOneWord(Word);
5555 }
5556 break;
5557 }
5558 }
5559}
5560
5561void SPIRVProducerPass::WriteSPIRVBinary() {
5562 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5563
5564 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005565 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005566 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5567
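    // The opcodes below fall into three encoding groups: instructions with
    // no result id (opcode word, then every operand), instructions whose
    // result id comes first (types, labels, OpExtInstImport: opcode, result
    // id, then operands), and value-producing instructions (opcode, the
    // result-type operand, the result id, then the remaining operands).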
5568 switch (Opcode) {
5569 default: {
David Neto5c22a252018-03-15 16:07:41 -04005570 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005571 llvm_unreachable("Unsupported SPIRV instruction");
5572 break;
5573 }
5574 case spv::OpCapability:
5575 case spv::OpExtension:
5576 case spv::OpMemoryModel:
5577 case spv::OpEntryPoint:
5578 case spv::OpExecutionMode:
5579 case spv::OpSource:
5580 case spv::OpDecorate:
5581 case spv::OpMemberDecorate:
5582 case spv::OpBranch:
5583 case spv::OpBranchConditional:
5584 case spv::OpSelectionMerge:
5585 case spv::OpLoopMerge:
5586 case spv::OpStore:
5587 case spv::OpImageWrite:
5588 case spv::OpReturnValue:
5589 case spv::OpControlBarrier:
5590 case spv::OpMemoryBarrier:
5591 case spv::OpReturn:
5592 case spv::OpFunctionEnd:
5593 case spv::OpCopyMemory: {
5594 WriteWordCountAndOpcode(Inst);
5595 for (uint32_t i = 0; i < Ops.size(); i++) {
5596 WriteOperand(Ops[i]);
5597 }
5598 break;
5599 }
5600 case spv::OpTypeBool:
5601 case spv::OpTypeVoid:
5602 case spv::OpTypeSampler:
5603 case spv::OpLabel:
5604 case spv::OpExtInstImport:
5605 case spv::OpTypePointer:
5606 case spv::OpTypeRuntimeArray:
5607 case spv::OpTypeStruct:
5608 case spv::OpTypeImage:
5609 case spv::OpTypeSampledImage:
5610 case spv::OpTypeInt:
5611 case spv::OpTypeFloat:
5612 case spv::OpTypeArray:
5613 case spv::OpTypeVector:
5614 case spv::OpTypeFunction: {
5615 WriteWordCountAndOpcode(Inst);
5616 WriteResultID(Inst);
5617 for (uint32_t i = 0; i < Ops.size(); i++) {
5618 WriteOperand(Ops[i]);
5619 }
5620 break;
5621 }
5622 case spv::OpFunction:
5623 case spv::OpFunctionParameter:
5624 case spv::OpAccessChain:
5625 case spv::OpPtrAccessChain:
5626 case spv::OpInBoundsAccessChain:
5627 case spv::OpUConvert:
5628 case spv::OpSConvert:
5629 case spv::OpConvertFToU:
5630 case spv::OpConvertFToS:
5631 case spv::OpConvertUToF:
5632 case spv::OpConvertSToF:
5633 case spv::OpFConvert:
5634 case spv::OpConvertPtrToU:
5635 case spv::OpConvertUToPtr:
5636 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005637 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005638 case spv::OpIAdd:
5639 case spv::OpFAdd:
5640 case spv::OpISub:
5641 case spv::OpFSub:
5642 case spv::OpIMul:
5643 case spv::OpFMul:
5644 case spv::OpUDiv:
5645 case spv::OpSDiv:
5646 case spv::OpFDiv:
5647 case spv::OpUMod:
5648 case spv::OpSRem:
5649 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005650 case spv::OpUMulExtended:
5651 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005652 case spv::OpBitwiseOr:
5653 case spv::OpBitwiseXor:
5654 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005655 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005656 case spv::OpShiftLeftLogical:
5657 case spv::OpShiftRightLogical:
5658 case spv::OpShiftRightArithmetic:
5659 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005660 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005661 case spv::OpCompositeExtract:
5662 case spv::OpVectorExtractDynamic:
5663 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005664 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005665 case spv::OpVectorInsertDynamic:
5666 case spv::OpVectorShuffle:
5667 case spv::OpIEqual:
5668 case spv::OpINotEqual:
5669 case spv::OpUGreaterThan:
5670 case spv::OpUGreaterThanEqual:
5671 case spv::OpULessThan:
5672 case spv::OpULessThanEqual:
5673 case spv::OpSGreaterThan:
5674 case spv::OpSGreaterThanEqual:
5675 case spv::OpSLessThan:
5676 case spv::OpSLessThanEqual:
5677 case spv::OpFOrdEqual:
5678 case spv::OpFOrdGreaterThan:
5679 case spv::OpFOrdGreaterThanEqual:
5680 case spv::OpFOrdLessThan:
5681 case spv::OpFOrdLessThanEqual:
5682 case spv::OpFOrdNotEqual:
5683 case spv::OpFUnordEqual:
5684 case spv::OpFUnordGreaterThan:
5685 case spv::OpFUnordGreaterThanEqual:
5686 case spv::OpFUnordLessThan:
5687 case spv::OpFUnordLessThanEqual:
5688 case spv::OpFUnordNotEqual:
5689 case spv::OpExtInst:
5690 case spv::OpIsInf:
5691 case spv::OpIsNan:
5692 case spv::OpAny:
5693 case spv::OpAll:
5694 case spv::OpUndef:
5695 case spv::OpConstantNull:
5696 case spv::OpLogicalOr:
5697 case spv::OpLogicalAnd:
5698 case spv::OpLogicalNot:
5699 case spv::OpLogicalNotEqual:
5700 case spv::OpConstantComposite:
5701 case spv::OpSpecConstantComposite:
5702 case spv::OpConstantTrue:
5703 case spv::OpConstantFalse:
5704 case spv::OpConstant:
5705 case spv::OpSpecConstant:
5706 case spv::OpVariable:
5707 case spv::OpFunctionCall:
5708 case spv::OpSampledImage:
5709 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005710 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005711 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005712 case spv::OpSelect:
5713 case spv::OpPhi:
5714 case spv::OpLoad:
5715 case spv::OpAtomicIAdd:
5716 case spv::OpAtomicISub:
5717 case spv::OpAtomicExchange:
5718 case spv::OpAtomicIIncrement:
5719 case spv::OpAtomicIDecrement:
5720 case spv::OpAtomicCompareExchange:
5721 case spv::OpAtomicUMin:
5722 case spv::OpAtomicSMin:
5723 case spv::OpAtomicUMax:
5724 case spv::OpAtomicSMax:
5725 case spv::OpAtomicAnd:
5726 case spv::OpAtomicOr:
5727 case spv::OpAtomicXor:
5728 case spv::OpDot: {
5729 WriteWordCountAndOpcode(Inst);
5730 WriteOperand(Ops[0]);
5731 WriteResultID(Inst);
5732 for (uint32_t i = 1; i < Ops.size(); i++) {
5733 WriteOperand(Ops[i]);
5734 }
5735 break;
5736 }
5737 }
5738 }
5739}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005740
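// Returns true if |type| can be initialized with OpConstantNull: scalars,
// vectors, pointers, arrays and structs of them qualify, while images,
// samplers and other opaque-struct-backed types do not.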
alan-bakerb6b09dc2018-11-08 16:59:28 -05005741bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005742 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005743 case Type::HalfTyID:
5744 case Type::FloatTyID:
5745 case Type::DoubleTyID:
5746 case Type::IntegerTyID:
5747 case Type::VectorTyID:
5748 return true;
5749 case Type::PointerTyID: {
5750 const PointerType *pointer_type = cast<PointerType>(type);
5751 if (pointer_type->getPointerAddressSpace() !=
5752 AddressSpace::UniformConstant) {
5753 auto pointee_type = pointer_type->getPointerElementType();
5754 if (pointee_type->isStructTy() &&
5755 cast<StructType>(pointee_type)->isOpaque()) {
5756 // Images and samplers are not nullable.
5757 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005758 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005759 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005760 return true;
5761 }
5762 case Type::ArrayTyID:
5763 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5764 case Type::StructTyID: {
5765 const StructType *struct_type = cast<StructType>(type);
5766 // Images and samplers are not nullable.
5767 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005768 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005769 for (const auto element : struct_type->elements()) {
5770 if (!IsTypeNullable(element))
5771 return false;
5772 }
5773 return true;
5774 }
5775 default:
5776 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005777 }
5778}
Alan Bakerfcda9482018-10-02 17:09:59 -04005779
5780void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5781 if (auto *offsets_md =
5782 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
5784 // operand is the type and the second is a vector of offsets.
5785 for (const auto *operand : offsets_md->operands()) {
5786 const auto *pair = cast<MDTuple>(operand);
5787 auto *type =
5788 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5789 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5790 std::vector<uint32_t> offsets;
5791 for (const Metadata *offset_md : offset_vector->operands()) {
5792 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005793 offsets.push_back(static_cast<uint32_t>(
5794 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005795 }
5796 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5797 }
5798 }
5799
5800 if (auto *sizes_md =
5801 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5802 // Metadata is stored as key-value pair operands. The first element of each
5803 // operand is the type and the second is a triple of sizes: type size in
5804 // bits, store size and alloc size.
5805 for (const auto *operand : sizes_md->operands()) {
5806 const auto *pair = cast<MDTuple>(operand);
5807 auto *type =
5808 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5809 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5810 uint64_t type_size_in_bits =
5811 cast<ConstantInt>(
5812 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5813 ->getZExtValue();
5814 uint64_t type_store_size =
5815 cast<ConstantInt>(
5816 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5817 ->getZExtValue();
5818 uint64_t type_alloc_size =
5819 cast<ConstantInt>(
5820 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5821 ->getZExtValue();
5822 RemappedUBOTypeSizes.insert(std::make_pair(
5823 type, std::make_tuple(type_size_in_bits, type_store_size,
5824 type_alloc_size)));
5825 }
5826 }
5827}
5828
5829uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5830 const DataLayout &DL) {
5831 auto iter = RemappedUBOTypeSizes.find(type);
5832 if (iter != RemappedUBOTypeSizes.end()) {
5833 return std::get<0>(iter->second);
5834 }
5835
5836 return DL.getTypeSizeInBits(type);
5837}
5838
5839uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5840 auto iter = RemappedUBOTypeSizes.find(type);
5841 if (iter != RemappedUBOTypeSizes.end()) {
5842 return std::get<1>(iter->second);
5843 }
5844
5845 return DL.getTypeStoreSize(type);
5846}
5847
5848uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5849 auto iter = RemappedUBOTypeSizes.find(type);
5850 if (iter != RemappedUBOTypeSizes.end()) {
5851 return std::get<2>(iter->second);
5852 }
5853
5854 return DL.getTypeAllocSize(type);
5855}
alan-baker5b86ed72019-02-15 08:26:50 -05005856
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005857void SPIRVProducerPass::setVariablePointersCapabilities(
5858 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005859 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5860 setVariablePointersStorageBuffer(true);
5861 } else {
5862 setVariablePointers(true);
5863 }
5864}
5865
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005866Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005867 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5868 return GetBasePointer(gep->getPointerOperand());
5869 }
5870
5871 // Conservatively return |v|.
5872 return v;
5873}
5874
5875bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5876 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5877 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5878 if (lhs_call->getCalledFunction()->getName().startswith(
5879 clspv::ResourceAccessorFunction()) &&
5880 rhs_call->getCalledFunction()->getName().startswith(
5881 clspv::ResourceAccessorFunction())) {
5882 // For resource accessors, match descriptor set and binding.
5883 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5884 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5885 return true;
5886 } else if (lhs_call->getCalledFunction()->getName().startswith(
5887 clspv::WorkgroupAccessorFunction()) &&
5888 rhs_call->getCalledFunction()->getName().startswith(
5889 clspv::WorkgroupAccessorFunction())) {
5890 // For workgroup resources, match spec id.
5891 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5892 return true;
5893 }
5894 }
5895 }
5896
5897 return false;
5898}
5899
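// Implements the "same object" relaxation from the variable-pointers rules:
// an OpSelect or OpPhi over storage-buffer pointers can avoid the full
// VariablePointers capability when every incoming pointer is based on the
// same underlying resource, or is a null (or, with the HackUndef option,
// undef) constant.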
5900bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5901 assert(inst->getType()->isPointerTy());
5902 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5903 spv::StorageClassStorageBuffer);
5904 const bool hack_undef = clspv::Option::HackUndef();
5905 if (auto *select = dyn_cast<SelectInst>(inst)) {
5906 auto *true_base = GetBasePointer(select->getTrueValue());
5907 auto *false_base = GetBasePointer(select->getFalseValue());
5908
5909 if (true_base == false_base)
5910 return true;
5911
5912 // If either the true or false operand is a null, then we satisfy the same
5913 // object constraint.
5914 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5915 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5916 return true;
5917 }
5918
5919 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5920 if (false_cst->isNullValue() ||
5921 (hack_undef && isa<UndefValue>(false_base)))
5922 return true;
5923 }
5924
5925 if (sameResource(true_base, false_base))
5926 return true;
5927 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5928 Value *value = nullptr;
5929 bool ok = true;
5930 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5931 auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same
      // object.
5934 if (!value) {
5935 if (auto *cst = dyn_cast<Constant>(base)) {
5936 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5937 value = base;
5938 } else {
5939 value = base;
5940 }
5941 } else if (base != value) {
5942 if (auto *base_cst = dyn_cast<Constant>(base)) {
5943 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5944 continue;
5945 }
5946
5947 if (sameResource(value, base))
5948 continue;
5949
5950 // Values don't represent the same base.
5951 ok = false;
5952 }
5953 }
5954
5955 return ok;
5956 }
5957
5958 // Conservatively return false.
5959 return false;
5960}
alan-bakere9308012019-03-15 10:25:13 -04005961
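// Returns true if some call to |Arg|'s enclosing function can pass, for this
// parameter, a pointer that traces back to a resource accessor call whose
// coherent operand is set. Only global-address-space (SSBO) pointers are
// considered.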
5962bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5963 if (!Arg.getType()->isPointerTy() ||
5964 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5965 // Only SSBOs need to be annotated as coherent.
5966 return false;
5967 }
5968
5969 DenseSet<Value *> visited;
5970 std::vector<Value *> stack;
5971 for (auto *U : Arg.getParent()->users()) {
5972 if (auto *call = dyn_cast<CallInst>(U)) {
5973 stack.push_back(call->getOperand(Arg.getArgNo()));
5974 }
5975 }
5976
5977 while (!stack.empty()) {
5978 Value *v = stack.back();
5979 stack.pop_back();
5980
5981 if (!visited.insert(v).second)
5982 continue;
5983
5984 auto *resource_call = dyn_cast<CallInst>(v);
5985 if (resource_call &&
5986 resource_call->getCalledFunction()->getName().startswith(
5987 clspv::ResourceAccessorFunction())) {
5988 // If this is a resource accessor function, check if the coherent operand
5989 // is set.
5990 const auto coherent =
5991 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5992 ->getZExtValue());
5993 if (coherent == 1)
5994 return true;
5995 } else if (auto *arg = dyn_cast<Argument>(v)) {
5996 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005997 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005998 if (auto *call = dyn_cast<CallInst>(U)) {
5999 stack.push_back(call->getOperand(arg->getArgNo()));
6000 }
6001 }
6002 } else if (auto *user = dyn_cast<User>(v)) {
6003 // If this is a user, traverse all operands that could lead to resource
6004 // variables.
6005 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6006 Value *operand = user->getOperand(i);
6007 if (operand->getType()->isPointerTy() &&
6008 operand->getType()->getPointerAddressSpace() ==
6009 clspv::AddressSpace::Global) {
6010 stack.push_back(operand);
6011 }
6012 }
6013 }
6014 }
6015
6016 // No coherent resource variables encountered.
6017 return false;
6018}
alan-baker06cad652019-12-03 17:56:47 -05006019
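// Precomputes structured control-flow information for each function: for
// every loop header, its merge (exit) block and continue (back-edge target)
// block, and for other conditional branches a selection merge block. The
// MergeBlocks and ContinueBlocks maps filled in here are used later during
// code generation.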
6020void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6021 // First, track loop merges and continues.
6022 DenseSet<BasicBlock *> LoopMergesAndContinues;
6023 for (auto &F : module) {
6024 if (F.isDeclaration())
6025 continue;
6026
6027 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6028 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6029 std::deque<BasicBlock *> order;
6030 DenseSet<BasicBlock *> visited;
6031 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6032
6033 for (auto BB : order) {
6034 auto terminator = BB->getTerminator();
6035 auto branch = dyn_cast<BranchInst>(terminator);
6036 if (LI.isLoopHeader(BB)) {
6037 auto L = LI.getLoopFor(BB);
6038 BasicBlock *ContinueBB = nullptr;
6039 BasicBlock *MergeBB = nullptr;
6040
6041 MergeBB = L->getExitBlock();
6042 if (!MergeBB) {
          // The StructurizeCFG pass converts the CFG into a triangle shape
          // with single-entry, single-exit regions. As a result, a loop
          // should not have multiple exits.
6046 llvm_unreachable("Loop has multiple exits???");
6047 }
6048
6049 if (L->isLoopLatch(BB)) {
6050 ContinueBB = BB;
6051 } else {
          // From SPIR-V spec 2.11, the Continue Target must dominate the
          // back-edge block.
6054 BasicBlock *Header = L->getHeader();
6055 BasicBlock *Latch = L->getLoopLatch();
6056 for (auto *loop_block : L->blocks()) {
6057 if (loop_block == Header) {
6058 continue;
6059 }
6060
            // Check whether this block dominates the block with the
            // back-edge. The loop latch is the single block with a
            // back-edge; where possible, StructurizeCFG made the loop
            // conform to this requirement, otherwise |Latch| is nullptr.
6065 if (DT.dominates(loop_block, Latch)) {
6066 ContinueBB = loop_block;
6067 }
6068 }
6069
6070 if (!ContinueBB) {
6071 llvm_unreachable("Wrong continue block from loop");
6072 }
6073 }
6074
6075 // Record the continue and merge blocks.
6076 MergeBlocks[BB] = MergeBB;
6077 ContinueBlocks[BB] = ContinueBB;
6078 LoopMergesAndContinues.insert(MergeBB);
6079 LoopMergesAndContinues.insert(ContinueBB);
6080 } else if (branch && branch->isConditional()) {
6081 auto L = LI.getLoopFor(BB);
6082 bool HasBackedge = false;
6083 while (L && !HasBackedge) {
6084 if (L->isLoopLatch(BB)) {
6085 HasBackedge = true;
6086 }
6087 L = L->getParentLoop();
6088 }
6089
6090 if (!HasBackedge) {
6091 // Only need a merge if the branch doesn't include a loop break or
6092 // continue.
6093 auto true_bb = branch->getSuccessor(0);
6094 auto false_bb = branch->getSuccessor(1);
6095 if (!LoopMergesAndContinues.count(true_bb) &&
6096 !LoopMergesAndContinues.count(false_bb)) {
            // The StructurizeCFG pass already restructured the CFG, so just
            // use the false successor of the branch instruction as the
            // merge block.
6099 MergeBlocks[BB] = false_bb;
6100 }
6101 }
6102 }
6103 }
6104 }
6105}