// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
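      // For example (illustrative): a 7-character string packs into
      // (7 + 4) / 4 = 2 words (8 bytes including the null), while an
      // 8-character string needs 3 words because the null byte spills
      // into a third word.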
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  SPIRVOperandList(ArrayRef<std::shared_ptr<SPIRVOperand>> init)
      : contents_(init.begin(), init.end()) {}
  operator ArrayRef<std::shared_ptr<SPIRVOperand>>() { return contents_; }
  void push_back(std::shared_ptr<SPIRVOperand> op) { contents_.push_back(op); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const SmallVector<std::shared_ptr<SPIRVOperand>, 8> &getOperands() const {
    return contents_;
  }

private:
  SmallVector<std::shared_ptr<SPIRVOperand>, 8> contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::shared_ptr<SPIRVOperand> elem) {
  list.push_back(elem);
  return list;
}

std::shared_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_shared<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::shared_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_shared<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::shared_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_shared<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::shared_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_shared<SPIRVOperand>(NUMBERID, id);
}
std::shared_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_shared<SPIRVOperand>(LITERAL_STRING, str);
}
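
// Illustrative sketch (not from the original source): these helpers are
// typically combined with operator<< to assemble an operand list, e.g. for
// an OpTypeVector of four floats one might write something like:
//   SPIRVOperandList Ops;
//   Ops << MkId(float_type_id) << MkNum(4);
// where |float_type_id| is a hypothetical, previously allocated result ID.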

struct SPIRVInstruction {
  // Create an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc,
                            ArrayRef<std::shared_ptr<SPIRVOperand>> Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0),
        Operands(Ops.begin(), Ops.end()) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
  }
  // Create an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count.
  explicit SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                            ArrayRef<std::shared_ptr<SPIRVOperand>> Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID),
        Operands(Ops.begin(), Ops.end()) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::shared_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::shared_ptr<SPIRVOperand>, 4> Operands;
};
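
// Note (added for clarity): in the SPIR-V binary form, the word count and
// opcode recorded here are eventually packed into the first word of each
// instruction as (WordCount << 16) | Opcode, which is why WordCount must
// ultimately fit in 16 bits (see the comment on the WordCount member).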

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const SPIRVOperand *Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is a phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
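  // (Sketch of the intended mapping, inferred from the fields below: each
  // pointer-to-local kernel argument is expected to become a module-scope
  // Workgroup-storage array whose length is a specialization constant, so
  // the host can choose the array size at pipeline-creation time via
  // |spec_id|.)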
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
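    // The loop below reassembles the byte stream into little-endian 32-bit
    // words. For example, the first four bytes of any SPIR-V module,
    // 0x03 0x02 0x23 0x07, become the magic number word 0x07230203.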
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

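// Note: a SPIR-V module starts with a five-word header: magic number,
// version, generator ID, the ID bound, and a reserved schema word. The bound
// is not known until code generation finishes, so a placeholder is written
// here and patched later by patchHeader().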
void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates artificial LLVM IR that the Vulkan SPIR-V output
  // requires but the input module does not provide, such as global variables
  // for arguments, constants, and pointer types for argument access. It is
  // executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constant 0 and 1 so the constants are added
          // here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components = dim;
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact that the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
1001 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1002 const auto set = unsigned(
1003 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1004 const auto binding = unsigned(
1005 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1006 const auto arg_kind = clspv::ArgKind(
1007 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1008 const auto arg_index = unsigned(
1009 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001010 const auto coherent = unsigned(
1011 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001012
1013 // Find or make the resource var info for this combination.
1014 ResourceVarInfo *rv = nullptr;
1015 if (always_distinct_sets) {
1016 // Make a new resource var any time we see a different
1017 // (set,binding) pair.
1018 SetAndBinding key{set, binding};
1019 auto where = set_and_binding_map.find(key);
1020 if (where == set_and_binding_map.end()) {
1021 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001022 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001023 ResourceVarInfoList.emplace_back(rv);
1024 set_and_binding_map[key] = rv;
1025 } else {
1026 rv = where->second;
1027 }
1028 } else {
1029 // The default is to make exactly one resource for each
1030 // clspv.resource.var.* function.
1031 if (first_use) {
1032 first_use = false;
1033 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001034 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001035 ResourceVarInfoList.emplace_back(rv);
1036 } else {
1037 rv = ResourceVarInfoList.back().get();
1038 }
1039 }
1040
1041 // Now populate FunctionToResourceVarsMap.
1042 auto &mapping =
1043 FunctionToResourceVarsMap[call->getParent()->getParent()];
1044 while (mapping.size() <= arg_index) {
1045 mapping.push_back(nullptr);
1046 }
1047 mapping[arg_index] = rv;
1048 }
1049 }
1050 }
1051 }
1052
1053 // Populate ModuleOrderedResourceVars.
1054 for (Function &F : M) {
1055 auto where = FunctionToResourceVarsMap.find(&F);
1056 if (where != FunctionToResourceVarsMap.end()) {
1057 for (auto &rv : where->second) {
1058 if (rv != nullptr) {
1059 ModuleOrderedResourceVars.insert(rv);
1060 }
1061 }
1062 }
1063 }
1064 if (ShowResourceVars) {
1065 for (auto *info : ModuleOrderedResourceVars) {
1066 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1067 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1068 << "\n";
1069 }
1070 }
1071}
1072
David Neto22f144c2017-06-12 14:26:21 -04001073bool SPIRVProducerPass::FindExtInst(Module &M) {
1074 LLVMContext &Context = M.getContext();
1075 bool HasExtInst = false;
1076
1077 for (Function &F : M) {
1078 for (BasicBlock &BB : F) {
1079 for (Instruction &I : BB) {
1080 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1081 Function *Callee = Call->getCalledFunction();
1082 // Check whether this call is for extend instructions.
          auto callee_name = Callee->getName();
          const glsl::ExtInst EInst = getExtInstEnum(callee_name);
          const glsl::ExtInst IndirectEInst =
              getIndirectExtInstEnum(callee_name);

          HasExtInst |=
              (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);

          if (IndirectEInst) {
            // Register extra constants if needed.

            // Registers a type and constant for computing the result of the
            // given instruction. If the result of the instruction is a vector,
            // then make a splat vector constant with the same number of
            // elements.
            auto register_constant = [this, &I](Constant *constant) {
              FindType(constant->getType());
              FindConstant(constant);
              if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
                // Register the splat vector of the value with the same
                // width as the result of the instruction.
                auto *vec_constant = ConstantVector::getSplat(
                    static_cast<unsigned>(vectorTy->getNumElements()),
                    constant);
                FindConstant(vec_constant);
                FindType(vec_constant->getType());
              }
            };
            switch (IndirectEInst) {
            case glsl::ExtInstFindUMsb:
              // clz needs OpExtInst and OpISub with constant 31, or splat
              // vector of 31. Add it to the constant list here.
              register_constant(
                  ConstantInt::get(Type::getInt32Ty(Context), 31));
              break;
            case glsl::ExtInstAcos:
            case glsl::ExtInstAsin:
            case glsl::ExtInstAtan:
            case glsl::ExtInstAtan2:
              // We need 1/pi for acospi, asinpi, atan2pi.
              register_constant(
                  ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
              break;
            default:
              assert(false && "internally inconsistent");
            }
          }
        }
      }
    }
  }

  return HasExtInst;
}

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

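// FindTypePerFunc records every type a function will need in the SPIR-V
// module: the (possibly rewritten) function type plus the types of
// instruction results and constant operands.  Calls to the clspv resource,
// workgroup, and literal-sampler helper functions are skipped here because
// their types are collected by the resource-variable and sampler paths.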
void SPIRVProducerPass::FindTypePerFunc(Function &F) {
  // Investigate function's type.
  FunctionType *FTy = F.getFunctionType();

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    // Handle a regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
      SmallVector<Type *, 4> NewFuncParamTys;
      for (unsigned i = 0; i < FTy->getNumParams(); i++) {
        Type *ParamTy = FTy->getParamType(i);
        if (i == GVCstArgIdx) {
          Type *EleTy = ParamTy->getPointerElementType();
          ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
        }

        NewFuncParamTys.push_back(ParamTy);
      }

      FunctionType *NewFTy =
          FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
      GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
      FTy = NewFTy;
    }

    FindType(FTy);
  } else {
    // Kernel functions are emitted without parameters in SPIR-V (their
    // arguments are handled separately, e.g. as resource variables), so
    // create a parameterless function type and add it to the type map.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FindType(NewFTy);
  }

  // Investigate instructions' type in function body.
  for (BasicBlock &BB : F) {
    for (Instruction &I : BB) {
      if (isa<ShuffleVectorInst>(I)) {
        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore type for mask of shuffle vector instruction.
          if (i == 2) {
            continue;
          }

          Value *Op = I.getOperand(i);
          if (!isa<MetadataAsValue>(Op)) {
            FindType(Op->getType());
          }
        }

        FindType(I.getType());
        continue;
      }

      CallInst *Call = dyn_cast<CallInst>(&I);

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::ResourceAccessorFunction())) {
        // This is a fake call representing access to a resource variable.
        // We handle that elsewhere.
        continue;
      }

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::WorkgroupAccessorFunction())) {
        // This is a fake call representing access to a workgroup variable.
        // We handle that elsewhere.
        continue;
      }

      // Work through the operands of the instruction.
      for (unsigned i = 0; i < I.getNumOperands(); i++) {
        Value *const Op = I.getOperand(i);
        // If any of the operands is a constant, find the type!
        if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
          FindType(Op->getType());
        }
      }

      for (Use &Op : I.operands()) {
        if (isa<CallInst>(&I)) {
          // Skip operand type checks for call instructions.
          break;
        }
        if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
          if (OpCall && OpCall->getCalledFunction()->getName().startswith(
                            clspv::WorkgroupAccessorFunction())) {
            // This is a fake call representing access to a workgroup variable.
            // We handle that elsewhere.
            continue;
          }
        }
        if (!isa<MetadataAsValue>(&Op)) {
          FindType(Op->getType());
          continue;
        }
      }

      // We don't want to track the type of this call as we are going to replace
      // it.
      if (Call && (clspv::LiteralSamplerFunction() ==
                   Call->getCalledFunction()->getName())) {
        continue;
      }

      if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
        // If gep's base operand has ModuleScopePrivate address space, make gep
        // return ModuleScopePrivate address space.
        if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
          // Add pointer type with private address space for global constant to
          // type list.
          Type *EleTy = I.getType()->getPointerElementType();
          Type *NewPTy =
              PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);

          FindType(NewPTy);
          continue;
        }
      }

      FindType(I.getType());
    }
  }
}

void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
  // If we are using a sampler map, find the type of the sampler.
  if (M.getFunction(clspv::LiteralSamplerFunction()) ||
      0 < getSamplerMap().size()) {
    auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
    if (!SamplerStructTy) {
      SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
    }

    SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);

    FindType(SamplerTy);
  }
}

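// FindTypesForResourceVars decides which struct types receive the SPIR-V
// Block decoration and which array/struct types nested inside them need
// explicit layout (Offset/ArrayStride) decorations.  As a rough sketch
// (assuming default options), a buffer argument such as
//   kernel void k(global float *data) { ... }
// reaches this point as a pointer to a struct (earlier passes arrange for the
// pointee to be a struct), so that struct is inserted into
// StructTypesNeedingBlock and its element types are pushed onto the layout
// work list below.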
void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
  // Record types so they are generated.
  TypesNeedingLayout.reset();
  StructTypesNeedingBlock.reset();

  // To match older clspv codegen, generate the float type first if required
  // for images.
  for (const auto *info : ModuleOrderedResourceVars) {
    if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
        info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
      if (IsIntImageType(info->var_fn->getReturnType())) {
        // Nothing for now...
      } else if (IsUintImageType(info->var_fn->getReturnType())) {
        FindType(Type::getInt32Ty(M.getContext()));
      }

      // We need "float" either for the sampled type or for the Lod operand.
      FindType(Type::getFloatTy(M.getContext()));
    }
  }

  for (const auto *info : ModuleOrderedResourceVars) {
    Type *type = info->var_fn->getReturnType();

    switch (info->arg_kind) {
    case clspv::ArgKind::Buffer:
    case clspv::ArgKind::BufferUBO:
      if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
        StructTypesNeedingBlock.insert(sty);
      } else {
        errs() << *type << "\n";
        llvm_unreachable("Buffer arguments must map to structures!");
      }
      break;
    case clspv::ArgKind::Pod:
      if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
        StructTypesNeedingBlock.insert(sty);
      } else {
        errs() << *type << "\n";
        llvm_unreachable("POD arguments must map to structures!");
      }
      break;
    case clspv::ArgKind::ReadOnlyImage:
    case clspv::ArgKind::WriteOnlyImage:
    case clspv::ArgKind::Sampler:
      // Sampler and image types map to the pointee type but
      // in the uniform constant address space.
      type = PointerType::get(type->getPointerElementType(),
                              clspv::AddressSpace::UniformConstant);
      break;
    default:
      break;
    }

    // The converted type is the type of the OpVariable we will generate.
    // If the pointee type is an array of size zero, FindType will convert it
    // to a runtime array.
    FindType(type);
  }

  // If module constants are clustered in a storage buffer then that struct
  // needs layout decorations.
  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    for (GlobalVariable &GV : M.globals()) {
      PointerType *PTy = cast<PointerType>(GV.getType());
      const auto AS = PTy->getAddressSpace();
      const bool module_scope_constant_external_init =
          (AS == AddressSpace::Constant) && GV.hasInitializer();
      const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
      if (module_scope_constant_external_init &&
          spv::BuiltInMax == BuiltinType) {
        StructTypesNeedingBlock.insert(
            cast<StructType>(PTy->getPointerElementType()));
      }
    }
  }

  // Traverse the arrays and structures underneath each Block, and
  // mark them as needing layout.
  std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
                                StructTypesNeedingBlock.end());
  while (!work_list.empty()) {
    Type *type = work_list.back();
    work_list.pop_back();
    TypesNeedingLayout.insert(type);
    switch (type->getTypeID()) {
    case Type::ArrayTyID:
      work_list.push_back(type->getArrayElementType());
      if (!Hack_generate_runtime_array_stride_early) {
        // Remember this array type for deferred decoration.
        TypesNeedingArrayStride.insert(type);
      }
      break;
    case Type::StructTyID:
      for (auto *elem_ty : cast<StructType>(type)->elements()) {
        work_list.push_back(elem_ty);
      }
    default:
      // This type and its contained types don't get layout.
      break;
    }
  }
}

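// FindWorkgroupVars translates the module-level metadata recorded for
// pointer-to-local kernel arguments into LocalArgInfo entries.  Each metadata
// tuple holds (kernel function, argument index, SpecId), roughly of the form
//   !{void (float addrspace(3)*)* @foo, i32 0, i32 3}
// (illustrative only; the exact operand spellings are an assumption).  For
// each new SpecId four consecutive IDs are reserved here; the type-generation
// code later uses them for the array-size spec constant, the array type, and
// the pointer-to-array type (see GenerateSPIRVTypes).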
void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
  // The SpecId assignment for pointer-to-local arguments is recorded in
  // module-level metadata. Translate that information into local argument
  // information.
  NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
  if (!nmd)
    return;
  for (auto operand : nmd->operands()) {
    MDTuple *tuple = cast<MDTuple>(operand);
    ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
    Function *func = cast<Function>(fn_md->getValue());
    ConstantAsMetadata *arg_index_md =
        cast<ConstantAsMetadata>(tuple->getOperand(1));
    int arg_index = static_cast<int>(
        cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
    Argument *arg = &*(func->arg_begin() + arg_index);

    ConstantAsMetadata *spec_id_md =
        cast<ConstantAsMetadata>(tuple->getOperand(2));
    int spec_id = static_cast<int>(
        cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());

    max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
    LocalArgSpecIds[arg] = spec_id;
    if (LocalSpecIdInfoMap.count(spec_id))
      continue;

    // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
    LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
                      nextID + 1, nextID + 2,
                      nextID + 3, spec_id};
    LocalSpecIdInfoMap[spec_id] = info;
    nextID += 4;

    // Ensure the types necessary for this argument get generated.
    Type *IdxTy = Type::getInt32Ty(M.getContext());
    FindConstant(ConstantInt::get(IdxTy, 0));
    FindType(IdxTy);
    FindType(arg->getType());
  }
}

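// FindType is the memoized type walker: a type already in the UniqueVector is
// skipped, otherwise its subtypes are visited first and then the type itself
// is inserted.  Two special cases matter here: pointers to opaque structs
// (samplers and images) are redirected to the UniformConstant address space,
// and a non-zero-length array also pulls in i32 because its length will be
// emitted as an OpConstant.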
void SPIRVProducerPass::FindType(Type *Ty) {
  TypeList &TyList = getTypeList();

  if (0 != TyList.idFor(Ty)) {
    return;
  }

  if (Ty->isPointerTy()) {
    auto AddrSpace = Ty->getPointerAddressSpace();
    if ((AddressSpace::Constant == AddrSpace) ||
        (AddressSpace::Global == AddrSpace)) {
      auto PointeeTy = Ty->getPointerElementType();

      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        FindType(PointeeTy);
        auto ActualPointerTy =
            PointeeTy->getPointerTo(AddressSpace::UniformConstant);
        FindType(ActualPointerTy);
        return;
      }
    }
  }

  // By convention, LLVM array type with 0 elements will map to
  // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
  // has a constant number of elements. We need to support type of the
  // constant.
  if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
    if (arrayTy->getNumElements() > 0) {
      LLVMContext &Context = Ty->getContext();
      FindType(Type::getInt32Ty(Context));
    }
  }

  for (Type *SubTy : Ty->subtypes()) {
    FindType(SubTy);
  }

  TyList.insert(Ty);
}

void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
  // If the global variable has a (non undef) initializer.
  if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
    // Generate the constant if it's not the initializer to a module scope
    // constant that we will expect in a storage buffer.
    const bool module_scope_constant_external_init =
        (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
        clspv::Option::ModuleConstantsInStorageBuffer();
    if (!module_scope_constant_external_init) {
      FindConstant(GV.getInitializer());
    }
  }
}

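// FindConstantPerFunc walks instruction operands and registers the constants
// that later code generation will reference.  The <4 x i8> insert/extract
// cases also register the helpers used to emulate byte access on a packed
// i32: the mask 0xFF and a shift amount of index*8 (or the constant 8 when
// the index is not itself a constant).  A sketch of the idea, assuming
// element index 2:
//   packed = (packed & ~(0xFF << 16)) | ((value & 0xFF) << 16);
// The actual emulation is emitted elsewhere; only its constants are gathered
// here.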
void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
  // Investigate constants in function body.
  for (BasicBlock &BB : F) {
    for (Instruction &I : BB) {
      if (auto *call = dyn_cast<CallInst>(&I)) {
        auto name = call->getCalledFunction()->getName();
        if (name == clspv::LiteralSamplerFunction()) {
          // We've handled these constants elsewhere, so skip it.
          continue;
        }
        if (name.startswith(clspv::ResourceAccessorFunction())) {
          continue;
        }
        if (name.startswith(clspv::WorkgroupAccessorFunction())) {
          continue;
        }
        if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
          // Skip the first operand that has the SPIR-V Opcode
          for (unsigned i = 1; i < I.getNumOperands(); i++) {
            if (isa<Constant>(I.getOperand(i)) &&
                !isa<GlobalValue>(I.getOperand(i))) {
              FindConstant(I.getOperand(i));
            }
          }
          continue;
        }
      }

      if (isa<AllocaInst>(I)) {
        // An alloca instruction has a constant for the number of elements.
        // Ignore it.
        continue;
      } else if (isa<ShuffleVectorInst>(I)) {
        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore constant for mask of shuffle vector instruction.
          if (i == 2) {
            continue;
          }

          if (isa<Constant>(I.getOperand(i)) &&
              !isa<GlobalValue>(I.getOperand(i))) {
            FindConstant(I.getOperand(i));
          }
        }

        continue;
      } else if (isa<InsertElementInst>(I)) {
        // Handle InsertElement with <4 x i8> specially.
        Type *CompositeTy = I.getOperand(0)->getType();
        if (is4xi8vec(CompositeTy)) {
          LLVMContext &Context = CompositeTy->getContext();
          if (isa<Constant>(I.getOperand(0))) {
            FindConstant(I.getOperand(0));
          }

          if (isa<Constant>(I.getOperand(1))) {
            FindConstant(I.getOperand(1));
          }

          // Add mask constant 0xFF.
          Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
          FindConstant(CstFF);

          // Add shift amount constant.
          if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
            uint64_t Idx = CI->getZExtValue();
            Constant *CstShiftAmount =
                ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
            FindConstant(CstShiftAmount);
          }

          continue;
        }

        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore constant for index of InsertElement instruction.
          if (i == 2) {
            continue;
          }

          if (isa<Constant>(I.getOperand(i)) &&
              !isa<GlobalValue>(I.getOperand(i))) {
            FindConstant(I.getOperand(i));
          }
        }

        continue;
      } else if (isa<ExtractElementInst>(I)) {
        // Handle ExtractElement with <4 x i8> specially.
        Type *CompositeTy = I.getOperand(0)->getType();
        if (is4xi8vec(CompositeTy)) {
          LLVMContext &Context = CompositeTy->getContext();
          if (isa<Constant>(I.getOperand(0))) {
            FindConstant(I.getOperand(0));
          }

          // Add mask constant 0xFF.
          Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
          FindConstant(CstFF);

          // Add shift amount constant.
          if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
            uint64_t Idx = CI->getZExtValue();
            Constant *CstShiftAmount =
                ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
            FindConstant(CstShiftAmount);
          } else {
            ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
            FindConstant(Cst8);
          }

          continue;
        }

        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore constant for index of ExtractElement instruction.
          if (i == 1) {
            continue;
          }

          if (isa<Constant>(I.getOperand(i)) &&
              !isa<GlobalValue>(I.getOperand(i))) {
            FindConstant(I.getOperand(i));
          }
        }

        continue;
      } else if ((Instruction::Xor == I.getOpcode()) &&
                 I.getType()->isIntegerTy(1)) {
        // We special case for Xor where the type is i1 and one of the arguments
        // is a constant 1 (true), this is an OpLogicalNot in SPIR-V, and we
        // don't need the constant
        bool foundConstantTrue = false;
        for (Use &Op : I.operands()) {
          if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
            auto CI = cast<ConstantInt>(Op);

            if (CI->isZero() || foundConstantTrue) {
              // If we already found the true constant, we might (probably only
              // on -O0) have an OpLogicalNot which is taking a constant
              // argument, so discover it anyway.
              FindConstant(Op);
            } else {
              foundConstantTrue = true;
            }
          }
        }

        continue;
      } else if (isa<TruncInst>(I)) {
        // Special case if i8 is not generally handled.
        if (!clspv::Option::Int8Support()) {
          // For truncation to i8 we mask against 255.
          Type *ToTy = I.getType();
          if (8u == ToTy->getPrimitiveSizeInBits()) {
            LLVMContext &Context = ToTy->getContext();
            Constant *Cst255 =
                ConstantInt::get(Type::getInt32Ty(Context), 0xff);
            FindConstant(Cst255);
          }
        }
      } else if (isa<AtomicRMWInst>(I)) {
        LLVMContext &Context = I.getContext();

        FindConstant(
            ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
        FindConstant(ConstantInt::get(
            Type::getInt32Ty(Context),
            spv::MemorySemanticsUniformMemoryMask |
                spv::MemorySemanticsSequentiallyConsistentMask));
      }

      for (Use &Op : I.operands()) {
        if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
          FindConstant(Op);
        }
      }
    }
  }
}

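// FindConstant adds a constant (and, recursively, its operands or data
// elements) to the deduplicated constant list.  Global values are skipped
// when module-scope constants are clustered into a storage buffer, since
// those initializers are expected to be supplied through a storage buffer at
// runtime rather than emitted as SPIR-V constants.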
void SPIRVProducerPass::FindConstant(Value *V) {
  ValueList &CstList = getConstantList();

  // If V is already tracked, ignore it.
  if (0 != CstList.idFor(V)) {
    return;
  }

  if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
    return;
  }

  Constant *Cst = cast<Constant>(V);
  Type *CstTy = Cst->getType();

  // Handle constant with <4 x i8> type specially.
  if (is4xi8vec(CstTy)) {
    if (!isa<GlobalValue>(V)) {
      CstList.insert(V);
    }
  }

  if (Cst->getNumOperands()) {
    for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
         ++I) {
      FindConstant(*I);
    }

    CstList.insert(Cst);
    return;
  } else if (const ConstantDataSequential *CDS =
                 dyn_cast<ConstantDataSequential>(Cst)) {
    // Add constants for each element to constant list.
    for (unsigned i = 0; i < CDS->getNumElements(); i++) {
      Constant *EleCst = CDS->getElementAsConstant(i);
      FindConstant(EleCst);
    }
  }

  if (!isa<GlobalValue>(V)) {
    CstList.insert(V);
  }
}

spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
  switch (AddrSpace) {
  default:
    llvm_unreachable("Unsupported OpenCL address space");
  case AddressSpace::Private:
    return spv::StorageClassFunction;
  case AddressSpace::Global:
    return spv::StorageClassStorageBuffer;
  case AddressSpace::Constant:
    return clspv::Option::ConstantArgsInUniformBuffer()
               ? spv::StorageClassUniform
               : spv::StorageClassStorageBuffer;
  case AddressSpace::Input:
    return spv::StorageClassInput;
  case AddressSpace::Local:
    return spv::StorageClassWorkgroup;
  case AddressSpace::UniformConstant:
    return spv::StorageClassUniformConstant;
  case AddressSpace::Uniform:
    return spv::StorageClassUniform;
  case AddressSpace::ModuleScopePrivate:
    return spv::StorageClassPrivate;
  }
}

spv::StorageClass
SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
  switch (arg_kind) {
  case clspv::ArgKind::Buffer:
    return spv::StorageClassStorageBuffer;
  case clspv::ArgKind::BufferUBO:
    return spv::StorageClassUniform;
  case clspv::ArgKind::Pod:
    return clspv::Option::PodArgsInUniformBuffer()
               ? spv::StorageClassUniform
               : spv::StorageClassStorageBuffer;
  case clspv::ArgKind::Local:
    return spv::StorageClassWorkgroup;
  case clspv::ArgKind::ReadOnlyImage:
  case clspv::ArgKind::WriteOnlyImage:
  case clspv::ArgKind::Sampler:
    return spv::StorageClassUniformConstant;
  default:
    llvm_unreachable("Unsupported storage class for argument kind");
  }
}

spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
  return StringSwitch<spv::BuiltIn>(Name)
      .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
      .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
      .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
      .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
      .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
      .Default(spv::BuiltInMax);
}

void SPIRVProducerPass::GenerateExtInstImport() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  uint32_t &ExtInstImportID = getOpExtInstImportID();

  //
  // Generate OpExtInstImport.
  //
  // Ops[0] ... Ops[n] = Name (Literal String)
  ExtInstImportID = nextID;
  SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
                                               MkString("GLSL.std.450")));
}

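// GenerateSPIRVTypes walks the collected type list in dependency order and
// emits one SPIR-V type instruction per entry, e.g. (sketch only, IDs are
// illustrative):
//   %uint     = OpTypeInt 32 0
//   %float    = OpTypeFloat 32
//   %v4float  = OpTypeVector %float 4
//   %ptr_ssbo = OpTypePointer StorageBuffer %struct
// Struct members additionally get OpMemberDecorate Offset (and Block where
// required), and zero-length arrays become OpTypeRuntimeArray.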
void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
                                           Module &module) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  ValueMapType &AllocatedVMap = getAllocatedValueMap();
  const auto &DL = module.getDataLayout();

  // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
  // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
  DenseMap<Type *, uint32_t> OpRuntimeTyMap;

  for (Type *Ty : getTypeList()) {
    // Update TypeMap with nextID for reference later.
    TypeMap[Ty] = nextID;

    switch (Ty->getTypeID()) {
    default: {
      Ty->print(errs());
      llvm_unreachable("Unsupported type???");
      break;
    }
    case Type::MetadataTyID:
    case Type::LabelTyID: {
      // Ignore these types.
      break;
    }
    case Type::PointerTyID: {
      PointerType *PTy = cast<PointerType>(Ty);
      unsigned AddrSpace = PTy->getAddressSpace();

      // For the purposes of our Vulkan SPIR-V type system, constant and global
      // are conflated.
      bool UseExistingOpTypePointer = false;
      if (AddressSpace::Constant == AddrSpace) {
        if (!clspv::Option::ConstantArgsInUniformBuffer()) {
          AddrSpace = AddressSpace::Global;
          // Check to see if we already created this type (for instance, if we
          // had a constant <type>* and a global <type>*, the type would be
          // created by one of these types, and shared by both).
          auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
          if (0 < TypeMap.count(GlobalTy)) {
            TypeMap[PTy] = TypeMap[GlobalTy];
            UseExistingOpTypePointer = true;
            break;
          }
        }
      } else if (AddressSpace::Global == AddrSpace) {
        if (!clspv::Option::ConstantArgsInUniformBuffer()) {
          AddrSpace = AddressSpace::Constant;

          // Check to see if we already created this type (for instance, if we
          // had a constant <type>* and a global <type>*, the type would be
          // created by one of these types, and shared by both).
          auto ConstantTy =
              PTy->getPointerElementType()->getPointerTo(AddrSpace);
          if (0 < TypeMap.count(ConstantTy)) {
            TypeMap[PTy] = TypeMap[ConstantTy];
            UseExistingOpTypePointer = true;
          }
        }
      }

      const bool HasArgUser = true;

      if (HasArgUser && !UseExistingOpTypePointer) {
        //
        // Generate OpTypePointer.
        //

        // OpTypePointer
        // Ops[0] = Storage Class
        // Ops[1] = Element Type ID
        SPIRVOperandList Ops;

        Ops << MkNum(GetStorageClass(AddrSpace))
            << MkId(lookupType(PTy->getElementType()));

        auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
        SPIRVInstList.push_back(Inst);
      }
      break;
    }
    case Type::StructTyID: {
      StructType *STy = cast<StructType>(Ty);

      // Handle sampler type.
      if (STy->isOpaque()) {
        if (STy->getName().equals("opencl.sampler_t")) {
          //
          // Generate OpTypeSampler
          //
          // Empty Ops.
          SPIRVOperandList Ops;

          auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
          SPIRVInstList.push_back(Inst);
          break;
        } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
                   STy->getName().startswith("opencl.image1d_wo_t") ||
                   STy->getName().startswith("opencl.image2d_ro_t") ||
                   STy->getName().startswith("opencl.image2d_wo_t") ||
                   STy->getName().startswith("opencl.image3d_ro_t") ||
                   STy->getName().startswith("opencl.image3d_wo_t")) {
          //
          // Generate OpTypeImage
          //
          // Ops[0] = Sampled Type ID
          // Ops[1] = Dim ID
          // Ops[2] = Depth (Literal Number)
          // Ops[3] = Arrayed (Literal Number)
          // Ops[4] = MS (Literal Number)
          // Ops[5] = Sampled (Literal Number)
          // Ops[6] = Image Format ID
          //
          SPIRVOperandList Ops;

          uint32_t ImageTyID = nextID++;
          uint32_t SampledTyID = 0;
          if (STy->getName().contains(".float")) {
            SampledTyID = lookupType(Type::getFloatTy(Context));
          } else if (STy->getName().contains(".uint")) {
            SampledTyID = lookupType(Type::getInt32Ty(Context));
          } else if (STy->getName().contains(".int")) {
            // Generate a signed 32-bit integer if necessary.
            if (int32ID == 0) {
              int32ID = nextID++;
              SPIRVOperandList intOps;
              intOps << MkNum(32);
              intOps << MkNum(1);
              auto signed_int =
                  new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
              SPIRVInstList.push_back(signed_int);
            }
            SampledTyID = int32ID;

            // Generate a vec4 of the signed int if necessary.
            if (v4int32ID == 0) {
              v4int32ID = nextID++;
              SPIRVOperandList vecOps;
              vecOps << MkId(int32ID);
              vecOps << MkNum(4);
              auto int_vec =
                  new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
              SPIRVInstList.push_back(int_vec);
            }
          } else {
            // This was likely an UndefValue.
            SampledTyID = lookupType(Type::getFloatTy(Context));
          }
          Ops << MkId(SampledTyID);

          spv::Dim DimID = spv::Dim2D;
          if (STy->getName().startswith("opencl.image1d_ro_t") ||
              STy->getName().startswith("opencl.image1d_wo_t")) {
            DimID = spv::Dim1D;
          } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
                     STy->getName().startswith("opencl.image3d_wo_t")) {
            DimID = spv::Dim3D;
          }
          Ops << MkNum(DimID);

          // TODO: Set up Depth.
          Ops << MkNum(0);

          // TODO: Set up Arrayed.
          Ops << MkNum(0);

          // TODO: Set up MS.
          Ops << MkNum(0);

          // TODO: Set up Sampled.
          //
          // From Spec
          //
          // 0 indicates this is only known at run time, not at compile time
          // 1 indicates will be used with sampler
          // 2 indicates will be used without a sampler (a storage image)
          uint32_t Sampled = 1;
          if (!STy->getName().contains(".sampled")) {
            Sampled = 2;
          }
          Ops << MkNum(Sampled);

          // TODO: Set up Image Format.
          Ops << MkNum(spv::ImageFormatUnknown);

          auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
          SPIRVInstList.push_back(Inst);
          break;
        }
      }

      //
      // Generate OpTypeStruct
      //
      // Ops[0] ... Ops[n] = Member IDs
      SPIRVOperandList Ops;

      for (auto *EleTy : STy->elements()) {
        Ops << MkId(lookupType(EleTy));
      }

      uint32_t STyID = nextID;

      auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Generate OpMemberDecorate.
      auto DecoInsertPoint =
          std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                       [](SPIRVInstruction *Inst) -> bool {
                         return Inst->getOpcode() != spv::OpDecorate &&
                                Inst->getOpcode() != spv::OpMemberDecorate &&
                                Inst->getOpcode() != spv::OpExtInstImport;
                       });

      const auto StructLayout = DL.getStructLayout(STy);
      // Search for the correct offsets if this type was remapped.
      std::vector<uint32_t> *offsets = nullptr;
      auto iter = RemappedUBOTypeOffsets.find(STy);
      if (iter != RemappedUBOTypeOffsets.end()) {
        offsets = &iter->second;
      }

      // #error TODO(dneto): Only do this if in TypesNeedingLayout.
      for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
           MemberIdx++) {
        // Ops[0] = Structure Type ID
        // Ops[1] = Member Index(Literal Number)
        // Ops[2] = Decoration (Offset)
        // Ops[3] = Byte Offset (Literal Number)
        Ops.clear();

        Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);

        auto ByteOffset =
            static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
        if (offsets) {
          ByteOffset = (*offsets)[MemberIdx];
        }
        // const auto ByteOffset =
        //    uint32_t(StructLayout->getElementOffset(MemberIdx));
        Ops << MkNum(ByteOffset);

        auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
        SPIRVInstList.insert(DecoInsertPoint, DecoInst);
      }

      // Generate OpDecorate.
      if (StructTypesNeedingBlock.idFor(STy)) {
        Ops.clear();
        // Use Block decorations with StorageBuffer storage class.
        Ops << MkId(STyID) << MkNum(spv::DecorationBlock);

        auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
        SPIRVInstList.insert(DecoInsertPoint, DecoInst);
      }
      break;
    }
    case Type::IntegerTyID: {
      uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());

      if (BitWidth == 1) {
        auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++, {});
        SPIRVInstList.push_back(Inst);
      } else {
        if (!clspv::Option::Int8Support()) {
          // i8 is added to TypeMap as i32.
          // No matter what LLVM type is requested first, always alias the
          // second one's SPIR-V type to be the same as the one we generated
          // first.
          unsigned aliasToWidth = 0;
          if (BitWidth == 8) {
            aliasToWidth = 32;
            BitWidth = 32;
          } else if (BitWidth == 32) {
            aliasToWidth = 8;
          }
          if (aliasToWidth) {
            Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
            auto where = TypeMap.find(otherType);
            if (where == TypeMap.end()) {
              // Go ahead and make it, but also map the other type to it.
              TypeMap[otherType] = nextID;
            } else {
2067 TypeMap[Ty] = where->second;
2068 break;
2069 }
David Neto391aeb12017-08-26 15:51:58 -04002070 }
David Neto22f144c2017-06-12 14:26:21 -04002071 }
2072
David Neto257c3892018-04-11 13:19:45 -04002073 SPIRVOperandList Ops;
2074 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002075
2076 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002077 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002078 }
2079 break;
2080 }
2081 case Type::HalfTyID:
2082 case Type::FloatTyID:
2083 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002084 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002085 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002086
2087 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002088 new SPIRVInstruction(spv::OpTypeFloat, nextID++, WidthOp));
David Neto22f144c2017-06-12 14:26:21 -04002089 break;
2090 }
2091 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002092 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002093 const uint64_t Length = ArrTy->getArrayNumElements();
2094 if (Length == 0) {
2095 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002096
David Neto862b7d82018-06-14 18:48:37 -04002097 // Only generate the type once.
2098 // TODO(dneto): Can it ever be generated more than once?
2099 // Doesn't LLVM type uniqueness guarantee we'll only see this
2100 // once?
2101 Type *EleTy = ArrTy->getArrayElementType();
2102 if (OpRuntimeTyMap.count(EleTy) == 0) {
2103 uint32_t OpTypeRuntimeArrayID = nextID;
2104 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002105
David Neto862b7d82018-06-14 18:48:37 -04002106 //
2107 // Generate OpTypeRuntimeArray.
2108 //
David Neto22f144c2017-06-12 14:26:21 -04002109
David Neto862b7d82018-06-14 18:48:37 -04002110 // OpTypeRuntimeArray
2111 // Ops[0] = Element Type ID
2112 SPIRVOperandList Ops;
2113 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002114
David Neto862b7d82018-06-14 18:48:37 -04002115 SPIRVInstList.push_back(
2116 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002117
David Neto862b7d82018-06-14 18:48:37 -04002118 if (Hack_generate_runtime_array_stride_early) {
2119 // Generate OpDecorate.
2120 auto DecoInsertPoint = std::find_if(
2121 SPIRVInstList.begin(), SPIRVInstList.end(),
2122 [](SPIRVInstruction *Inst) -> bool {
2123 return Inst->getOpcode() != spv::OpDecorate &&
2124 Inst->getOpcode() != spv::OpMemberDecorate &&
2125 Inst->getOpcode() != spv::OpExtInstImport;
2126 });
David Neto22f144c2017-06-12 14:26:21 -04002127
David Neto862b7d82018-06-14 18:48:37 -04002128 // Ops[0] = Target ID
2129 // Ops[1] = Decoration (ArrayStride)
2130 // Ops[2] = Stride Number(Literal Number)
2131 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002132
David Neto862b7d82018-06-14 18:48:37 -04002133 Ops << MkId(OpTypeRuntimeArrayID)
2134 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002135 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002136
David Neto862b7d82018-06-14 18:48:37 -04002137 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2138 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2139 }
2140 }
David Neto22f144c2017-06-12 14:26:21 -04002141
David Neto862b7d82018-06-14 18:48:37 -04002142 } else {
David Neto22f144c2017-06-12 14:26:21 -04002143
David Neto862b7d82018-06-14 18:48:37 -04002144 //
2145 // Generate OpConstant and OpTypeArray.
2146 //
2147
2148 //
2149 // Generate OpConstant for array length.
2150 //
2151 // Ops[0] = Result Type ID
2152 // Ops[1] .. Ops[n] = Values LiteralNumber
2153 SPIRVOperandList Ops;
2154
2155 Type *LengthTy = Type::getInt32Ty(Context);
2156 uint32_t ResTyID = lookupType(LengthTy);
2157 Ops << MkId(ResTyID);
2158
2159 assert(Length < UINT32_MAX);
2160 Ops << MkNum(static_cast<uint32_t>(Length));
2161
2162 // Add constant for length to constant list.
2163 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2164 AllocatedVMap[CstLength] = nextID;
2165 VMap[CstLength] = nextID;
2166 uint32_t LengthID = nextID;
2167
2168 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2169 SPIRVInstList.push_back(CstInst);
2170
2171 // Remember to generate ArrayStride later
2172 getTypesNeedingArrayStride().insert(Ty);
2173
2174 //
2175 // Generate OpTypeArray.
2176 //
2177 // Ops[0] = Element Type ID
2178 // Ops[1] = Array Length Constant ID
2179 Ops.clear();
2180
2181 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2182 Ops << MkId(EleTyID) << MkId(LengthID);
2183
2184 // Update TypeMap with nextID.
2185 TypeMap[Ty] = nextID;
2186
2187 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2188 SPIRVInstList.push_back(ArrayInst);
2189 }
David Neto22f144c2017-06-12 14:26:21 -04002190 break;
2191 }
2192 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002193 // <4 x i8> is changed to i32 if i8 is not generally supported.
2194 if (!clspv::Option::Int8Support() &&
2195 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002196 if (Ty->getVectorNumElements() == 4) {
2197 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2198 break;
2199 } else {
2200 Ty->print(errs());
2201 llvm_unreachable("Support above i8 vector type");
2202 }
2203 }
2204
2205 // Ops[0] = Component Type ID
2206 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002207 SPIRVOperandList Ops;
2208 Ops << MkId(lookupType(Ty->getVectorElementType()))
2209 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002210
alan-bakerb6b09dc2018-11-08 16:59:28 -05002211 SPIRVInstruction *inst =
2212 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002213 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002214 break;
2215 }
2216 case Type::VoidTyID: {
David Neto87846742018-04-11 17:36:22 -04002217 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++, {});
David Neto22f144c2017-06-12 14:26:21 -04002218 SPIRVInstList.push_back(Inst);
2219 break;
2220 }
2221 case Type::FunctionTyID: {
2222 // Generate SPIRV instruction for function type.
2223 FunctionType *FTy = cast<FunctionType>(Ty);
2224
2225 // Ops[0] = Return Type ID
2226 // Ops[1] ... Ops[n] = Parameter Type IDs
2227 SPIRVOperandList Ops;
2228
2229 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002230 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002231
2232 // Find SPIRV instructions for parameter types
2233 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2234 // Find SPIRV instruction for parameter type.
2235 auto ParamTy = FTy->getParamType(k);
2236 if (ParamTy->isPointerTy()) {
2237 auto PointeeTy = ParamTy->getPointerElementType();
2238 if (PointeeTy->isStructTy() &&
2239 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2240 ParamTy = PointeeTy;
2241 }
2242 }
2243
David Netoc6f3ab22018-04-06 18:02:31 -04002244 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002245 }
2246
David Neto87846742018-04-11 17:36:22 -04002247 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002248 SPIRVInstList.push_back(Inst);
2249 break;
2250 }
2251 }
2252 }
2253
2254 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002255 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002256 //
2257 // Generate OpTypeSampledImage.
2258 //
2259 // Ops[0] = Image Type ID
2260 //
2261 SPIRVOperandList Ops;
2262
David Netoc6f3ab22018-04-06 18:02:31 -04002263 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002264
alan-bakerabd82722019-12-03 17:14:51 -05002265 // Update the image type map.
2266 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002267
David Neto87846742018-04-11 17:36:22 -04002268 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002269 SPIRVInstList.push_back(Inst);
2270 }
David Netoc6f3ab22018-04-06 18:02:31 -04002271
2272 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002273 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2274 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002275 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002276
2277 // Generate the spec constant.
2278 SPIRVOperandList Ops;
2279 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002280 SPIRVInstList.push_back(
2281 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002282
2283 // Generate the array type.
2284 Ops.clear();
2285 // The element type must have been created.
2286 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2287 assert(elem_ty_id);
2288 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2289
2290 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002291 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002292
2293 Ops.clear();
2294 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002295 SPIRVInstList.push_back(new SPIRVInstruction(
2296 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002297 }
David Neto22f144c2017-06-12 14:26:21 -04002298}
2299
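// GenerateSPIRVConstants emits OpConstant* instructions for the gathered
// constants.  Noteworthy special case: a <4 x i8> constant that clspv models
// as a packed i32 is emitted as a single 32-bit OpConstant with element 0 in
// the most significant byte, so (for illustration) <4 x i8> <1, 2, 3, 4>
// becomes the i32 value 0x01020304.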
void SPIRVProducerPass::GenerateSPIRVConstants() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  ValueMapType &AllocatedVMap = getAllocatedValueMap();
  ValueList &CstList = getConstantList();
  const bool hack_undef = clspv::Option::HackUndef();

  for (uint32_t i = 0; i < CstList.size(); i++) {
    // UniqueVector ids are 1-based.
    Constant *Cst = cast<Constant>(CstList[i + 1]);

    // OpTypeArray's constant was already generated.
    if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
      continue;
    }

    // Set ValueMap with nextID for reference later.
    VMap[Cst] = nextID;

    //
    // Generate OpConstant.
    //

    // Ops[0] = Result Type ID
    // Ops[1] .. Ops[n] = Values LiteralNumber
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(Cst->getType()));

    std::vector<uint32_t> LiteralNum;
    spv::Op Opcode = spv::OpNop;

    if (isa<UndefValue>(Cst)) {
      // Ops[0] = Result Type ID
      Opcode = spv::OpUndef;
      if (hack_undef && IsTypeNullable(Cst->getType())) {
        Opcode = spv::OpConstantNull;
      }
    } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
      unsigned BitWidth = CI->getBitWidth();
      if (BitWidth == 1) {
        // If the bitwidth of constant is 1, generate OpConstantTrue or
        // OpConstantFalse.
        if (CI->getZExtValue()) {
          // Ops[0] = Result Type ID
          Opcode = spv::OpConstantTrue;
        } else {
          // Ops[0] = Result Type ID
          Opcode = spv::OpConstantFalse;
        }
      } else {
        auto V = CI->getZExtValue();
        LiteralNum.push_back(V & 0xFFFFFFFF);

        if (BitWidth > 32) {
          LiteralNum.push_back(V >> 32);
        }

        Opcode = spv::OpConstant;

        Ops << MkInteger(LiteralNum);
      }
    } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
      uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
      Type *CFPTy = CFP->getType();
      if (CFPTy->isFloatTy()) {
        LiteralNum.push_back(FPVal & 0xFFFFFFFF);
      } else if (CFPTy->isDoubleTy()) {
        LiteralNum.push_back(FPVal & 0xFFFFFFFF);
        LiteralNum.push_back(FPVal >> 32);
      } else {
        CFPTy->print(errs());
        llvm_unreachable("Implement this ConstantFP Type");
      }

      Opcode = spv::OpConstant;

      Ops << MkFloat(LiteralNum);
    } else if (isa<ConstantDataSequential>(Cst) &&
               cast<ConstantDataSequential>(Cst)->isString()) {
      Cst->print(errs());
      llvm_unreachable("Implement this Constant");

    } else if (const ConstantDataSequential *CDS =
                   dyn_cast<ConstantDataSequential>(Cst)) {
      // Let's convert <4 x i8> constant to int constant specially.
      // This case occurs when all the values are specified as constant
      // ints.
      Type *CstTy = Cst->getType();
      if (is4xi8vec(CstTy)) {
        LLVMContext &Context = CstTy->getContext();

        //
        // Generate OpConstant with OpTypeInt 32 0.
        //
        uint32_t IntValue = 0;
        for (unsigned k = 0; k < 4; k++) {
          const uint64_t Val = CDS->getElementAsInteger(k);
          IntValue = (IntValue << 8) | (Val & 0xffu);
        }

        Type *i32 = Type::getInt32Ty(Context);
        Constant *CstInt = ConstantInt::get(i32, IntValue);
        // If this constant is already registered on VMap, use it.
        if (VMap.count(CstInt)) {
          uint32_t CstID = VMap[CstInt];
          VMap[Cst] = CstID;
          continue;
        }

        Ops << MkNum(IntValue);

        auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
        SPIRVInstList.push_back(CstInst);

        continue;
      }

      // A normal constant-data-sequential case.
      for (unsigned k = 0; k < CDS->getNumElements(); k++) {
        Constant *EleCst = CDS->getElementAsConstant(k);
        uint32_t EleCstID = VMap[EleCst];
        Ops << MkId(EleCstID);
      }

      Opcode = spv::OpConstantComposite;
    } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
      // Let's convert <4 x i8> constant to int constant specially.
      // This case occurs when at least one of the values is an undef.
      Type *CstTy = Cst->getType();
      if (is4xi8vec(CstTy)) {
        LLVMContext &Context = CstTy->getContext();

        //
        // Generate OpConstant with OpTypeInt 32 0.
        //
        uint32_t IntValue = 0;
        for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
             I != E; ++I) {
          uint64_t Val = 0;
          const Value *CV = *I;
          if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
            Val = CI2->getZExtValue();
          }
          IntValue = (IntValue << 8) | (Val & 0xffu);
        }

        Type *i32 = Type::getInt32Ty(Context);
        Constant *CstInt = ConstantInt::get(i32, IntValue);
        // If this constant is already registered on VMap, use it.
        if (VMap.count(CstInt)) {
          uint32_t CstID = VMap[CstInt];
          VMap[Cst] = CstID;
          continue;
        }

        Ops << MkNum(IntValue);

        auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
        SPIRVInstList.push_back(CstInst);

        continue;
      }

      // We use a constant composite in SPIR-V for our constant aggregate in
      // LLVM.
      Opcode = spv::OpConstantComposite;

      for (unsigned k = 0; k < CA->getNumOperands(); k++) {
        // Look up the ID of the element of this aggregate (which we will
        // previously have created a constant for).
        uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];

        // And add an operand to the composite we are constructing
        Ops << MkId(ElementConstantID);
      }
    } else if (Cst->isNullValue()) {
      Opcode = spv::OpConstantNull;
    } else {
      Cst->print(errs());
      llvm_unreachable("Unsupported Constant???");
    }

    if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
      // Null pointer requires variable pointers.
      setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
    }

    auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(CstInst);
  }
}

2493void SPIRVProducerPass::GenerateSamplers(Module &M) {
2494 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002495
alan-bakerb6b09dc2018-11-08 16:59:28 -05002496 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002497 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002498 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2499 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002500
David Neto862b7d82018-06-14 18:48:37 -04002501 // We might have samplers in the sampler map that are not used
2502 // in the translation unit. We need to allocate variables
2503  // and bindings for them too.
2504 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002505
Kévin Petitdf71de32019-04-09 14:09:50 +01002506 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002507 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002508 if (!var_fn)
2509 return;
alan-baker09cb9802019-12-10 13:16:27 -05002510
David Neto862b7d82018-06-14 18:48:37 -04002511 for (auto user : var_fn->users()) {
2512 // Populate SamplerLiteralToDescriptorSetMap and
2513 // SamplerLiteralToBindingMap.
2514 //
2515 // Look for calls like
2516 // call %opencl.sampler_t addrspace(2)*
2517 // @clspv.sampler.var.literal(
2518 // i32 descriptor,
2519 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002520 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002521 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002522 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002523 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002524 auto sampler_value = third_param;
2525 if (clspv::Option::UseSamplerMap()) {
2526 if (third_param >= sampler_map.size()) {
2527 errs() << "Out of bounds index to sampler map: " << third_param;
2528 llvm_unreachable("bad sampler init: out of bounds");
2529 }
2530 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002531 }
2532
David Neto862b7d82018-06-14 18:48:37 -04002533 const auto descriptor_set = static_cast<unsigned>(
2534 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2535 const auto binding = static_cast<unsigned>(
2536 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2537
2538 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2539 SamplerLiteralToBindingMap[sampler_value] = binding;
2540 used_bindings.insert(binding);
2541 }
2542 }
2543
alan-baker09cb9802019-12-10 13:16:27 -05002544 DenseSet<size_t> seen;
2545 for (auto user : var_fn->users()) {
2546 if (!isa<CallInst>(user))
2547 continue;
2548
2549 auto call = cast<CallInst>(user);
2550 const unsigned third_param = static_cast<unsigned>(
2551 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2552
2553 // Already allocated a variable for this value.
2554 if (!seen.insert(third_param).second)
2555 continue;
2556
2557 auto sampler_value = third_param;
2558 if (clspv::Option::UseSamplerMap()) {
2559 sampler_value = sampler_map[third_param].first;
2560 }
2561
David Neto22f144c2017-06-12 14:26:21 -04002562 // Generate OpVariable.
2563 //
2564    // Ops[0] : Result Type ID
2565    // Ops[1] : Storage Class
2566 SPIRVOperandList Ops;
2567
David Neto257c3892018-04-11 13:19:45 -04002568 Ops << MkId(lookupType(SamplerTy))
2569 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002570
David Neto862b7d82018-06-14 18:48:37 -04002571 auto sampler_var_id = nextID++;
2572 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002573 SPIRVInstList.push_back(Inst);
2574
alan-baker09cb9802019-12-10 13:16:27 -05002575 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002576
2577 // Find Insert Point for OpDecorate.
2578 auto DecoInsertPoint =
2579 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2580 [](SPIRVInstruction *Inst) -> bool {
2581 return Inst->getOpcode() != spv::OpDecorate &&
2582 Inst->getOpcode() != spv::OpMemberDecorate &&
2583 Inst->getOpcode() != spv::OpExtInstImport;
2584 });
2585
2586 // Ops[0] = Target ID
2587 // Ops[1] = Decoration (DescriptorSet)
2588 // Ops[2] = LiteralNumber according to Decoration
2589 Ops.clear();
2590
David Neto862b7d82018-06-14 18:48:37 -04002591 unsigned descriptor_set;
2592 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002593 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002594 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002595 // This sampler is not actually used. Find the next one.
2596 for (binding = 0; used_bindings.count(binding); binding++)
2597 ;
2598 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2599 used_bindings.insert(binding);
2600 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002601 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2602 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002603
alan-baker09cb9802019-12-10 13:16:27 -05002604 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002605 descriptorMapEntries->emplace_back(std::move(sampler_data),
2606 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002607 }
2608
2609 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2610 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002611
David Neto87846742018-04-11 17:36:22 -04002612 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002613 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2614
2615 // Ops[0] = Target ID
2616 // Ops[1] = Decoration (Binding)
2617 // Ops[2] = LiteralNumber according to Decoration
2618 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002619 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2620 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002621
David Neto87846742018-04-11 17:36:22 -04002622 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002623 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
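    // Net effect for each literal sampler, sketched as SPIR-V with invented
    // IDs (the actual descriptor set and binding come from the calls above):
    //   %sampler_var = OpVariable %_ptr_UniformConstant_sampler UniformConstant
    //   OpDecorate %sampler_var DescriptorSet 0
    //   OpDecorate %sampler_var Binding 1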
2624 }
David Neto862b7d82018-06-14 18:48:37 -04002625}
David Neto22f144c2017-06-12 14:26:21 -04002626
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002627void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002628 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2629 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002630
David Neto862b7d82018-06-14 18:48:37 -04002631  // Generate variables. Make one for each resource var info object.
2632 for (auto *info : ModuleOrderedResourceVars) {
2633 Type *type = info->var_fn->getReturnType();
2634 // Remap the address space for opaque types.
2635 switch (info->arg_kind) {
2636 case clspv::ArgKind::Sampler:
2637 case clspv::ArgKind::ReadOnlyImage:
2638 case clspv::ArgKind::WriteOnlyImage:
2639 type = PointerType::get(type->getPointerElementType(),
2640 clspv::AddressSpace::UniformConstant);
2641 break;
2642 default:
2643 break;
2644 }
David Neto22f144c2017-06-12 14:26:21 -04002645
David Neto862b7d82018-06-14 18:48:37 -04002646 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002647
David Neto862b7d82018-06-14 18:48:37 -04002648 const auto type_id = lookupType(type);
2649 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2650 SPIRVOperandList Ops;
2651 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002652
David Neto862b7d82018-06-14 18:48:37 -04002653 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2654 SPIRVInstList.push_back(Inst);
2655
2656 // Map calls to the variable-builtin-function.
2657 for (auto &U : info->var_fn->uses()) {
2658 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2659 const auto set = unsigned(
2660 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2661 const auto binding = unsigned(
2662 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2663 if (set == info->descriptor_set && binding == info->binding) {
2664 switch (info->arg_kind) {
2665 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002666 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002667 case clspv::ArgKind::Pod:
2668 // The call maps to the variable directly.
2669 VMap[call] = info->var_id;
2670 break;
2671 case clspv::ArgKind::Sampler:
2672 case clspv::ArgKind::ReadOnlyImage:
2673 case clspv::ArgKind::WriteOnlyImage:
2674 // The call maps to a load we generate later.
2675 ResourceVarDeferredLoadCalls[call] = info->var_id;
2676 break;
2677 default:
2678 llvm_unreachable("Unhandled arg kind");
2679 }
2680 }
David Neto22f144c2017-06-12 14:26:21 -04002681 }
David Neto862b7d82018-06-14 18:48:37 -04002682 }
2683 }
David Neto22f144c2017-06-12 14:26:21 -04002684
David Neto862b7d82018-06-14 18:48:37 -04002685 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002686
David Neto862b7d82018-06-14 18:48:37 -04002687 // Find Insert Point for OpDecorate.
2688 auto DecoInsertPoint =
2689 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2690 [](SPIRVInstruction *Inst) -> bool {
2691 return Inst->getOpcode() != spv::OpDecorate &&
2692 Inst->getOpcode() != spv::OpMemberDecorate &&
2693 Inst->getOpcode() != spv::OpExtInstImport;
2694 });
2695
2696 SPIRVOperandList Ops;
2697 for (auto *info : ModuleOrderedResourceVars) {
2698 // Decorate with DescriptorSet and Binding.
2699 Ops.clear();
2700 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2701 << MkNum(info->descriptor_set);
2702 SPIRVInstList.insert(DecoInsertPoint,
2703 new SPIRVInstruction(spv::OpDecorate, Ops));
2704
2705 Ops.clear();
2706 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2707 << MkNum(info->binding);
2708 SPIRVInstList.insert(DecoInsertPoint,
2709 new SPIRVInstruction(spv::OpDecorate, Ops));
2710
alan-bakere9308012019-03-15 10:25:13 -04002711 if (info->coherent) {
2712 // Decorate with Coherent if required for the variable.
2713 Ops.clear();
2714 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2715 SPIRVInstList.insert(DecoInsertPoint,
2716 new SPIRVInstruction(spv::OpDecorate, Ops));
2717 }
2718
David Neto862b7d82018-06-14 18:48:37 -04002719 // Generate NonWritable and NonReadable
2720 switch (info->arg_kind) {
2721 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002722 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002723 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2724 clspv::AddressSpace::Constant) {
2725 Ops.clear();
2726 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2727 SPIRVInstList.insert(DecoInsertPoint,
2728 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002729 }
David Neto862b7d82018-06-14 18:48:37 -04002730 break;
David Neto862b7d82018-06-14 18:48:37 -04002731 case clspv::ArgKind::WriteOnlyImage:
2732 Ops.clear();
2733 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2734 SPIRVInstList.insert(DecoInsertPoint,
2735 new SPIRVInstruction(spv::OpDecorate, Ops));
2736 break;
2737 default:
2738 break;
David Neto22f144c2017-06-12 14:26:21 -04002739 }
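    // Taken together, a constant-address-space buffer, for instance, ends up
    // decorated like this (illustrative SPIR-V, IDs and numbers invented):
    //   OpDecorate %resource_var DescriptorSet 1
    //   OpDecorate %resource_var Binding 0
    //   OpDecorate %resource_var NonWritable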
2740 }
2741}
2742
2743void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002744 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002745 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2746 ValueMapType &VMap = getValueMap();
2747 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002748 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002749
2750 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2751 Type *Ty = GV.getType();
2752 PointerType *PTy = cast<PointerType>(Ty);
2753
2754 uint32_t InitializerID = 0;
2755
2756 // Workgroup size is handled differently (it goes into a constant)
2757 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2758 std::vector<bool> HasMDVec;
2759 uint32_t PrevXDimCst = 0xFFFFFFFF;
2760 uint32_t PrevYDimCst = 0xFFFFFFFF;
2761 uint32_t PrevZDimCst = 0xFFFFFFFF;
2762 for (Function &Func : *GV.getParent()) {
2763 if (Func.isDeclaration()) {
2764 continue;
2765 }
2766
2767 // We only need to check kernels.
2768 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2769 continue;
2770 }
2771
2772 if (const MDNode *MD =
2773 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2774 uint32_t CurXDimCst = static_cast<uint32_t>(
2775 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2776 uint32_t CurYDimCst = static_cast<uint32_t>(
2777 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2778 uint32_t CurZDimCst = static_cast<uint32_t>(
2779 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2780
2781 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2782 PrevZDimCst == 0xFFFFFFFF) {
2783 PrevXDimCst = CurXDimCst;
2784 PrevYDimCst = CurYDimCst;
2785 PrevZDimCst = CurZDimCst;
2786 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2787 CurZDimCst != PrevZDimCst) {
2788 llvm_unreachable(
2789 "reqd_work_group_size must be the same across all kernels");
2790 } else {
2791 continue;
2792 }
2793
2794 //
2795 // Generate OpConstantComposite.
2796 //
2797 // Ops[0] : Result Type ID
2798 // Ops[1] : Constant size for x dimension.
2799 // Ops[2] : Constant size for y dimension.
2800 // Ops[3] : Constant size for z dimension.
2801 SPIRVOperandList Ops;
2802
2803 uint32_t XDimCstID =
2804 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2805 uint32_t YDimCstID =
2806 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2807 uint32_t ZDimCstID =
2808 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2809
2810 InitializerID = nextID;
2811
David Neto257c3892018-04-11 13:19:45 -04002812 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2813 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002814
David Neto87846742018-04-11 17:36:22 -04002815 auto *Inst =
2816 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002817 SPIRVInstList.push_back(Inst);
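        // Illustrative result for reqd_work_group_size(8, 4, 2), with
        // invented IDs:
        //   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_2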
2818
2819 HasMDVec.push_back(true);
2820 } else {
2821 HasMDVec.push_back(false);
2822 }
2823 }
2824
2825 // Check all kernels have same definitions for work_group_size.
2826 bool HasMD = false;
2827 if (!HasMDVec.empty()) {
2828 HasMD = HasMDVec[0];
2829 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2830 if (HasMD != HasMDVec[i]) {
2831 llvm_unreachable(
2832 "Kernels should have consistent work group size definition");
2833 }
2834 }
2835 }
2836
2837 // If all kernels do not have metadata for reqd_work_group_size, generate
2838 // OpSpecConstants for x/y/z dimension.
2839 if (!HasMD) {
2840 //
2841 // Generate OpSpecConstants for x/y/z dimension.
2842 //
2843 // Ops[0] : Result Type ID
2844 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2845 uint32_t XDimCstID = 0;
2846 uint32_t YDimCstID = 0;
2847 uint32_t ZDimCstID = 0;
2848
David Neto22f144c2017-06-12 14:26:21 -04002849 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002850 uint32_t result_type_id =
2851 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002852
David Neto257c3892018-04-11 13:19:45 -04002853 // X Dimension
2854 Ops << MkId(result_type_id) << MkNum(1);
2855 XDimCstID = nextID++;
2856 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002857 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002858
2859 // Y Dimension
2860 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002861 Ops << MkId(result_type_id) << MkNum(1);
2862 YDimCstID = nextID++;
2863 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002864 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002865
2866 // Z Dimension
2867 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002868 Ops << MkId(result_type_id) << MkNum(1);
2869 ZDimCstID = nextID++;
2870 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002871 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002872
David Neto257c3892018-04-11 13:19:45 -04002873 BuiltinDimVec.push_back(XDimCstID);
2874 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002875 BuiltinDimVec.push_back(ZDimCstID);
2876
David Neto22f144c2017-06-12 14:26:21 -04002877 //
2878 // Generate OpSpecConstantComposite.
2879 //
2880 // Ops[0] : Result Type ID
2881 // Ops[1] : Constant size for x dimension.
2882 // Ops[2] : Constant size for y dimension.
2883 // Ops[3] : Constant size for z dimension.
2884 InitializerID = nextID;
2885
2886 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002887 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2888 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002889
David Neto87846742018-04-11 17:36:22 -04002890 auto *Inst =
2891 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002892 SPIRVInstList.push_back(Inst);
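      // Illustrative output when no kernel declares reqd_work_group_size
      // (IDs invented):
      //   %x = OpSpecConstant %uint 1
      //   %y = OpSpecConstant %uint 1
      //   %z = OpSpecConstant %uint 1
      //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z
      // The SpecId decorations (0, 1, 2) for %x/%y/%z are added later in
      // GenerateModuleInfo.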
2893 }
2894 }
2895
David Neto22f144c2017-06-12 14:26:21 -04002896 VMap[&GV] = nextID;
2897
2898 //
2899 // Generate OpVariable.
2900 //
2901  // Ops[0] : Result Type ID
2902  // Ops[1] : Storage Class
2903 SPIRVOperandList Ops;
2904
David Neto85082642018-03-24 06:55:20 -07002905 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002906 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002907
David Neto85082642018-03-24 06:55:20 -07002908 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002909 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002910 clspv::Option::ModuleConstantsInStorageBuffer();
2911
Kévin Petit23d5f182019-08-13 16:21:29 +01002912 if (GV.hasInitializer()) {
2913 auto GVInit = GV.getInitializer();
2914 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2915 assert(VMap.count(GVInit) == 1);
2916 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002917 }
2918 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002919
2920 if (0 != InitializerID) {
2921    // Emit the ID of the initializer as part of the variable definition.
2922 Ops << MkId(InitializerID);
2923 }
David Neto85082642018-03-24 06:55:20 -07002924 const uint32_t var_id = nextID++;
2925
David Neto87846742018-04-11 17:36:22 -04002926 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002927 SPIRVInstList.push_back(Inst);
2928
2929 // If we have a builtin.
2930 if (spv::BuiltInMax != BuiltinType) {
2931 // Find Insert Point for OpDecorate.
2932 auto DecoInsertPoint =
2933 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2934 [](SPIRVInstruction *Inst) -> bool {
2935 return Inst->getOpcode() != spv::OpDecorate &&
2936 Inst->getOpcode() != spv::OpMemberDecorate &&
2937 Inst->getOpcode() != spv::OpExtInstImport;
2938 });
2939 //
2940 // Generate OpDecorate.
2941 //
2942 // DOps[0] = Target ID
2943 // DOps[1] = Decoration (Builtin)
2944 // DOps[2] = BuiltIn ID
2945 uint32_t ResultID;
2946
2947    // WorkgroupSize is different: we decorate the constant composite that
2948    // holds its value, rather than the variable that we use to access the value.
2949 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2950 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002951 // Save both the value and variable IDs for later.
2952 WorkgroupSizeValueID = InitializerID;
2953 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002954 } else {
2955 ResultID = VMap[&GV];
2956 }
2957
2958 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002959 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
2960 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04002961
David Neto87846742018-04-11 17:36:22 -04002962 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04002963 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07002964 } else if (module_scope_constant_external_init) {
2965 // This module scope constant is initialized from a storage buffer with data
2966 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04002967 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07002968
David Neto862b7d82018-06-14 18:48:37 -04002969    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002970 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2971 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002972 std::string hexbytes;
2973 llvm::raw_string_ostream str(hexbytes);
2974 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002975 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2976 str.str()};
2977 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2978 0);
David Neto85082642018-03-24 06:55:20 -07002979
2980 // Find Insert Point for OpDecorate.
2981 auto DecoInsertPoint =
2982 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2983 [](SPIRVInstruction *Inst) -> bool {
2984 return Inst->getOpcode() != spv::OpDecorate &&
2985 Inst->getOpcode() != spv::OpMemberDecorate &&
2986 Inst->getOpcode() != spv::OpExtInstImport;
2987 });
2988
David Neto257c3892018-04-11 13:19:45 -04002989 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07002990 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04002991 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
2992 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04002993 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07002994
2995 // OpDecorate %var DescriptorSet <descriptor_set>
2996 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04002997 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
2998 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04002999 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003000 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003001 }
3002}
3003
David Netoc6f3ab22018-04-06 18:02:31 -04003004void SPIRVProducerPass::GenerateWorkgroupVars() {
3005 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003006 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3007 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003008 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003009
3010 // Generate OpVariable.
3011 //
3012    // Ops[0] : Result Type ID
3013    // Ops[1] : Storage Class
3014 SPIRVOperandList Ops;
3015 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3016
3017 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003018 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003019 }
3020}
3021
David Neto862b7d82018-06-14 18:48:37 -04003022void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3023 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003024 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3025 return;
3026 }
David Neto862b7d82018-06-14 18:48:37 -04003027 // Gather the list of resources that are used by this function's arguments.
3028 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3029
alan-bakerf5e5f692018-11-27 08:33:24 -05003030 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3031 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003032 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003033 std::string kind =
3034 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3035 ? "pod_ubo"
3036 : argKind;
3037 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003038 };
3039
3040 auto *fty = F.getType()->getPointerElementType();
3041 auto *func_ty = dyn_cast<FunctionType>(fty);
3042
alan-baker038e9242019-04-19 22:14:41 -04003043 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003044 // If an argument maps to a resource variable, then get descriptor set and
3045  // binding from the resource variable. Other info comes from the metadata.
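  // An illustrative (invented) metadata node for one argument would look like:
  //   !{!"dst", i32 0, i32 0, i32 0, i32 4, !"buffer", i32 -1}
  // i.e. name, old index, new index, offset, arg size, arg kind, spec id,
  // matching the operand order read below.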
3046 const auto *arg_map = F.getMetadata("kernel_arg_map");
3047 if (arg_map) {
3048 for (const auto &arg : arg_map->operands()) {
3049 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003050 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003051 const auto name =
3052 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3053 const auto old_index =
3054 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3055 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003056 const size_t new_index = static_cast<size_t>(
3057 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003058 const auto offset =
3059 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003060 const auto arg_size =
3061 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003062 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003063 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003064 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003065 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003066
3067 uint32_t descriptor_set = 0;
3068 uint32_t binding = 0;
3069 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003070 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003071 static_cast<uint32_t>(spec_id),
3072 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003073 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003074 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003075 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3076 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3077 DL));
David Neto862b7d82018-06-14 18:48:37 -04003078 } else {
3079 auto *info = resource_var_at_index[new_index];
3080 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003081 descriptor_set = info->descriptor_set;
3082 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003083 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003084 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3085 binding);
David Neto862b7d82018-06-14 18:48:37 -04003086 }
3087 } else {
3088 // There is no argument map.
3089 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003090 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003091
3092 SmallVector<Argument *, 4> arguments;
3093 for (auto &arg : F.args()) {
3094 arguments.push_back(&arg);
3095 }
3096
3097 unsigned arg_index = 0;
3098 for (auto *info : resource_var_at_index) {
3099 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003100 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003101 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003102 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003103 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003104 }
3105
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003106 // Local pointer arguments are unused in this case. Offset is always
3107 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003108 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3109 F.getName(), arg->getName(),
3110 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3111 0, 0,
3112 0, arg_size};
3113 descriptorMapEntries->emplace_back(std::move(kernel_data),
3114 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003115 }
3116 arg_index++;
3117 }
3118 // Generate mappings for pointer-to-local arguments.
3119 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3120 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003121 auto where = LocalArgSpecIds.find(arg);
3122 if (where != LocalArgSpecIds.end()) {
3123 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003124        // Pod argument members are unused in this case.
3125 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3126 F.getName(),
3127 arg->getName(),
3128 arg_index,
3129 ArgKind::Local,
3130 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003131 static_cast<uint32_t>(
3132 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003133 0,
3134 0};
3135 // Pointer-to-local arguments do not utilize descriptor set and binding.
3136 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003137 }
3138 }
3139 }
3140}
3141
David Neto22f144c2017-06-12 14:26:21 -04003142void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3143 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3144 ValueMapType &VMap = getValueMap();
3145 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003146 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3147 auto &GlobalConstArgSet = getGlobalConstArgSet();
3148
3149 FunctionType *FTy = F.getFunctionType();
3150
3151 //
David Neto22f144c2017-06-12 14:26:21 -04003152  // Generate OpFunction.
3153 //
3154
3155 // FOps[0] : Result Type ID
3156 // FOps[1] : Function Control
3157 // FOps[2] : Function Type ID
3158 SPIRVOperandList FOps;
3159
3160 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003161 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003162
3163 // Check function attributes for SPIRV Function Control.
3164 uint32_t FuncControl = spv::FunctionControlMaskNone;
3165 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3166 FuncControl |= spv::FunctionControlInlineMask;
3167 }
3168 if (F.hasFnAttribute(Attribute::NoInline)) {
3169 FuncControl |= spv::FunctionControlDontInlineMask;
3170 }
3171 // TODO: Check llvm attribute for Function Control Pure.
3172 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3173 FuncControl |= spv::FunctionControlPureMask;
3174 }
3175 // TODO: Check llvm attribute for Function Control Const.
3176 if (F.hasFnAttribute(Attribute::ReadNone)) {
3177 FuncControl |= spv::FunctionControlConstMask;
3178 }
3179
David Neto257c3892018-04-11 13:19:45 -04003180 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003181
3182 uint32_t FTyID;
3183 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3184 SmallVector<Type *, 4> NewFuncParamTys;
3185 FunctionType *NewFTy =
3186 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3187 FTyID = lookupType(NewFTy);
3188 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003189 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003190 if (GlobalConstFuncTyMap.count(FTy)) {
3191 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3192 } else {
3193 FTyID = lookupType(FTy);
3194 }
3195 }
3196
David Neto257c3892018-04-11 13:19:45 -04003197 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003198
3199 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3200 EntryPoints.push_back(std::make_pair(&F, nextID));
3201 }
3202
3203 VMap[&F] = nextID;
3204
David Neto482550a2018-03-24 05:21:07 -07003205 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003206 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3207 }
David Neto22f144c2017-06-12 14:26:21 -04003208 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003209 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003210 SPIRVInstList.push_back(FuncInst);
3211
3212 //
3213 // Generate OpFunctionParameter for Normal function.
3214 //
3215
3216 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003217
3218 // Find Insert Point for OpDecorate.
3219 auto DecoInsertPoint =
3220 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3221 [](SPIRVInstruction *Inst) -> bool {
3222 return Inst->getOpcode() != spv::OpDecorate &&
3223 Inst->getOpcode() != spv::OpMemberDecorate &&
3224 Inst->getOpcode() != spv::OpExtInstImport;
3225 });
3226
David Neto22f144c2017-06-12 14:26:21 -04003227 // Iterate Argument for name instead of param type from function type.
3228 unsigned ArgIdx = 0;
3229 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003230 uint32_t param_id = nextID++;
3231 VMap[&Arg] = param_id;
3232
3233 if (CalledWithCoherentResource(Arg)) {
3234 // If the arg is passed a coherent resource ever, then decorate this
3235 // parameter with Coherent too.
3236 SPIRVOperandList decoration_ops;
3237 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003238 SPIRVInstList.insert(
3239 DecoInsertPoint,
3240 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003241 }
David Neto22f144c2017-06-12 14:26:21 -04003242
3243 // ParamOps[0] : Result Type ID
3244 SPIRVOperandList ParamOps;
3245
3246 // Find SPIRV instruction for parameter type.
3247 uint32_t ParamTyID = lookupType(Arg.getType());
3248 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3249 if (GlobalConstFuncTyMap.count(FTy)) {
3250 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3251 Type *EleTy = PTy->getPointerElementType();
3252 Type *ArgTy =
3253 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3254 ParamTyID = lookupType(ArgTy);
3255 GlobalConstArgSet.insert(&Arg);
3256 }
3257 }
3258 }
David Neto257c3892018-04-11 13:19:45 -04003259 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003260
3261 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003262 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003263 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003264 SPIRVInstList.push_back(ParamInst);
3265
3266 ArgIdx++;
3267 }
3268 }
3269}
3270
alan-bakerb6b09dc2018-11-08 16:59:28 -05003271void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003272 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3273 EntryPointVecType &EntryPoints = getEntryPointVec();
3274 ValueMapType &VMap = getValueMap();
3275 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3276 uint32_t &ExtInstImportID = getOpExtInstImportID();
3277 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3278
3279 // Set up insert point.
3280 auto InsertPoint = SPIRVInstList.begin();
3281
3282 //
3283 // Generate OpCapability
3284 //
3285  // TODO: Which llvm information is mapped to SPIRV Capability?
3286
3287 // Ops[0] = Capability
3288 SPIRVOperandList Ops;
3289
David Neto87846742018-04-11 17:36:22 -04003290 auto *CapInst =
3291 new SPIRVInstruction(spv::OpCapability, {MkNum(spv::CapabilityShader)});
David Neto22f144c2017-06-12 14:26:21 -04003292 SPIRVInstList.insert(InsertPoint, CapInst);
3293
alan-bakerf906d2b2019-12-10 11:26:23 -05003294 bool write_without_format = false;
3295 bool sampled_1d = false;
3296 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003297 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003298 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3299 // Generate OpCapability for i8 type.
3300 SPIRVInstList.insert(InsertPoint,
3301 new SPIRVInstruction(spv::OpCapability,
3302 {MkNum(spv::CapabilityInt8)}));
3303 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003304 // Generate OpCapability for i16 type.
David Neto87846742018-04-11 17:36:22 -04003305 SPIRVInstList.insert(InsertPoint,
3306 new SPIRVInstruction(spv::OpCapability,
3307 {MkNum(spv::CapabilityInt16)}));
David Neto22f144c2017-06-12 14:26:21 -04003308 } else if (Ty->isIntegerTy(64)) {
3309 // Generate OpCapability for i64 type.
David Neto87846742018-04-11 17:36:22 -04003310 SPIRVInstList.insert(InsertPoint,
3311 new SPIRVInstruction(spv::OpCapability,
3312 {MkNum(spv::CapabilityInt64)}));
David Neto22f144c2017-06-12 14:26:21 -04003313 } else if (Ty->isHalfTy()) {
3314 // Generate OpCapability for half type.
3315 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003316 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3317 {MkNum(spv::CapabilityFloat16)}));
David Neto22f144c2017-06-12 14:26:21 -04003318 } else if (Ty->isDoubleTy()) {
3319 // Generate OpCapability for double type.
3320 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003321 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3322 {MkNum(spv::CapabilityFloat64)}));
David Neto22f144c2017-06-12 14:26:21 -04003323 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3324 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003325 if (STy->getName().startswith("opencl.image1d_wo_t") ||
3326 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003327 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003328 write_without_format = true;
3329 }
3330 if (STy->getName().startswith("opencl.image1d_ro_t") ||
3331 STy->getName().startswith("opencl.image1d_wo_t")) {
3332 if (STy->getName().contains(".sampled"))
3333 sampled_1d = true;
3334 else
3335 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003336 }
3337 }
3338 }
3339 }
3340
alan-bakerf906d2b2019-12-10 11:26:23 -05003341 if (write_without_format) {
3342 // Generate OpCapability for write only image type.
3343 SPIRVInstList.insert(
3344 InsertPoint,
3345 new SPIRVInstruction(
3346 spv::OpCapability,
3347 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3348 }
3349 if (image_1d) {
3350 // Generate OpCapability for unsampled 1D image type.
3351 SPIRVInstList.insert(InsertPoint,
3352 new SPIRVInstruction(spv::OpCapability,
3353 {MkNum(spv::CapabilityImage1D)}));
3354 } else if (sampled_1d) {
3355 // Generate OpCapability for sampled 1D image type.
3356 SPIRVInstList.insert(
3357 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3358 {MkNum(spv::CapabilitySampled1D)}));
3359 }
3360
David Neto5c22a252018-03-15 16:07:41 -04003361 { // OpCapability ImageQuery
3362 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003363 for (const auto &SymVal : module.getValueSymbolTable()) {
3364 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003365 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003366 hasImageQuery = true;
3367 break;
3368 }
David Neto5c22a252018-03-15 16:07:41 -04003369 }
3370 }
alan-bakerf67468c2019-11-25 15:51:49 -05003371
David Neto5c22a252018-03-15 16:07:41 -04003372 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003373 auto *ImageQueryCapInst = new SPIRVInstruction(
3374 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003375 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3376 }
3377 }
3378
David Neto22f144c2017-06-12 14:26:21 -04003379 if (hasVariablePointers()) {
3380 //
David Neto22f144c2017-06-12 14:26:21 -04003381 // Generate OpCapability.
3382 //
3383 // Ops[0] = Capability
3384 //
3385 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003386 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003387
David Neto87846742018-04-11 17:36:22 -04003388 SPIRVInstList.insert(InsertPoint,
3389 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003390 } else if (hasVariablePointersStorageBuffer()) {
3391 //
3392 // Generate OpCapability.
3393 //
3394 // Ops[0] = Capability
3395 //
3396 Ops.clear();
3397 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003398
alan-baker5b86ed72019-02-15 08:26:50 -05003399 SPIRVInstList.insert(InsertPoint,
3400 new SPIRVInstruction(spv::OpCapability, Ops));
3401 }
3402
3403 // Always add the storage buffer extension
3404 {
David Neto22f144c2017-06-12 14:26:21 -04003405 //
3406 // Generate OpExtension.
3407 //
3408 // Ops[0] = Name (Literal String)
3409 //
alan-baker5b86ed72019-02-15 08:26:50 -05003410 auto *ExtensionInst = new SPIRVInstruction(
3411 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3412 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3413 }
David Neto22f144c2017-06-12 14:26:21 -04003414
alan-baker5b86ed72019-02-15 08:26:50 -05003415 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3416 //
3417 // Generate OpExtension.
3418 //
3419 // Ops[0] = Name (Literal String)
3420 //
3421 auto *ExtensionInst = new SPIRVInstruction(
3422 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3423 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003424 }
3425
3426 if (ExtInstImportID) {
3427 ++InsertPoint;
3428 }
3429
3430 //
3431 // Generate OpMemoryModel
3432 //
3433 // Memory model for Vulkan will always be GLSL450.
3434
3435 // Ops[0] = Addressing Model
3436 // Ops[1] = Memory Model
3437 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003438 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003439
David Neto87846742018-04-11 17:36:22 -04003440 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003441 SPIRVInstList.insert(InsertPoint, MemModelInst);
3442
3443 //
3444 // Generate OpEntryPoint
3445 //
3446 for (auto EntryPoint : EntryPoints) {
3447 // Ops[0] = Execution Model
3448 // Ops[1] = EntryPoint ID
3449 // Ops[2] = Name (Literal String)
3450 // ...
3451 //
3452 // TODO: Do we need to consider Interface ID for forward references???
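    // Illustrative result (name and IDs invented):
    //   OpEntryPoint GLCompute %kernel_fn "my_kernel" %interface_var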
3453 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003454 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003455 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3456 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003457
David Neto22f144c2017-06-12 14:26:21 -04003458 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003459 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003460 }
3461
David Neto87846742018-04-11 17:36:22 -04003462 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003463 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3464 }
3465
3466 for (auto EntryPoint : EntryPoints) {
3467 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3468 ->getMetadata("reqd_work_group_size")) {
3469
3470 if (!BuiltinDimVec.empty()) {
3471 llvm_unreachable(
3472 "Kernels should have consistent work group size definition");
3473 }
3474
3475 //
3476 // Generate OpExecutionMode
3477 //
3478
3479 // Ops[0] = Entry Point ID
3480 // Ops[1] = Execution Mode
3481 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
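      // For example, reqd_work_group_size(8, 4, 2) yields (illustrative):
      //   OpExecutionMode %kernel_fn LocalSize 8 4 2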
3482 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003483 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003484
3485 uint32_t XDim = static_cast<uint32_t>(
3486 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3487 uint32_t YDim = static_cast<uint32_t>(
3488 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3489 uint32_t ZDim = static_cast<uint32_t>(
3490 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3491
David Neto257c3892018-04-11 13:19:45 -04003492 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003493
David Neto87846742018-04-11 17:36:22 -04003494 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003495 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3496 }
3497 }
3498
3499 //
3500 // Generate OpSource.
3501 //
3502 // Ops[0] = SourceLanguage ID
3503 // Ops[1] = Version (LiteralNum)
3504 //
3505 Ops.clear();
Kévin Petit0fc88042019-04-09 23:25:02 +01003506 if (clspv::Option::CPlusPlus()) {
3507 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3508 } else {
3509 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
3510 }
David Neto22f144c2017-06-12 14:26:21 -04003511
David Neto87846742018-04-11 17:36:22 -04003512 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003513 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3514
3515 if (!BuiltinDimVec.empty()) {
3516 //
3517 // Generate OpDecorates for x/y/z dimension.
3518 //
3519 // Ops[0] = Target ID
3520 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003521 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003522
3523 // X Dimension
3524 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003525 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003526 SPIRVInstList.insert(InsertPoint,
3527 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003528
3529 // Y Dimension
3530 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003531 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003532 SPIRVInstList.insert(InsertPoint,
3533 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003534
3535 // Z Dimension
3536 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003537 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003538 SPIRVInstList.insert(InsertPoint,
3539 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003540 }
3541}
3542
David Netob6e2e062018-04-25 10:32:06 -04003543void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3544 // Work around a driver bug. Initializers on Private variables might not
3545 // work. So the start of the kernel should store the initializer value to the
3546 // variables. Yes, *every* entry point pays this cost if *any* entry point
3547 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3548 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003549 // TODO(dneto): Remove this at some point once fixed drivers are widely
3550 // available.
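  // The emitted store is simply (illustrative IDs):
  //   OpStore %workgroup_size_var %workgroup_size_const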
David Netob6e2e062018-04-25 10:32:06 -04003551 if (WorkgroupSizeVarID) {
3552 assert(WorkgroupSizeValueID);
3553
3554 SPIRVOperandList Ops;
3555 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3556
3557 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3558 getSPIRVInstList().push_back(Inst);
3559 }
3560}
3561
David Neto22f144c2017-06-12 14:26:21 -04003562void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3563 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3564 ValueMapType &VMap = getValueMap();
3565
David Netob6e2e062018-04-25 10:32:06 -04003566 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003567
3568 for (BasicBlock &BB : F) {
3569 // Register BasicBlock to ValueMap.
3570 VMap[&BB] = nextID;
3571
3572 //
3573 // Generate OpLabel for Basic Block.
3574 //
3575 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003576 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003577 SPIRVInstList.push_back(Inst);
3578
David Neto6dcd4712017-06-23 11:06:47 -04003579 // OpVariable instructions must come first.
3580 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003581 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3582 // Allocating a pointer requires variable pointers.
3583 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003584 setVariablePointersCapabilities(
3585 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003586 }
David Neto6dcd4712017-06-23 11:06:47 -04003587 GenerateInstruction(I);
3588 }
3589 }
3590
David Neto22f144c2017-06-12 14:26:21 -04003591 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003592 if (clspv::Option::HackInitializers()) {
3593 GenerateEntryPointInitialStores();
3594 }
David Neto22f144c2017-06-12 14:26:21 -04003595 }
3596
3597 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003598 if (!isa<AllocaInst>(I)) {
3599 GenerateInstruction(I);
3600 }
David Neto22f144c2017-06-12 14:26:21 -04003601 }
3602 }
3603}
3604
3605spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3606 const std::map<CmpInst::Predicate, spv::Op> Map = {
3607 {CmpInst::ICMP_EQ, spv::OpIEqual},
3608 {CmpInst::ICMP_NE, spv::OpINotEqual},
3609 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3610 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3611 {CmpInst::ICMP_ULT, spv::OpULessThan},
3612 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3613 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3614 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3615 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3616 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3617 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3618 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3619 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3620 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3621 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3622 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3623 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3624 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3625 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3626 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3627 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3628 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3629
3630 assert(0 != Map.count(I->getPredicate()));
3631
3632 return Map.at(I->getPredicate());
3633}
3634
3635spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3636 const std::map<unsigned, spv::Op> Map{
3637 {Instruction::Trunc, spv::OpUConvert},
3638 {Instruction::ZExt, spv::OpUConvert},
3639 {Instruction::SExt, spv::OpSConvert},
3640 {Instruction::FPToUI, spv::OpConvertFToU},
3641 {Instruction::FPToSI, spv::OpConvertFToS},
3642 {Instruction::UIToFP, spv::OpConvertUToF},
3643 {Instruction::SIToFP, spv::OpConvertSToF},
3644 {Instruction::FPTrunc, spv::OpFConvert},
3645 {Instruction::FPExt, spv::OpFConvert},
3646 {Instruction::BitCast, spv::OpBitcast}};
3647
3648 assert(0 != Map.count(I.getOpcode()));
3649
3650 return Map.at(I.getOpcode());
3651}
3652
3653spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003654 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003655 switch (I.getOpcode()) {
3656 default:
3657 break;
3658 case Instruction::Or:
3659 return spv::OpLogicalOr;
3660 case Instruction::And:
3661 return spv::OpLogicalAnd;
3662 case Instruction::Xor:
3663 return spv::OpLogicalNotEqual;
3664 }
3665 }
3666
alan-bakerb6b09dc2018-11-08 16:59:28 -05003667 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003668 {Instruction::Add, spv::OpIAdd},
3669 {Instruction::FAdd, spv::OpFAdd},
3670 {Instruction::Sub, spv::OpISub},
3671 {Instruction::FSub, spv::OpFSub},
3672 {Instruction::Mul, spv::OpIMul},
3673 {Instruction::FMul, spv::OpFMul},
3674 {Instruction::UDiv, spv::OpUDiv},
3675 {Instruction::SDiv, spv::OpSDiv},
3676 {Instruction::FDiv, spv::OpFDiv},
3677 {Instruction::URem, spv::OpUMod},
3678 {Instruction::SRem, spv::OpSRem},
3679 {Instruction::FRem, spv::OpFRem},
3680 {Instruction::Or, spv::OpBitwiseOr},
3681 {Instruction::Xor, spv::OpBitwiseXor},
3682 {Instruction::And, spv::OpBitwiseAnd},
3683 {Instruction::Shl, spv::OpShiftLeftLogical},
3684 {Instruction::LShr, spv::OpShiftRightLogical},
3685 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3686
3687 assert(0 != Map.count(I.getOpcode()));
3688
3689 return Map.at(I.getOpcode());
3690}
3691
3692void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3693 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3694 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003695 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3696 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3697
3698 // Register Instruction to ValueMap.
3699 if (0 == VMap[&I]) {
3700 VMap[&I] = nextID;
3701 }
3702
3703 switch (I.getOpcode()) {
3704 default: {
3705 if (Instruction::isCast(I.getOpcode())) {
3706 //
3707 // Generate SPIRV instructions for cast operators.
3708 //
3709
David Netod2de94a2017-08-28 17:27:47 -04003710 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003711 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003712 auto toI8 = Ty == Type::getInt8Ty(Context);
3713 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003714 // Handle zext, sext and uitofp with i1 type specially.
3715 if ((I.getOpcode() == Instruction::ZExt ||
3716 I.getOpcode() == Instruction::SExt ||
3717 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003718 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003719 //
3720 // Generate OpSelect.
3721 //
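        // For example (illustrative), "%r = zext i1 %c to i32" becomes
        //   %r = OpSelect %uint %c %uint_1 %uint_0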
3722
3723 // Ops[0] = Result Type ID
3724 // Ops[1] = Condition ID
3725 // Ops[2] = True Constant ID
3726 // Ops[3] = False Constant ID
3727 SPIRVOperandList Ops;
3728
David Neto257c3892018-04-11 13:19:45 -04003729 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003730
David Neto22f144c2017-06-12 14:26:21 -04003731 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003732 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003733
3734 uint32_t TrueID = 0;
3735 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003736 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003737 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003738 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003739 } else {
3740 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3741 }
David Neto257c3892018-04-11 13:19:45 -04003742 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003743
3744 uint32_t FalseID = 0;
3745 if (I.getOpcode() == Instruction::ZExt) {
3746 FalseID = VMap[Constant::getNullValue(I.getType())];
3747 } else if (I.getOpcode() == Instruction::SExt) {
3748 FalseID = VMap[Constant::getNullValue(I.getType())];
3749 } else {
3750 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3751 }
David Neto257c3892018-04-11 13:19:45 -04003752 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003753
David Neto87846742018-04-11 17:36:22 -04003754 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003755 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003756 } else if (!clspv::Option::Int8Support() &&
3757 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003758 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3759 // 8 bits.
3760 // Before:
3761 // %result = trunc i32 %a to i8
 3762 // After:
3763 // %result = OpBitwiseAnd %uint %a %uint_255
3764
3765 SPIRVOperandList Ops;
3766
David Neto257c3892018-04-11 13:19:45 -04003767 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003768
3769 Type *UintTy = Type::getInt32Ty(Context);
3770 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003771 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003772
David Neto87846742018-04-11 17:36:22 -04003773 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003774 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003775 } else {
3776 // Ops[0] = Result Type ID
3777 // Ops[1] = Source Value ID
3778 SPIRVOperandList Ops;
3779
David Neto257c3892018-04-11 13:19:45 -04003780 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003781
David Neto87846742018-04-11 17:36:22 -04003782 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003783 SPIRVInstList.push_back(Inst);
3784 }
3785 } else if (isa<BinaryOperator>(I)) {
3786 //
3787 // Generate SPIRV instructions for binary operators.
3788 //
3789
3790 // Handle xor with i1 type specially.
3791 if (I.getOpcode() == Instruction::Xor &&
3792 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003793 ((isa<ConstantInt>(I.getOperand(0)) &&
3794 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3795 (isa<ConstantInt>(I.getOperand(1)) &&
3796 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003797 //
3798 // Generate OpLogicalNot.
3799 //
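 // Illustrative sketch, not from the original source (%c is a placeholder
 // name): xor of an i1 with a true constant is a logical negation:
 //   xor i1 %c, true   ->   %r = OpLogicalNot %bool %c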
3800 // Ops[0] = Result Type ID
3801 // Ops[1] = Operand
3802 SPIRVOperandList Ops;
3803
David Neto257c3892018-04-11 13:19:45 -04003804 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003805
3806 Value *CondV = I.getOperand(0);
3807 if (isa<Constant>(I.getOperand(0))) {
3808 CondV = I.getOperand(1);
3809 }
David Neto257c3892018-04-11 13:19:45 -04003810 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003811
David Neto87846742018-04-11 17:36:22 -04003812 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003813 SPIRVInstList.push_back(Inst);
3814 } else {
3815 // Ops[0] = Result Type ID
3816 // Ops[1] = Operand 0
3817 // Ops[2] = Operand 1
3818 SPIRVOperandList Ops;
3819
David Neto257c3892018-04-11 13:19:45 -04003820 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3821 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003822
David Neto87846742018-04-11 17:36:22 -04003823 auto *Inst =
3824 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003825 SPIRVInstList.push_back(Inst);
3826 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003827 } else if (I.getOpcode() == Instruction::FNeg) {
3828 // The only unary operator.
3829 //
3830 // Ops[0] = Result Type ID
3831 // Ops[1] = Operand 0
3832 SPIRVOperandList ops;
3833
3834 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3835 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3836 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003837 } else {
3838 I.print(errs());
3839 llvm_unreachable("Unsupported instruction???");
3840 }
3841 break;
3842 }
3843 case Instruction::GetElementPtr: {
3844 auto &GlobalConstArgSet = getGlobalConstArgSet();
3845
3846 //
3847 // Generate OpAccessChain.
3848 //
3849 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3850
3855 // Ops[0] = Result Type ID
3856 // Ops[1] = Base ID
3857 // Ops[2] ... Ops[n] = Indexes ID
3858 SPIRVOperandList Ops;
3859
alan-bakerb6b09dc2018-11-08 16:59:28 -05003860 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003861 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3862 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3863 // Use pointer type with private address space for global constant.
3864 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003865 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003866 }
David Neto257c3892018-04-11 13:19:45 -04003867
3868 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003869
David Neto862b7d82018-06-14 18:48:37 -04003870 // Generate the base pointer.
3871 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003872
David Neto862b7d82018-06-14 18:48:37 -04003873 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003874
3875 //
 3876 // The following rules are used to translate the GEP:
3877 //
David Neto862b7d82018-06-14 18:48:37 -04003878 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3879 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003880 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3881 // first index.
3882 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3883 // use gep's first index.
 3884 // 4. Otherwise (none of cases 1-3 applies), generate OpAccessChain and
 3885 // use gep's first index.
3886 //
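 // Illustrative sketch, not from the original source (%S, %base and %n are
 // placeholder names):
 //   getelementptr %S, %S* %base, i32 0, i32 2
 //     -> OpAccessChain %ptr %base %uint_2          (rule 1)
 //   getelementptr i32, i32* %base, i32 %n
 //     -> OpPtrAccessChain %ptr %base %n            (rule 3)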
3887 spv::Op Opcode = spv::OpAccessChain;
3888 unsigned offset = 0;
3889 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003890 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003891 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003892 } else {
David Neto22f144c2017-06-12 14:26:21 -04003893 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003894 }
David Neto862b7d82018-06-14 18:48:37 -04003895 } else {
David Neto22f144c2017-06-12 14:26:21 -04003896 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003897 }
3898
3899 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003900 // Do we need to generate ArrayStride? Check against the GEP result type
3901 // rather than the pointer type of the base because when indexing into
3902 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3903 // for something else in the SPIR-V.
3904 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003905 auto address_space = ResultType->getAddressSpace();
3906 setVariablePointersCapabilities(address_space);
3907 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003908 case spv::StorageClassStorageBuffer:
3909 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003910 // Save the need to generate an ArrayStride decoration. But defer
3911 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003912 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003913 break;
3914 default:
3915 break;
David Neto1a1a0582017-07-07 12:01:44 -04003916 }
David Neto22f144c2017-06-12 14:26:21 -04003917 }
3918
3919 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003920 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003921 }
3922
David Neto87846742018-04-11 17:36:22 -04003923 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003924 SPIRVInstList.push_back(Inst);
3925 break;
3926 }
3927 case Instruction::ExtractValue: {
3928 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3929 // Ops[0] = Result Type ID
3930 // Ops[1] = Composite ID
3931 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3932 SPIRVOperandList Ops;
3933
David Neto257c3892018-04-11 13:19:45 -04003934 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003935
3936 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003937 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003938
3939 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003940 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003941 }
3942
David Neto87846742018-04-11 17:36:22 -04003943 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003944 SPIRVInstList.push_back(Inst);
3945 break;
3946 }
3947 case Instruction::InsertValue: {
3948 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3949 // Ops[0] = Result Type ID
3950 // Ops[1] = Object ID
3951 // Ops[2] = Composite ID
3952 // Ops[3] ... Ops[n] = Indexes (Literal Number)
3953 SPIRVOperandList Ops;
3954
3955 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003956 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003957
3958 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04003959 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003960
3961 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003962 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003963
3964 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003965 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003966 }
3967
David Neto87846742018-04-11 17:36:22 -04003968 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003969 SPIRVInstList.push_back(Inst);
3970 break;
3971 }
3972 case Instruction::Select: {
3973 //
3974 // Generate OpSelect.
3975 //
3976
3977 // Ops[0] = Result Type ID
3978 // Ops[1] = Condition ID
3979 // Ops[2] = True Constant ID
3980 // Ops[3] = False Constant ID
3981 SPIRVOperandList Ops;
3982
3983 // Find SPIRV instruction for parameter type.
3984 auto Ty = I.getType();
3985 if (Ty->isPointerTy()) {
3986 auto PointeeTy = Ty->getPointerElementType();
3987 if (PointeeTy->isStructTy() &&
3988 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3989 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003990 } else {
3991 // Selecting between pointers requires variable pointers.
3992 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3993 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
3994 setVariablePointers(true);
3995 }
David Neto22f144c2017-06-12 14:26:21 -04003996 }
3997 }
3998
David Neto257c3892018-04-11 13:19:45 -04003999 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4000 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004001
David Neto87846742018-04-11 17:36:22 -04004002 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004003 SPIRVInstList.push_back(Inst);
4004 break;
4005 }
4006 case Instruction::ExtractElement: {
4007 // Handle <4 x i8> type manually.
4008 Type *CompositeTy = I.getOperand(0)->getType();
4009 if (is4xi8vec(CompositeTy)) {
4010 //
4011 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4012 // <4 x i8>.
4013 //
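 // Illustrative sketch, not from the original source: the <4 x i8> value is
 // packed into a 32-bit word, so extracting element i is a shift and a mask:
 //   %shifted = OpShiftRightLogical %uint %packed %uint_(i*8)
 //   %result  = OpBitwiseAnd %uint %shifted %uint_255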
4014
4015 //
4016 // Generate OpShiftRightLogical
4017 //
4018 // Ops[0] = Result Type ID
4019 // Ops[1] = Operand 0
4020 // Ops[2] = Operand 1
4021 //
4022 SPIRVOperandList Ops;
4023
David Neto257c3892018-04-11 13:19:45 -04004024 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004025
4026 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004027 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004028
4029 uint32_t Op1ID = 0;
4030 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4031 // Handle constant index.
4032 uint64_t Idx = CI->getZExtValue();
4033 Value *ShiftAmount =
4034 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4035 Op1ID = VMap[ShiftAmount];
4036 } else {
4037 // Handle variable index.
4038 SPIRVOperandList TmpOps;
4039
David Neto257c3892018-04-11 13:19:45 -04004040 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4041 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004042
4043 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004044 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004045
4046 Op1ID = nextID;
4047
David Neto87846742018-04-11 17:36:22 -04004048 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004049 SPIRVInstList.push_back(TmpInst);
4050 }
David Neto257c3892018-04-11 13:19:45 -04004051 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004052
4053 uint32_t ShiftID = nextID;
4054
David Neto87846742018-04-11 17:36:22 -04004055 auto *Inst =
4056 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004057 SPIRVInstList.push_back(Inst);
4058
4059 //
4060 // Generate OpBitwiseAnd
4061 //
4062 // Ops[0] = Result Type ID
4063 // Ops[1] = Operand 0
4064 // Ops[2] = Operand 1
4065 //
4066 Ops.clear();
4067
David Neto257c3892018-04-11 13:19:45 -04004068 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004069
4070 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004071 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004072
David Neto9b2d6252017-09-06 15:47:37 -04004073 // Reset mapping for this value to the result of the bitwise and.
4074 VMap[&I] = nextID;
4075
David Neto87846742018-04-11 17:36:22 -04004076 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004077 SPIRVInstList.push_back(Inst);
4078 break;
4079 }
4080
4081 // Ops[0] = Result Type ID
4082 // Ops[1] = Composite ID
4083 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4084 SPIRVOperandList Ops;
4085
David Neto257c3892018-04-11 13:19:45 -04004086 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004087
4088 spv::Op Opcode = spv::OpCompositeExtract;
4089 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004090 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004091 } else {
David Neto257c3892018-04-11 13:19:45 -04004092 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004093 Opcode = spv::OpVectorExtractDynamic;
4094 }
4095
David Neto87846742018-04-11 17:36:22 -04004096 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004097 SPIRVInstList.push_back(Inst);
4098 break;
4099 }
4100 case Instruction::InsertElement: {
4101 // Handle <4 x i8> type manually.
4102 Type *CompositeTy = I.getOperand(0)->getType();
4103 if (is4xi8vec(CompositeTy)) {
4104 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4105 uint32_t CstFFID = VMap[CstFF];
4106
4107 uint32_t ShiftAmountID = 0;
4108 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4109 // Handle constant index.
4110 uint64_t Idx = CI->getZExtValue();
4111 Value *ShiftAmount =
4112 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4113 ShiftAmountID = VMap[ShiftAmount];
4114 } else {
4115 // Handle variable index.
4116 SPIRVOperandList TmpOps;
4117
David Neto257c3892018-04-11 13:19:45 -04004118 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4119 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004120
4121 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004122 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004123
4124 ShiftAmountID = nextID;
4125
David Neto87846742018-04-11 17:36:22 -04004126 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004127 SPIRVInstList.push_back(TmpInst);
4128 }
4129
4130 //
4131 // Generate mask operations.
4132 //
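 // Illustrative sketch, not from the original source: inserting byte i into
 // the packed <4 x i8> word clears that byte and ORs in the shifted new one:
 //   mask   = 0xFF << (i * 8)
 //   result = (orig & ~mask) | (uint32_t(newbyte) << (i * 8))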
4133
4134 // ShiftLeft mask according to index of insertelement.
4135 SPIRVOperandList Ops;
4136
David Neto257c3892018-04-11 13:19:45 -04004137 const uint32_t ResTyID = lookupType(CompositeTy);
4138 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004139
4140 uint32_t MaskID = nextID;
4141
David Neto87846742018-04-11 17:36:22 -04004142 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004143 SPIRVInstList.push_back(Inst);
4144
4145 // Inverse mask.
4146 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004147 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004148
4149 uint32_t InvMaskID = nextID;
4150
David Neto87846742018-04-11 17:36:22 -04004151 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004152 SPIRVInstList.push_back(Inst);
4153
4154 // Apply mask.
4155 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004156 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004157
4158 uint32_t OrgValID = nextID;
4159
David Neto87846742018-04-11 17:36:22 -04004160 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004161 SPIRVInstList.push_back(Inst);
4162
4163 // Create correct value according to index of insertelement.
4164 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004165 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4166 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004167
4168 uint32_t InsertValID = nextID;
4169
David Neto87846742018-04-11 17:36:22 -04004170 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004171 SPIRVInstList.push_back(Inst);
4172
4173 // Insert value to original value.
4174 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004175 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004176
David Netoa394f392017-08-26 20:45:29 -04004177 VMap[&I] = nextID;
4178
David Neto87846742018-04-11 17:36:22 -04004179 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004180 SPIRVInstList.push_back(Inst);
4181
4182 break;
4183 }
4184
David Neto22f144c2017-06-12 14:26:21 -04004185 SPIRVOperandList Ops;
4186
James Priced26efea2018-06-09 23:28:32 +01004187 // Ops[0] = Result Type ID
4188 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004189
4190 spv::Op Opcode = spv::OpCompositeInsert;
4191 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004192 const auto value = CI->getZExtValue();
4193 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004194 // Ops[1] = Object ID
4195 // Ops[2] = Composite ID
4196 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004197 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004198 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004199 } else {
James Priced26efea2018-06-09 23:28:32 +01004200 // Ops[1] = Composite ID
4201 // Ops[2] = Object ID
4202 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004203 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004204 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004205 Opcode = spv::OpVectorInsertDynamic;
4206 }
4207
David Neto87846742018-04-11 17:36:22 -04004208 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004209 SPIRVInstList.push_back(Inst);
4210 break;
4211 }
4212 case Instruction::ShuffleVector: {
4213 // Ops[0] = Result Type ID
4214 // Ops[1] = Vector 1 ID
4215 // Ops[2] = Vector 2 ID
4216 // Ops[3] ... Ops[n] = Components (Literal Number)
4217 SPIRVOperandList Ops;
4218
David Neto257c3892018-04-11 13:19:45 -04004219 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4220 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004221
4222 uint64_t NumElements = 0;
4223 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4224 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4225
4226 if (Cst->isNullValue()) {
4227 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004228 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004229 }
4230 } else if (const ConstantDataSequential *CDS =
4231 dyn_cast<ConstantDataSequential>(Cst)) {
4232 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004234 const auto value = CDS->getElementAsInteger(i);
4235 assert(value <= UINT32_MAX);
4236 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004237 }
4238 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4239 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4240 auto Op = CV->getOperand(i);
4241
4242 uint32_t literal = 0;
4243
4244 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4245 literal = static_cast<uint32_t>(CI->getZExtValue());
 4246 } else if (isa<UndefValue>(Op)) {
4247 literal = 0xFFFFFFFFu;
4248 } else {
4249 Op->print(errs());
4250 llvm_unreachable("Unsupported element in ConstantVector!");
4251 }
4252
David Neto257c3892018-04-11 13:19:45 -04004253 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004254 }
4255 } else {
4256 Cst->print(errs());
4257 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4258 }
4259 }
4260
David Neto87846742018-04-11 17:36:22 -04004261 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004262 SPIRVInstList.push_back(Inst);
4263 break;
4264 }
4265 case Instruction::ICmp:
4266 case Instruction::FCmp: {
4267 CmpInst *CmpI = cast<CmpInst>(&I);
4268
David Netod4ca2e62017-07-06 18:47:35 -04004269 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004270 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004271 if (isa<PointerType>(ArgTy)) {
4272 CmpI->print(errs());
4273 std::string name = I.getParent()->getParent()->getName();
4274 errs()
4275 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4276 << "in function " << name << "\n";
4277 llvm_unreachable("Pointer equality check is invalid");
4278 break;
4279 }
4280
David Neto257c3892018-04-11 13:19:45 -04004281 // Ops[0] = Result Type ID
4282 // Ops[1] = Operand 1 ID
4283 // Ops[2] = Operand 2 ID
4284 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004285
David Neto257c3892018-04-11 13:19:45 -04004286 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4287 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004288
4289 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004290 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004291 SPIRVInstList.push_back(Inst);
4292 break;
4293 }
4294 case Instruction::Br: {
 4295 // The branch instruction is deferred because it needs the label's ID.
 4296 // Record the slot's location in the SPIRVInstructionList.
4297 DeferredInsts.push_back(
4298 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4299 break;
4300 }
4301 case Instruction::Switch: {
4302 I.print(errs());
4303 llvm_unreachable("Unsupported instruction???");
4304 break;
4305 }
4306 case Instruction::IndirectBr: {
4307 I.print(errs());
4308 llvm_unreachable("Unsupported instruction???");
4309 break;
4310 }
4311 case Instruction::PHI: {
 4312 // The PHI instruction is deferred because it needs the IDs of its incoming
 4313 // values and blocks. Record the slot's location in the SPIRVInstructionList.
4314 DeferredInsts.push_back(
4315 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4316 break;
4317 }
4318 case Instruction::Alloca: {
4319 //
4320 // Generate OpVariable.
4321 //
4322 // Ops[0] : Result Type ID
4323 // Ops[1] : Storage Class
4324 SPIRVOperandList Ops;
4325
David Neto257c3892018-04-11 13:19:45 -04004326 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004327
David Neto87846742018-04-11 17:36:22 -04004328 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004329 SPIRVInstList.push_back(Inst);
4330 break;
4331 }
4332 case Instruction::Load: {
4333 LoadInst *LD = cast<LoadInst>(&I);
4334 //
4335 // Generate OpLoad.
4336 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004337
alan-baker5b86ed72019-02-15 08:26:50 -05004338 if (LD->getType()->isPointerTy()) {
4339 // Loading a pointer requires variable pointers.
4340 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4341 }
David Neto22f144c2017-06-12 14:26:21 -04004342
David Neto0a2f98d2017-09-15 19:38:40 -04004343 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004344 uint32_t PointerID = VMap[LD->getPointerOperand()];
4345
4346 // This is a hack to work around what looks like a driver bug.
4347 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004348 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4349 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004350 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004351 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004352 // Generate a bitwise-and of the original value with itself.
4353 // We should have been able to get away with just an OpCopyObject,
4354 // but we need something more complex to get past certain driver bugs.
4355 // This is ridiculous, but necessary.
4356 // TODO(dneto): Revisit this once drivers fix their bugs.
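 // Illustrative sketch, not from the original source: instead of
 //   %v = OpLoad %v3uint %workgroup_size_var
 // the pass emits
 //   %v = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value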
4357
4358 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004359 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4360 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004361
David Neto87846742018-04-11 17:36:22 -04004362 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004363 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004364 break;
4365 }
4366
4367 // This is the normal path. Generate a load.
4368
David Neto22f144c2017-06-12 14:26:21 -04004369 // Ops[0] = Result Type ID
4370 // Ops[1] = Pointer ID
4371 // Ops[2] ... Ops[n] = Optional Memory Access
4372 //
4373 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004374
David Neto22f144c2017-06-12 14:26:21 -04004375 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004376 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004377
David Neto87846742018-04-11 17:36:22 -04004378 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004379 SPIRVInstList.push_back(Inst);
4380 break;
4381 }
4382 case Instruction::Store: {
4383 StoreInst *ST = cast<StoreInst>(&I);
4384 //
4385 // Generate OpStore.
4386 //
4387
alan-baker5b86ed72019-02-15 08:26:50 -05004388 if (ST->getValueOperand()->getType()->isPointerTy()) {
4389 // Storing a pointer requires variable pointers.
4390 setVariablePointersCapabilities(
4391 ST->getValueOperand()->getType()->getPointerAddressSpace());
4392 }
4393
David Neto22f144c2017-06-12 14:26:21 -04004394 // Ops[0] = Pointer ID
4395 // Ops[1] = Object ID
4396 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4397 //
4398 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004399 SPIRVOperandList Ops;
4400 Ops << MkId(VMap[ST->getPointerOperand()])
4401 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004402
David Neto87846742018-04-11 17:36:22 -04004403 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004404 SPIRVInstList.push_back(Inst);
4405 break;
4406 }
4407 case Instruction::AtomicCmpXchg: {
4408 I.print(errs());
4409 llvm_unreachable("Unsupported instruction???");
4410 break;
4411 }
4412 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004413 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4414
4415 spv::Op opcode;
4416
4417 switch (AtomicRMW->getOperation()) {
4418 default:
4419 I.print(errs());
4420 llvm_unreachable("Unsupported instruction???");
4421 case llvm::AtomicRMWInst::Add:
4422 opcode = spv::OpAtomicIAdd;
4423 break;
4424 case llvm::AtomicRMWInst::Sub:
4425 opcode = spv::OpAtomicISub;
4426 break;
4427 case llvm::AtomicRMWInst::Xchg:
4428 opcode = spv::OpAtomicExchange;
4429 break;
4430 case llvm::AtomicRMWInst::Min:
4431 opcode = spv::OpAtomicSMin;
4432 break;
4433 case llvm::AtomicRMWInst::Max:
4434 opcode = spv::OpAtomicSMax;
4435 break;
4436 case llvm::AtomicRMWInst::UMin:
4437 opcode = spv::OpAtomicUMin;
4438 break;
4439 case llvm::AtomicRMWInst::UMax:
4440 opcode = spv::OpAtomicUMax;
4441 break;
4442 case llvm::AtomicRMWInst::And:
4443 opcode = spv::OpAtomicAnd;
4444 break;
4445 case llvm::AtomicRMWInst::Or:
4446 opcode = spv::OpAtomicOr;
4447 break;
4448 case llvm::AtomicRMWInst::Xor:
4449 opcode = spv::OpAtomicXor;
4450 break;
4451 }
4452
4453 //
4454 // Generate OpAtomic*.
4455 //
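 // Illustrative sketch, not from the original source: an atomicrmw add
 // becomes, with device scope and sequentially-consistent semantics,
 //   %r = OpAtomicIAdd %uint %ptr %scope_device %semantics %val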
4456 SPIRVOperandList Ops;
4457
David Neto257c3892018-04-11 13:19:45 -04004458 Ops << MkId(lookupType(I.getType()))
4459 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004460
4461 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004462 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004463 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004464
4465 const auto ConstantMemorySemantics = ConstantInt::get(
4466 IntTy, spv::MemorySemanticsUniformMemoryMask |
4467 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004468 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004469
David Neto257c3892018-04-11 13:19:45 -04004470 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004471
4472 VMap[&I] = nextID;
4473
David Neto87846742018-04-11 17:36:22 -04004474 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004475 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004476 break;
4477 }
4478 case Instruction::Fence: {
4479 I.print(errs());
4480 llvm_unreachable("Unsupported instruction???");
4481 break;
4482 }
4483 case Instruction::Call: {
4484 CallInst *Call = dyn_cast<CallInst>(&I);
4485 Function *Callee = Call->getCalledFunction();
4486
Alan Baker202c8c72018-08-13 13:47:44 -04004487 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004488 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4489 // Generate an OpLoad
4490 SPIRVOperandList Ops;
4491 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004492
David Neto862b7d82018-06-14 18:48:37 -04004493 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4494 << MkId(ResourceVarDeferredLoadCalls[Call]);
4495
4496 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4497 SPIRVInstList.push_back(Inst);
4498 VMap[Call] = load_id;
4499 break;
4500
4501 } else {
4502 // This maps to an OpVariable we've already generated.
4503 // No code is generated for the call.
4504 }
4505 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004506 } else if (Callee->getName().startswith(
4507 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004508 // Don't codegen an instruction here, but instead map this call directly
4509 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004510 int spec_id = static_cast<int>(
4511 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004512 const auto &info = LocalSpecIdInfoMap[spec_id];
4513 VMap[Call] = info.variable_id;
4514 break;
David Neto862b7d82018-06-14 18:48:37 -04004515 }
4516
4517 // Sampler initializers become a load of the corresponding sampler.
4518
Kévin Petitdf71de32019-04-09 14:09:50 +01004519 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004520 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004521 const auto third_param = static_cast<unsigned>(
4522 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4523 auto sampler_value = third_param;
4524 if (clspv::Option::UseSamplerMap()) {
4525 sampler_value = getSamplerMap()[third_param].first;
4526 }
David Neto862b7d82018-06-14 18:48:37 -04004527
4528 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004529 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004530 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004531
David Neto257c3892018-04-11 13:19:45 -04004532 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004533 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004534
David Neto862b7d82018-06-14 18:48:37 -04004535 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004536 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004537 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004538 break;
4539 }
4540
Kévin Petit349c9502019-03-28 17:24:14 +00004541 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004542 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4543 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4544 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004545
Kévin Petit617a76d2019-04-04 13:54:16 +01004546 // If the switch above didn't have an entry maybe the intrinsic
4547 // is using the name mangling logic.
4548 bool usesMangler = false;
4549 if (opcode == spv::OpNop) {
4550 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4551 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4552 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4553 usesMangler = true;
4554 }
4555 }
4556
Kévin Petit349c9502019-03-28 17:24:14 +00004557 if (opcode != spv::OpNop) {
4558
David Neto22f144c2017-06-12 14:26:21 -04004559 SPIRVOperandList Ops;
4560
Kévin Petit349c9502019-03-28 17:24:14 +00004561 if (!I.getType()->isVoidTy()) {
4562 Ops << MkId(lookupType(I.getType()));
4563 }
David Neto22f144c2017-06-12 14:26:21 -04004564
Kévin Petit617a76d2019-04-04 13:54:16 +01004565 unsigned firstOperand = usesMangler ? 1 : 0;
4566 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004567 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004568 }
4569
Kévin Petit349c9502019-03-28 17:24:14 +00004570 if (!I.getType()->isVoidTy()) {
4571 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004572 }
4573
Kévin Petit349c9502019-03-28 17:24:14 +00004574 SPIRVInstruction *Inst;
4575 if (!I.getType()->isVoidTy()) {
4576 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4577 } else {
4578 Inst = new SPIRVInstruction(opcode, Ops);
4579 }
Kévin Petit8a560882019-03-21 15:24:34 +00004580 SPIRVInstList.push_back(Inst);
4581 break;
4582 }
4583
David Neto22f144c2017-06-12 14:26:21 -04004584 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4585 if (Callee->getName().startswith("spirv.copy_memory")) {
4586 //
4587 // Generate OpCopyMemory.
4588 //
4589
4590 // Ops[0] = Dst ID
4591 // Ops[1] = Src ID
4592 // Ops[2] = Memory Access
4593 // Ops[3] = Alignment
4594
4595 auto IsVolatile =
4596 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4597
4598 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4599 : spv::MemoryAccessMaskNone;
4600
4601 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4602
4603 auto Alignment =
4604 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4605
David Neto257c3892018-04-11 13:19:45 -04004606 SPIRVOperandList Ops;
4607 Ops << MkId(VMap[Call->getArgOperand(0)])
4608 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4609 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004610
David Neto87846742018-04-11 17:36:22 -04004611 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004612
4613 SPIRVInstList.push_back(Inst);
4614
4615 break;
4616 }
4617
David Neto22f144c2017-06-12 14:26:21 -04004618 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4619 // Additionally, OpTypeSampledImage is generated.
alan-bakerf67468c2019-11-25 15:51:49 -05004620 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004621 //
4622 // Generate OpSampledImage.
4623 //
4624 // Ops[0] = Result Type ID
4625 // Ops[1] = Image ID
4626 // Ops[2] = Sampler ID
4627 //
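 // Illustrative sketch, not from the original source: for a float image the
 // read becomes a sampled-image combine followed by an explicit-LOD sample:
 //   %si = OpSampledImage %sampled_image_ty %image %sampler
 //   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0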
4628 SPIRVOperandList Ops;
4629
4630 Value *Image = Call->getArgOperand(0);
4631 Value *Sampler = Call->getArgOperand(1);
4632 Value *Coordinate = Call->getArgOperand(2);
4633
4634 TypeMapType &OpImageTypeMap = getImageTypeMap();
4635 Type *ImageTy = Image->getType()->getPointerElementType();
4636 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004637 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004638 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004639
4640 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004641
4642 uint32_t SampledImageID = nextID;
4643
David Neto87846742018-04-11 17:36:22 -04004644 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004645 SPIRVInstList.push_back(Inst);
4646
4647 //
4648 // Generate OpImageSampleExplicitLod.
4649 //
4650 // Ops[0] = Result Type ID
4651 // Ops[1] = Sampled Image ID
4652 // Ops[2] = Coordinate ID
4653 // Ops[3] = Image Operands Type ID
4654 // Ops[4] ... Ops[n] = Operands ID
4655 //
4656 Ops.clear();
4657
alan-bakerf67468c2019-11-25 15:51:49 -05004658 const bool is_int_image = IsIntImageType(Image->getType());
4659 uint32_t result_type = 0;
4660 if (is_int_image) {
4661 result_type = v4int32ID;
4662 } else {
4663 result_type = lookupType(Call->getType());
4664 }
4665
4666 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4667 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004668
4669 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004670 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004671
alan-bakerf67468c2019-11-25 15:51:49 -05004672 uint32_t final_id = nextID++;
4673 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004674
alan-bakerf67468c2019-11-25 15:51:49 -05004675 uint32_t image_id = final_id;
4676 if (is_int_image) {
4677 // Int image requires a bitcast from v4int to v4uint.
4678 image_id = nextID++;
4679 }
4680
4681 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004682 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004683
4684 if (is_int_image) {
4685 // Generate the bitcast.
4686 Ops.clear();
4687 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4688 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4689 SPIRVInstList.push_back(Inst);
4690 }
David Neto22f144c2017-06-12 14:26:21 -04004691 break;
4692 }
4693
alan-bakerf67468c2019-11-25 15:51:49 -05004694 // write_image is mapped to OpImageWrite.
4695 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004696 //
4697 // Generate OpImageWrite.
4698 //
4699 // Ops[0] = Image ID
4700 // Ops[1] = Coordinate ID
4701 // Ops[2] = Texel ID
4702 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4703 // Ops[4] ... Ops[n] = (Optional) Operands ID
4704 //
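 // Illustrative sketch, not from the original source: a write_image call
 // lowers to
 //   OpImageWrite %image %coord %texel
 // with an extra OpBitcast of the texel emitted first for integer images.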
4705 SPIRVOperandList Ops;
4706
4707 Value *Image = Call->getArgOperand(0);
4708 Value *Coordinate = Call->getArgOperand(1);
4709 Value *Texel = Call->getArgOperand(2);
4710
4711 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004712 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004713 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004714
4715 const bool is_int_image = IsIntImageType(Image->getType());
4716 if (is_int_image) {
4717 // Generate a bitcast to v4int and use it as the texel value.
4718 uint32_t castID = nextID++;
4719 Ops << MkId(v4int32ID) << MkId(TexelID);
4720 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4721 SPIRVInstList.push_back(cast);
4722 Ops.clear();
4723 TexelID = castID;
4724 }
David Neto257c3892018-04-11 13:19:45 -04004725 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004726
David Neto87846742018-04-11 17:36:22 -04004727 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004728 SPIRVInstList.push_back(Inst);
4729 break;
4730 }
4731
alan-bakerce179f12019-12-06 19:02:22 -05004732 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4733 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004734 //
alan-bakerce179f12019-12-06 19:02:22 -05004735 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004736 //
4737 // Ops[0] = Image ID
4738 //
alan-bakerce179f12019-12-06 19:02:22 -05004739 // Result type has components equal to the dimensionality of the image,
4740 // plus 1 if the image is arrayed.
4741 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004742 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04004743 SPIRVOperandList Ops;
4744
4745 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004746 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4747 uint32_t SizesTypeID = 0;
4748
David Neto5c22a252018-03-15 16:07:41 -04004749 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004750 const uint32_t dim = ImageDimensionality(Image->getType());
alan-bakerf906d2b2019-12-10 11:26:23 -05004751 // TODO(alan-baker): fix component calculation when arrayed images are
4752 // supported.
alan-bakerce179f12019-12-06 19:02:22 -05004753 const uint32_t components = dim;
4754 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004755 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4756 } else {
4757 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4758 }
David Neto5c22a252018-03-15 16:07:41 -04004759 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004760 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004761 spv::Op query_opcode = spv::OpImageQuerySize;
4762 if (clspv::IsSampledImageType(Image->getType())) {
4763 query_opcode = spv::OpImageQuerySizeLod;
4764 // Need explicit 0 for Lod operand.
4765 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4766 Ops << MkId(VMap[CstInt0]);
4767 }
David Neto5c22a252018-03-15 16:07:41 -04004768
4769 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004770 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004771 SPIRVInstList.push_back(QueryInst);
4772
alan-bakerce179f12019-12-06 19:02:22 -05004773 // May require an extra instruction to create the appropriate result of
4774 // the builtin function.
4775 if (clspv::IsGetImageDim(Callee)) {
4776 if (dim == 3) {
4777 // get_image_dim returns an int4 for 3D images.
4778 //
4779 // Reset value map entry since we generated an intermediate
4780 // instruction.
4781 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004782
alan-bakerce179f12019-12-06 19:02:22 -05004783 // Implement:
4784 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4785 Ops.clear();
4786 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4787 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004788
alan-bakerce179f12019-12-06 19:02:22 -05004789 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4790 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004791
alan-bakerce179f12019-12-06 19:02:22 -05004792 auto *Inst =
4793 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4794 SPIRVInstList.push_back(Inst);
4795 } else if (dim != components) {
 4796 // get_image_dim returns an int2 regardless of the arrayedness of the
4797 // image. If the image is arrayed an element must be dropped from the
4798 // query result.
4799 //
4800 // Reset value map entry since we generated an intermediate
4801 // instruction.
4802 VMap[&I] = nextID;
4803
4804 // Implement:
4805 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4806 Ops.clear();
4807 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4808 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4809
4810 auto *Inst =
4811 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4812 SPIRVInstList.push_back(Inst);
4813 }
4814 } else if (components > 1) {
4815 // Reset value map entry since we generated an intermediate instruction.
4816 VMap[&I] = nextID;
4817
4818 // Implement:
4819 // %result = OpCompositeExtract %uint %sizes <component number>
4820 Ops.clear();
4821 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4822
4823 uint32_t component = 0;
4824 if (IsGetImageHeight(Callee))
4825 component = 1;
4826 else if (IsGetImageDepth(Callee))
4827 component = 2;
4828 Ops << MkNum(component);
4829
4830 auto *Inst =
4831 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4832 SPIRVInstList.push_back(Inst);
4833 }
David Neto5c22a252018-03-15 16:07:41 -04004834 break;
4835 }
4836
David Neto22f144c2017-06-12 14:26:21 -04004837 // The call instruction is deferred because it needs the function's ID.
 4838 // Record the slot's location in the SPIRVInstructionList.
4839 DeferredInsts.push_back(
4840 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4841
David Neto3fbb4072017-10-16 11:28:14 -04004842 // Check whether the implementation of this call uses an extended
4843 // instruction plus one more value-producing instruction. If so, then
4844 // reserve the id for the extra value-producing slot.
4845 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4846 if (EInst != kGlslExtInstBad) {
4847 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004848 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004849 VMap[&I] = nextID;
4850 nextID++;
4851 }
4852 break;
4853 }
4854 case Instruction::Ret: {
4855 unsigned NumOps = I.getNumOperands();
4856 if (NumOps == 0) {
4857 //
4858 // Generate OpReturn.
4859 //
David Neto87846742018-04-11 17:36:22 -04004860 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn, {}));
David Neto22f144c2017-06-12 14:26:21 -04004861 } else {
4862 //
4863 // Generate OpReturnValue.
4864 //
4865
4866 // Ops[0] = Return Value ID
4867 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004868
4869 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004870
David Neto87846742018-04-11 17:36:22 -04004871 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004872 SPIRVInstList.push_back(Inst);
4873 break;
4874 }
4875 break;
4876 }
4877 }
4878}
4879
4880void SPIRVProducerPass::GenerateFuncEpilogue() {
4881 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4882
4883 //
4884 // Generate OpFunctionEnd
4885 //
4886
David Neto87846742018-04-11 17:36:22 -04004887 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd, {});
David Neto22f144c2017-06-12 14:26:21 -04004888 SPIRVInstList.push_back(Inst);
4889}
4890
4891bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004892 // Don't specialize <4 x i8> if i8 is generally supported.
4893 if (clspv::Option::Int8Support())
4894 return false;
4895
David Neto22f144c2017-06-12 14:26:21 -04004896 LLVMContext &Context = Ty->getContext();
4897 if (Ty->isVectorTy()) {
4898 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4899 Ty->getVectorNumElements() == 4) {
4900 return true;
4901 }
4902 }
4903
4904 return false;
4905}
4906
4907void SPIRVProducerPass::HandleDeferredInstruction() {
4908 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4909 ValueMapType &VMap = getValueMap();
4910 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4911
4912 for (auto DeferredInst = DeferredInsts.rbegin();
4913 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4914 Value *Inst = std::get<0>(*DeferredInst);
4915 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4916 if (InsertPoint != SPIRVInstList.end()) {
4917 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4918 ++InsertPoint;
4919 }
4920 }
4921
4922 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004923 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004924 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004925 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004926 //
4927 // Generate OpLoopMerge.
4928 //
4929 // Ops[0] = Merge Block ID
4930 // Ops[1] = Continue Target ID
 4931 // Ops[2] = Loop Control
4932 SPIRVOperandList Ops;
4933
alan-baker06cad652019-12-03 17:56:47 -05004934 auto MergeBB = MergeBlocks[BrBB];
4935 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04004936 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004937 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004938 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004939 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004940
David Neto87846742018-04-11 17:36:22 -04004941 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004942 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05004943 } else if (MergeBlocks.count(BrBB)) {
4944 //
4945 // Generate OpSelectionMerge.
4946 //
4947 // Ops[0] = Merge Block ID
4948 // Ops[1] = Selection Control
4949 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004950
alan-baker06cad652019-12-03 17:56:47 -05004951 auto MergeBB = MergeBlocks[BrBB];
4952 uint32_t MergeBBID = VMap[MergeBB];
4953 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004954
alan-baker06cad652019-12-03 17:56:47 -05004955 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
4956 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04004957 }
4958
4959 if (Br->isConditional()) {
4960 //
4961 // Generate OpBranchConditional.
4962 //
4963 // Ops[0] = Condition ID
4964 // Ops[1] = True Label ID
4965 // Ops[2] = False Label ID
4966 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
4967 SPIRVOperandList Ops;
4968
4969 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04004970 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04004971 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04004972
4973 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004974
David Neto87846742018-04-11 17:36:22 -04004975 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004976 SPIRVInstList.insert(InsertPoint, BrInst);
4977 } else {
4978 //
4979 // Generate OpBranch.
4980 //
4981 // Ops[0] = Target Label ID
4982 SPIRVOperandList Ops;
4983
4984 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04004985 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004986
David Neto87846742018-04-11 17:36:22 -04004987 SPIRVInstList.insert(InsertPoint,
4988 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004989 }
4990 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004991 if (PHI->getType()->isPointerTy()) {
4992 // OpPhi on pointers requires variable pointers.
4993 setVariablePointersCapabilities(
4994 PHI->getType()->getPointerAddressSpace());
4995 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4996 setVariablePointers(true);
4997 }
4998 }
4999
David Neto22f144c2017-06-12 14:26:21 -04005000 //
5001 // Generate OpPhi.
5002 //
5003 // Ops[0] = Result Type ID
5004 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5005 SPIRVOperandList Ops;
5006
David Neto257c3892018-04-11 13:19:45 -04005007 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005008
David Neto22f144c2017-06-12 14:26:21 -04005009 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5010 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005011 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005012 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005013 }
5014
5015 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005016 InsertPoint,
5017 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005018 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5019 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005020 auto callee_name = Callee->getName();
5021 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005022
5023 if (EInst) {
5024 uint32_t &ExtInstImportID = getOpExtInstImportID();
5025
5026 //
5027 // Generate OpExtInst.
5028 //
5029
5030 // Ops[0] = Result Type ID
5031 // Ops[1] = Set ID (OpExtInstImport ID)
5032 // Ops[2] = Instruction Number (Literal Number)
5033 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5034 SPIRVOperandList Ops;
5035
David Neto862b7d82018-06-14 18:48:37 -04005036 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5037 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005038
David Neto22f144c2017-06-12 14:26:21 -04005039 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5040 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005041 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005042 }
5043
David Neto87846742018-04-11 17:36:22 -04005044 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5045 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005046 SPIRVInstList.insert(InsertPoint, ExtInst);
5047
David Neto3fbb4072017-10-16 11:28:14 -04005048 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5049 if (IndirectExtInst != kGlslExtInstBad) {
5050 // Generate one more instruction that uses the result of the extended
5051 // instruction. Its result id is one more than the id of the
5052 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005053 LLVMContext &Context =
5054 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005055
David Neto3fbb4072017-10-16 11:28:14 -04005056 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5057 &VMap, &SPIRVInstList, &InsertPoint](
5058 spv::Op opcode, Constant *constant) {
5059 //
5060 // Generate instruction like:
5061 // result = opcode constant <extinst-result>
5062 //
5063 // Ops[0] = Result Type ID
5064 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5065 // Ops[2] = Operand 1 ;; the result of the extended instruction
5066 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005067
David Neto3fbb4072017-10-16 11:28:14 -04005068 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005069 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005070
5071 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5072 constant = ConstantVector::getSplat(
5073 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5074 }
David Neto257c3892018-04-11 13:19:45 -04005075 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005076
5077 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005078 InsertPoint, new SPIRVInstruction(
5079 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005080 };
5081
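          // A brief note on what the generated extra instruction computes:
          // clz is implemented as 31 - FindUMsb(x), and the *pi variants scale
          // the GLSL result by the kOneOverPi constant (1/pi).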
5082 switch (IndirectExtInst) {
5083 case glsl::ExtInstFindUMsb: // Implementing clz
5084 generate_extra_inst(
5085 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5086 break;
5087 case glsl::ExtInstAcos: // Implementing acospi
5088 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005089 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005090 case glsl::ExtInstAtan2: // Implementing atan2pi
5091 generate_extra_inst(
5092 spv::OpFMul,
5093 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5094 break;
5095
5096 default:
5097 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005098 }
David Neto22f144c2017-06-12 14:26:21 -04005099 }
David Neto3fbb4072017-10-16 11:28:14 -04005100
alan-bakerb39c8262019-03-08 14:03:37 -05005101 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005102 //
5103 // Generate OpBitCount
5104 //
5105 // Ops[0] = Result Type ID
5106 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005107 SPIRVOperandList Ops;
5108 Ops << MkId(lookupType(Call->getType()))
5109 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005110
5111 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005112 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005113 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005114
David Neto862b7d82018-06-14 18:48:37 -04005115 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005116
5117 // Generate an OpCompositeConstruct
5118 SPIRVOperandList Ops;
5119
5120 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005121 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005122
5123 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005124 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005125 }
5126
5127 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005128 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5129 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005130
Alan Baker202c8c72018-08-13 13:47:44 -04005131 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5132
5133 // We have already mapped the call's result value to an ID.
5134 // Don't generate any code now.
5135
5136 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005137
5138 // We have already mapped the call's result value to an ID.
5139 // Don't generate any code now.
5140
David Neto22f144c2017-06-12 14:26:21 -04005141 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005142 if (Call->getType()->isPointerTy()) {
5143 // Functions returning pointers require variable pointers.
5144 setVariablePointersCapabilities(
5145 Call->getType()->getPointerAddressSpace());
5146 }
5147
David Neto22f144c2017-06-12 14:26:21 -04005148 //
5149 // Generate OpFunctionCall.
5150 //
5151
5152 // Ops[0] = Result Type ID
5153 // Ops[1] = Callee Function ID
5154 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5155 SPIRVOperandList Ops;
5156
David Neto862b7d82018-06-14 18:48:37 -04005157 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005158
5159 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005160 if (CalleeID == 0) {
5161 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005162 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005163 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5164 // causes an infinite loop. Instead, go ahead and generate
5165 // the bad function call. A validator will catch the 0-Id.
5166 // llvm_unreachable("Can't translate function call");
5167 }
David Neto22f144c2017-06-12 14:26:21 -04005168
David Neto257c3892018-04-11 13:19:45 -04005169 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005170
David Neto22f144c2017-06-12 14:26:21 -04005171 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5172 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005173 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005174 auto *operand_type = operand->getType();
5175 // Images and samplers can be passed as function parameters without
5176 // variable pointers.
5177 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5178 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005179 auto sc =
5180 GetStorageClass(operand->getType()->getPointerAddressSpace());
5181 if (sc == spv::StorageClassStorageBuffer) {
5182 // Passing SSBO by reference requires variable pointers storage
5183 // buffer.
5184 setVariablePointersStorageBuffer(true);
5185 } else if (sc == spv::StorageClassWorkgroup) {
5186 // Workgroup references require variable pointers if they are not
5187 // memory object declarations.
5188 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5189 // Workgroup accessor represents a variable reference.
5190 if (!operand_call->getCalledFunction()->getName().startswith(
5191 clspv::WorkgroupAccessorFunction()))
5192 setVariablePointers(true);
5193 } else {
5194 // Arguments are function parameters.
5195 if (!isa<Argument>(operand))
5196 setVariablePointers(true);
5197 }
5198 }
5199 }
5200 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005201 }
5202
David Neto87846742018-04-11 17:36:22 -04005203 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5204 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005205 SPIRVInstList.insert(InsertPoint, CallInst);
5206 }
5207 }
5208 }
5209}
5210
David Neto1a1a0582017-07-07 12:01:44 -04005211void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005212 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005213 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005214 }
David Neto1a1a0582017-07-07 12:01:44 -04005215
5216 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005217
5218 // Find an iterator pointing just past the last decoration.
5219 bool seen_decorations = false;
5220 auto DecoInsertPoint =
5221 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5222 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5223 const bool is_decoration =
5224 Inst->getOpcode() == spv::OpDecorate ||
5225 Inst->getOpcode() == spv::OpMemberDecorate;
5226 if (is_decoration) {
5227 seen_decorations = true;
5228 return false;
5229 } else {
5230 return seen_decorations;
5231 }
5232 });
5233
David Netoc6f3ab22018-04-06 18:02:31 -04005234 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5235 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005236 for (auto *type : getTypesNeedingArrayStride()) {
5237 Type *elemTy = nullptr;
5238 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5239 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005240 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005241 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005242 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005243 elemTy = seqTy->getSequentialElementType();
5244 } else {
5245 errs() << "Unhandled strided type " << *type << "\n";
5246 llvm_unreachable("Unhandled strided type");
5247 }
David Neto1a1a0582017-07-07 12:01:44 -04005248
5249 // Ops[0] = Target ID
5250 // Ops[1] = Decoration (ArrayStride)
5251 // Ops[2] = Stride number (Literal Number)
5252 SPIRVOperandList Ops;
5253
David Neto85082642018-03-24 06:55:20 -07005254 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005255 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005256
5257 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5258 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005259
David Neto87846742018-04-11 17:36:22 -04005260 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005261 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5262 }
David Netoc6f3ab22018-04-06 18:02:31 -04005263
5264 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005265 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5266 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005267 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005268 SPIRVOperandList Ops;
5269 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5270 << MkNum(arg_info.spec_id);
5271 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005272 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005273 }
David Neto1a1a0582017-07-07 12:01:44 -04005274}
5275
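// Maps an OpenCL builtin's mangled name to the GLSL.std.450 extended
// instruction that implements it directly, or kGlslExtInstBad if none does.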
David Neto22f144c2017-06-12 14:26:21 -04005276glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5277 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005278 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5279 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5280 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5281 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005282 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5283 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5284 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5285 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005286 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5287 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5288 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5289 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005290 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5291 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5292 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5293 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005294 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5295 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5296 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5297 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5298 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5299 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5300 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5301 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005302 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5303 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5304 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5305 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5306 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5307 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5308 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5309 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005310 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5311 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5312 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5313 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5314 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5315 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5316 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5317 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005318 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5319 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5320 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5321 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5322 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5323 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5324 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5325 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005326 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5327 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5328 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5329 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005330 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5331 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5332 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5333 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5334 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5335 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5336 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5337 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005338 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5339 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5340 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5341 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5342 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5343 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5344 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5345 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005346 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5347 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5348 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5349 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5350 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5351 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5352 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5353 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005354 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5355 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5356 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5357 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5358 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5359 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5360 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5361 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005362 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5363 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5364 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5365 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5366 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005367 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5368 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5369 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5370 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5371 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5372 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5373 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5374 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005375 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5376 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5377 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5378 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5379 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5380 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5381 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5382 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005383 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5384 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5385 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5386 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5387 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5388 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5389 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5390 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005391 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5392 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5393 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5394 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5395 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5396 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5397 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5398 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005399 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5400 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5401 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5402 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5403 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5404 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5405 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5406 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5407 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5408 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5409 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5410 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5411 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5412 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5413 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5414 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5415 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5416 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5417 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5418 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5419 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5420 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5421 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5422 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5423 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5424 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5425 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5426 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5427 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5428 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5429 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5430 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5431 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5432 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5433 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5434 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5435 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5436 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5437 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5438 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5439 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005440 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005441 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5442 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5443 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5444 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5445 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5446 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5447 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5448 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5449 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5450 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5451 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5452 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5453 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5454 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5455 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5456 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5457 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005458 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005459 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005460 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005461 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005462 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005463 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5464 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005465 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005466 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5467 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5468 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005469 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5470 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5471 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5472 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005473 .Default(kGlslExtInstBad);
5474}
5475
5476glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5477 // Check indirect cases.
5478 return StringSwitch<glsl::ExtInst>(Name)
5479 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5480 // Use exact match on float arg because these need a multiply
5481 // of a constant of the right floating point type.
5482 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5483 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5484 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5485 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5486 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5487 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5488 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5489 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005490 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5491 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5492 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5493 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005494 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5495 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5496 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5497 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5498 .Default(kGlslExtInstBad);
5499}
5500
alan-bakerb6b09dc2018-11-08 16:59:28 -05005501glsl::ExtInst
5502SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005503 auto direct = getExtInstEnum(Name);
5504 if (direct != kGlslExtInstBad)
5505 return direct;
5506 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005507}
5508
David Neto22f144c2017-06-12 14:26:21 -04005509void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005510 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005511}
5512
5513void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5514 WriteOneWord(Inst->getResultID());
5515}
5516
5517void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5518 // High 16 bit : Word Count
5519 // Low 16 bit : Opcode
5520 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005521 const uint32_t count = Inst->getWordCount();
5522 if (count > 65535) {
5523 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5524 llvm_unreachable("Word count too high");
5525 }
David Neto22f144c2017-06-12 14:26:21 -04005526 Word |= Inst->getWordCount() << 16;
5527 WriteOneWord(Word);
5528}
5529
James Price11010dc2019-12-19 13:53:09 -05005530void SPIRVProducerPass::WriteOperand(const SPIRVOperand *Op) {
David Neto22f144c2017-06-12 14:26:21 -04005531 SPIRVOperandType OpTy = Op->getType();
5532 switch (OpTy) {
5533 default: {
5534 llvm_unreachable("Unsupported SPIRV Operand Type???");
5535 break;
5536 }
5537 case SPIRVOperandType::NUMBERID: {
5538 WriteOneWord(Op->getNumID());
5539 break;
5540 }
5541 case SPIRVOperandType::LITERAL_STRING: {
5542 std::string Str = Op->getLiteralStr();
5543 const char *Data = Str.c_str();
5544 size_t WordSize = Str.size() / 4;
5545 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5546 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5547 }
5548
5549 uint32_t Remainder = Str.size() % 4;
5550 uint32_t LastWord = 0;
5551 if (Remainder) {
5552 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5553 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5554 }
5555 }
5556
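    // SPIR-V literal strings are nul-terminated and padded out to a word
    // boundary, so a final word is always written; when the string length is
    // a multiple of four that word is entirely zero.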
5557 WriteOneWord(LastWord);
5558 break;
5559 }
5560 case SPIRVOperandType::LITERAL_INTEGER:
5561 case SPIRVOperandType::LITERAL_FLOAT: {
5562 auto LiteralNum = Op->getLiteralNum();
5563 // TODO: Handle LiteralNum carefully.
5564 for (auto Word : LiteralNum) {
5565 WriteOneWord(Word);
5566 }
5567 break;
5568 }
5569 }
5570}
5571
5572void SPIRVProducerPass::WriteSPIRVBinary() {
5573 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5574
5575 for (auto Inst : SPIRVInstList) {
David Netoc6f3ab22018-04-06 18:02:31 -04005576 SPIRVOperandList Ops{Inst->getOperands()};
David Neto22f144c2017-06-12 14:26:21 -04005577 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5578
5579 switch (Opcode) {
5580 default: {
David Neto5c22a252018-03-15 16:07:41 -04005581 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005582 llvm_unreachable("Unsupported SPIRV instruction");
5583 break;
5584 }
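    // Instructions that produce no result id: write the word count and
    // opcode, then every operand.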
5585 case spv::OpCapability:
5586 case spv::OpExtension:
5587 case spv::OpMemoryModel:
5588 case spv::OpEntryPoint:
5589 case spv::OpExecutionMode:
5590 case spv::OpSource:
5591 case spv::OpDecorate:
5592 case spv::OpMemberDecorate:
5593 case spv::OpBranch:
5594 case spv::OpBranchConditional:
5595 case spv::OpSelectionMerge:
5596 case spv::OpLoopMerge:
5597 case spv::OpStore:
5598 case spv::OpImageWrite:
5599 case spv::OpReturnValue:
5600 case spv::OpControlBarrier:
5601 case spv::OpMemoryBarrier:
5602 case spv::OpReturn:
5603 case spv::OpFunctionEnd:
5604 case spv::OpCopyMemory: {
5605 WriteWordCountAndOpcode(Inst);
5606 for (uint32_t i = 0; i < Ops.size(); i++) {
5607 WriteOperand(Ops[i]);
5608 }
5609 break;
5610 }
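    // Instructions whose result id immediately follows the opcode word
    // (types, labels, extended instruction set imports).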
5611 case spv::OpTypeBool:
5612 case spv::OpTypeVoid:
5613 case spv::OpTypeSampler:
5614 case spv::OpLabel:
5615 case spv::OpExtInstImport:
5616 case spv::OpTypePointer:
5617 case spv::OpTypeRuntimeArray:
5618 case spv::OpTypeStruct:
5619 case spv::OpTypeImage:
5620 case spv::OpTypeSampledImage:
5621 case spv::OpTypeInt:
5622 case spv::OpTypeFloat:
5623 case spv::OpTypeArray:
5624 case spv::OpTypeVector:
5625 case spv::OpTypeFunction: {
5626 WriteWordCountAndOpcode(Inst);
5627 WriteResultID(Inst);
5628 for (uint32_t i = 0; i < Ops.size(); i++) {
5629 WriteOperand(Ops[i]);
5630 }
5631 break;
5632 }
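    // Value-producing instructions: operand 0 is the result type id, followed
    // by the result id, then the remaining operands.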
5633 case spv::OpFunction:
5634 case spv::OpFunctionParameter:
5635 case spv::OpAccessChain:
5636 case spv::OpPtrAccessChain:
5637 case spv::OpInBoundsAccessChain:
5638 case spv::OpUConvert:
5639 case spv::OpSConvert:
5640 case spv::OpConvertFToU:
5641 case spv::OpConvertFToS:
5642 case spv::OpConvertUToF:
5643 case spv::OpConvertSToF:
5644 case spv::OpFConvert:
5645 case spv::OpConvertPtrToU:
5646 case spv::OpConvertUToPtr:
5647 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005648 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005649 case spv::OpIAdd:
5650 case spv::OpFAdd:
5651 case spv::OpISub:
5652 case spv::OpFSub:
5653 case spv::OpIMul:
5654 case spv::OpFMul:
5655 case spv::OpUDiv:
5656 case spv::OpSDiv:
5657 case spv::OpFDiv:
5658 case spv::OpUMod:
5659 case spv::OpSRem:
5660 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005661 case spv::OpUMulExtended:
5662 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005663 case spv::OpBitwiseOr:
5664 case spv::OpBitwiseXor:
5665 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005666 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005667 case spv::OpShiftLeftLogical:
5668 case spv::OpShiftRightLogical:
5669 case spv::OpShiftRightArithmetic:
5670 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005671 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005672 case spv::OpCompositeExtract:
5673 case spv::OpVectorExtractDynamic:
5674 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005675 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005676 case spv::OpVectorInsertDynamic:
5677 case spv::OpVectorShuffle:
5678 case spv::OpIEqual:
5679 case spv::OpINotEqual:
5680 case spv::OpUGreaterThan:
5681 case spv::OpUGreaterThanEqual:
5682 case spv::OpULessThan:
5683 case spv::OpULessThanEqual:
5684 case spv::OpSGreaterThan:
5685 case spv::OpSGreaterThanEqual:
5686 case spv::OpSLessThan:
5687 case spv::OpSLessThanEqual:
5688 case spv::OpFOrdEqual:
5689 case spv::OpFOrdGreaterThan:
5690 case spv::OpFOrdGreaterThanEqual:
5691 case spv::OpFOrdLessThan:
5692 case spv::OpFOrdLessThanEqual:
5693 case spv::OpFOrdNotEqual:
5694 case spv::OpFUnordEqual:
5695 case spv::OpFUnordGreaterThan:
5696 case spv::OpFUnordGreaterThanEqual:
5697 case spv::OpFUnordLessThan:
5698 case spv::OpFUnordLessThanEqual:
5699 case spv::OpFUnordNotEqual:
5700 case spv::OpExtInst:
5701 case spv::OpIsInf:
5702 case spv::OpIsNan:
5703 case spv::OpAny:
5704 case spv::OpAll:
5705 case spv::OpUndef:
5706 case spv::OpConstantNull:
5707 case spv::OpLogicalOr:
5708 case spv::OpLogicalAnd:
5709 case spv::OpLogicalNot:
5710 case spv::OpLogicalNotEqual:
5711 case spv::OpConstantComposite:
5712 case spv::OpSpecConstantComposite:
5713 case spv::OpConstantTrue:
5714 case spv::OpConstantFalse:
5715 case spv::OpConstant:
5716 case spv::OpSpecConstant:
5717 case spv::OpVariable:
5718 case spv::OpFunctionCall:
5719 case spv::OpSampledImage:
5720 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005721 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005722 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005723 case spv::OpSelect:
5724 case spv::OpPhi:
5725 case spv::OpLoad:
5726 case spv::OpAtomicIAdd:
5727 case spv::OpAtomicISub:
5728 case spv::OpAtomicExchange:
5729 case spv::OpAtomicIIncrement:
5730 case spv::OpAtomicIDecrement:
5731 case spv::OpAtomicCompareExchange:
5732 case spv::OpAtomicUMin:
5733 case spv::OpAtomicSMin:
5734 case spv::OpAtomicUMax:
5735 case spv::OpAtomicSMax:
5736 case spv::OpAtomicAnd:
5737 case spv::OpAtomicOr:
5738 case spv::OpAtomicXor:
5739 case spv::OpDot: {
5740 WriteWordCountAndOpcode(Inst);
5741 WriteOperand(Ops[0]);
5742 WriteResultID(Inst);
5743 for (uint32_t i = 1; i < Ops.size(); i++) {
5744 WriteOperand(Ops[i]);
5745 }
5746 break;
5747 }
5748 }
5749 }
5750}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005751
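// Returns true for types that can be zero-initialized: scalars, vectors,
// pointers, and composites built from them, but not images or samplers.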
alan-bakerb6b09dc2018-11-08 16:59:28 -05005752bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005753 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005754 case Type::HalfTyID:
5755 case Type::FloatTyID:
5756 case Type::DoubleTyID:
5757 case Type::IntegerTyID:
5758 case Type::VectorTyID:
5759 return true;
5760 case Type::PointerTyID: {
5761 const PointerType *pointer_type = cast<PointerType>(type);
5762 if (pointer_type->getPointerAddressSpace() !=
5763 AddressSpace::UniformConstant) {
5764 auto pointee_type = pointer_type->getPointerElementType();
5765 if (pointee_type->isStructTy() &&
5766 cast<StructType>(pointee_type)->isOpaque()) {
5767 // Images and samplers are not nullable.
5768 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005769 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005770 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005771 return true;
5772 }
5773 case Type::ArrayTyID:
5774 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5775 case Type::StructTyID: {
5776 const StructType *struct_type = cast<StructType>(type);
5777 // Images and samplers are not nullable.
5778 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005779 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005780 for (const auto element : struct_type->elements()) {
5781 if (!IsTypeNullable(element))
5782 return false;
5783 }
5784 return true;
5785 }
5786 default:
5787 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005788 }
5789}
Alan Bakerfcda9482018-10-02 17:09:59 -04005790
5791void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5792 if (auto *offsets_md =
5793 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5794 // Metdata is stored as key-value pair operands. The first element of each
5795 // operand is the type and the second is a vector of offsets.
5796 for (const auto *operand : offsets_md->operands()) {
5797 const auto *pair = cast<MDTuple>(operand);
5798 auto *type =
5799 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5800 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5801 std::vector<uint32_t> offsets;
5802 for (const Metadata *offset_md : offset_vector->operands()) {
5803 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005804 offsets.push_back(static_cast<uint32_t>(
5805 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005806 }
5807 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5808 }
5809 }
5810
5811 if (auto *sizes_md =
5812 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5813 // Metadata is stored as key-value pair operands. The first element of each
5814 // operand is the type and the second is a triple of sizes: type size in
5815 // bits, store size and alloc size.
5816 for (const auto *operand : sizes_md->operands()) {
5817 const auto *pair = cast<MDTuple>(operand);
5818 auto *type =
5819 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5820 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5821 uint64_t type_size_in_bits =
5822 cast<ConstantInt>(
5823 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5824 ->getZExtValue();
5825 uint64_t type_store_size =
5826 cast<ConstantInt>(
5827 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5828 ->getZExtValue();
5829 uint64_t type_alloc_size =
5830 cast<ConstantInt>(
5831 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5832 ->getZExtValue();
5833 RemappedUBOTypeSizes.insert(std::make_pair(
5834 type, std::make_tuple(type_size_in_bits, type_store_size,
5835 type_alloc_size)));
5836 }
5837 }
5838}
5839
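// The following size queries prefer the remapped UBO layout recorded in
// module metadata and fall back to the DataLayout otherwise.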
5840uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5841 const DataLayout &DL) {
5842 auto iter = RemappedUBOTypeSizes.find(type);
5843 if (iter != RemappedUBOTypeSizes.end()) {
5844 return std::get<0>(iter->second);
5845 }
5846
5847 return DL.getTypeSizeInBits(type);
5848}
5849
5850uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5851 auto iter = RemappedUBOTypeSizes.find(type);
5852 if (iter != RemappedUBOTypeSizes.end()) {
5853 return std::get<1>(iter->second);
5854 }
5855
5856 return DL.getTypeStoreSize(type);
5857}
5858
5859uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5860 auto iter = RemappedUBOTypeSizes.find(type);
5861 if (iter != RemappedUBOTypeSizes.end()) {
5862 return std::get<2>(iter->second);
5863 }
5864
5865 return DL.getTypeAllocSize(type);
5866}
alan-baker5b86ed72019-02-15 08:26:50 -05005867
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005868void SPIRVProducerPass::setVariablePointersCapabilities(
5869 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005870 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5871 setVariablePointersStorageBuffer(true);
5872 } else {
5873 setVariablePointers(true);
5874 }
5875}
5876
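// Walks through GetElementPtr chains to find the underlying base pointer.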
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005877Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005878 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5879 return GetBasePointer(gep->getPointerOperand());
5880 }
5881
5882 // Conservatively return |v|.
5883 return v;
5884}
5885
5886bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5887 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5888 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5889 if (lhs_call->getCalledFunction()->getName().startswith(
5890 clspv::ResourceAccessorFunction()) &&
5891 rhs_call->getCalledFunction()->getName().startswith(
5892 clspv::ResourceAccessorFunction())) {
5893 // For resource accessors, match descriptor set and binding.
5894 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5895 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5896 return true;
5897 } else if (lhs_call->getCalledFunction()->getName().startswith(
5898 clspv::WorkgroupAccessorFunction()) &&
5899 rhs_call->getCalledFunction()->getName().startswith(
5900 clspv::WorkgroupAccessorFunction())) {
5901 // For workgroup resources, match spec id.
5902 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5903 return true;
5904 }
5905 }
5906 }
5907
5908 return false;
5909}
5910
5911bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5912 assert(inst->getType()->isPointerTy());
5913 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5914 spv::StorageClassStorageBuffer);
5915 const bool hack_undef = clspv::Option::HackUndef();
5916 if (auto *select = dyn_cast<SelectInst>(inst)) {
5917 auto *true_base = GetBasePointer(select->getTrueValue());
5918 auto *false_base = GetBasePointer(select->getFalseValue());
5919
5920 if (true_base == false_base)
5921 return true;
5922
5923 // If either the true or false operand is null, then we satisfy the same
5924 // object constraint.
5925 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5926 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5927 return true;
5928 }
5929
5930 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5931 if (false_cst->isNullValue() ||
5932 (hack_undef && isa<UndefValue>(false_base)))
5933 return true;
5934 }
5935
5936 if (sameResource(true_base, false_base))
5937 return true;
5938 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5939 Value *value = nullptr;
5940 bool ok = true;
5941 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5942 auto *base = GetBasePointer(phi->getIncomingValue(i));
5943 // Null values satisfy the constraint of selecting from the same
5944 // object.
5945 if (!value) {
5946 if (auto *cst = dyn_cast<Constant>(base)) {
5947 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5948 value = base;
5949 } else {
5950 value = base;
5951 }
5952 } else if (base != value) {
5953 if (auto *base_cst = dyn_cast<Constant>(base)) {
5954 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5955 continue;
5956 }
5957
5958 if (sameResource(value, base))
5959 continue;
5960
5961 // Values don't represent the same base.
5962 ok = false;
5963 }
5964 }
5965
5966 return ok;
5967 }
5968
5969 // Conservatively return false.
5970 return false;
5971}
alan-bakere9308012019-03-15 10:25:13 -04005972
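// Traces uses of |Arg| through its callers and pointer operands to determine
// whether it can be bound to a resource variable created with the coherent
// flag set.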
5973bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5974 if (!Arg.getType()->isPointerTy() ||
5975 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5976 // Only SSBOs need to be annotated as coherent.
5977 return false;
5978 }
5979
5980 DenseSet<Value *> visited;
5981 std::vector<Value *> stack;
5982 for (auto *U : Arg.getParent()->users()) {
5983 if (auto *call = dyn_cast<CallInst>(U)) {
5984 stack.push_back(call->getOperand(Arg.getArgNo()));
5985 }
5986 }
5987
5988 while (!stack.empty()) {
5989 Value *v = stack.back();
5990 stack.pop_back();
5991
5992 if (!visited.insert(v).second)
5993 continue;
5994
5995 auto *resource_call = dyn_cast<CallInst>(v);
5996 if (resource_call &&
5997 resource_call->getCalledFunction()->getName().startswith(
5998 clspv::ResourceAccessorFunction())) {
5999 // If this is a resource accessor function, check if the coherent operand
6000 // is set.
6001 const auto coherent =
6002 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6003 ->getZExtValue());
6004 if (coherent == 1)
6005 return true;
6006 } else if (auto *arg = dyn_cast<Argument>(v)) {
6007 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006008 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006009 if (auto *call = dyn_cast<CallInst>(U)) {
6010 stack.push_back(call->getOperand(arg->getArgNo()));
6011 }
6012 }
6013 } else if (auto *user = dyn_cast<User>(v)) {
6014 // If this is a user, traverse all operands that could lead to resource
6015 // variables.
6016 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6017 Value *operand = user->getOperand(i);
6018 if (operand->getType()->isPointerTy() &&
6019 operand->getType()->getPointerAddressSpace() ==
6020 clspv::AddressSpace::Global) {
6021 stack.push_back(operand);
6022 }
6023 }
6024 }
6025 }
6026
6027 // No coherent resource variables encountered.
6028 return false;
6029}
alan-baker06cad652019-12-03 17:56:47 -05006030
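// Records the merge (and continue) blocks needed later for OpLoopMerge and
// OpSelectionMerge: one pair per loop header, and one merge block per
// structured conditional branch.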
6031void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6032 // First, track loop merges and continues.
6033 DenseSet<BasicBlock *> LoopMergesAndContinues;
6034 for (auto &F : module) {
6035 if (F.isDeclaration())
6036 continue;
6037
6038 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6039 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6040 std::deque<BasicBlock *> order;
6041 DenseSet<BasicBlock *> visited;
6042 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6043
6044 for (auto BB : order) {
6045 auto terminator = BB->getTerminator();
6046 auto branch = dyn_cast<BranchInst>(terminator);
6047 if (LI.isLoopHeader(BB)) {
6048 auto L = LI.getLoopFor(BB);
6049 BasicBlock *ContinueBB = nullptr;
6050 BasicBlock *MergeBB = nullptr;
6051
6052 MergeBB = L->getExitBlock();
6053 if (!MergeBB) {
6054 // The StructurizeCFG pass converts the CFG into a triangle shape whose
6055 // regions have a single entry and exit. As a result, a loop should not
6056 // have multiple exits.
6057 llvm_unreachable("Loop has multiple exits???");
6058 }
6059
6060 if (L->isLoopLatch(BB)) {
6061 ContinueBB = BB;
6062 } else {
6063 // From SPIR-V spec section 2.11, the Continue Target must dominate the
6064 // back-edge block.
6065 BasicBlock *Header = L->getHeader();
6066 BasicBlock *Latch = L->getLoopLatch();
6067 for (auto *loop_block : L->blocks()) {
6068 if (loop_block == Header) {
6069 continue;
6070 }
6071
6072 // Check whether this block dominates the block with the back-edge.
6073 // The loop latch is the single block with a back-edge. Where possible,
6074 // StructurizeCFG made the loop conform to this requirement; otherwise
6075 // |Latch| is a nullptr.
6076 if (DT.dominates(loop_block, Latch)) {
6077 ContinueBB = loop_block;
6078 }
6079 }
6080
6081 if (!ContinueBB) {
6082 llvm_unreachable("Wrong continue block from loop");
6083 }
6084 }
6085
6086 // Record the continue and merge blocks.
6087 MergeBlocks[BB] = MergeBB;
6088 ContinueBlocks[BB] = ContinueBB;
6089 LoopMergesAndContinues.insert(MergeBB);
6090 LoopMergesAndContinues.insert(ContinueBB);
6091 } else if (branch && branch->isConditional()) {
6092 auto L = LI.getLoopFor(BB);
6093 bool HasBackedge = false;
6094 while (L && !HasBackedge) {
6095 if (L->isLoopLatch(BB)) {
6096 HasBackedge = true;
6097 }
6098 L = L->getParentLoop();
6099 }
6100
6101 if (!HasBackedge) {
6102 // Only need a merge if the branch doesn't include a loop break or
6103 // continue.
6104 auto true_bb = branch->getSuccessor(0);
6105 auto false_bb = branch->getSuccessor(1);
6106 if (!LoopMergesAndContinues.count(true_bb) &&
6107 !LoopMergesAndContinues.count(false_bb)) {
6108 // The StructurizeCFG pass has already manipulated the CFG, so just use
6109 // the false block of the branch instruction as the merge block.
6110 MergeBlocks[BB] = false_bb;
6111 }
6112 }
6113 }
6114 }
6115 }
6116}