// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
David Neto87846742018-04-11 17:36:22 -0400119 }
120 llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
121 }
122
David Neto22f144c2017-06-12 14:26:21 -0400123private:
124 SPIRVOperandType Type;
125 std::string LiteralStr;
126 SmallVector<uint32_t, 4> LiteralNum;
127};
128
David Netoc6f3ab22018-04-06 18:02:31 -0400129class SPIRVOperandList {
130public:
David Netoef5ba2b2019-12-20 08:35:54 -0500131 typedef std::unique_ptr<SPIRVOperand> element_type;
132 typedef SmallVector<element_type, 8> container_type;
133 typedef container_type::iterator iterator;
David Netoc6f3ab22018-04-06 18:02:31 -0400134 SPIRVOperandList() {}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500135 SPIRVOperandList(const SPIRVOperandList &other) = delete;
136 SPIRVOperandList(SPIRVOperandList &&other) {
David Netoc6f3ab22018-04-06 18:02:31 -0400137 contents_ = std::move(other.contents_);
138 other.contents_.clear();
139 }
David Netoef5ba2b2019-12-20 08:35:54 -0500140 iterator begin() { return contents_.begin(); }
141 iterator end() { return contents_.end(); }
142 operator ArrayRef<element_type>() { return contents_; }
143 void push_back(element_type op) { contents_.push_back(std::move(op)); }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500144 void clear() { contents_.clear(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400145 size_t size() const { return contents_.size(); }
James Price11010dc2019-12-19 13:53:09 -0500146 const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400147
David Netoef5ba2b2019-12-20 08:35:54 -0500148 const container_type &getOperands() const { return contents_; }
David Neto87846742018-04-11 17:36:22 -0400149
David Netoc6f3ab22018-04-06 18:02:31 -0400150private:
David Netoef5ba2b2019-12-20 08:35:54 -0500151 container_type contents_;
David Netoc6f3ab22018-04-06 18:02:31 -0400152};
153
James Price11010dc2019-12-19 13:53:09 -0500154SPIRVOperandList &operator<<(SPIRVOperandList &list,
David Netoef5ba2b2019-12-20 08:35:54 -0500155 std::unique_ptr<SPIRVOperand> elem) {
156 list.push_back(std::move(elem));
David Netoc6f3ab22018-04-06 18:02:31 -0400157 return list;
158}
159
David Netoef5ba2b2019-12-20 08:35:54 -0500160std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
161 return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
David Netoc6f3ab22018-04-06 18:02:31 -0400162}
David Netoef5ba2b2019-12-20 08:35:54 -0500163std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
164 return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
David Neto257c3892018-04-11 13:19:45 -0400165}
David Netoef5ba2b2019-12-20 08:35:54 -0500166std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
167 return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
David Neto257c3892018-04-11 13:19:45 -0400168}
David Netoef5ba2b2019-12-20 08:35:54 -0500169std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
170 return std::make_unique<SPIRVOperand>(NUMBERID, id);
James Price11010dc2019-12-19 13:53:09 -0500171}
David Netoef5ba2b2019-12-20 08:35:54 -0500172std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
173 return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
David Neto257c3892018-04-11 13:19:45 -0400174}
David Netoc6f3ab22018-04-06 18:02:31 -0400175
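// Illustrative sketch only (not used by the pass): operand lists are built
// with the streaming operator and the Mk* helpers above, then handed to a
// SPIRVInstruction. For a hypothetical decoration with a made-up target_id:
//   SPIRVOperandList Ops;
//   Ops << MkId(target_id) << MkNum(spv::DecorationBinding) << MkNum(3);
//   auto *Inst = new SPIRVInstruction(spv::OpDecorate, Ops);
// The instruction computes its own word count: 1 for the opcode word plus
// one word per operand here, i.e. 4 words.
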
struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the
  // given operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with a single
  // operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with a single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. IDs are allocated sequentially starting with
  // the current value of nextID, with each type emitted after its subtypes.
  // Also updates nextID to just beyond the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. kGlslExtInstBad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. kGlslExtInstBad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // these use the internal map; otherwise they fall back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is a phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G is updated to F's type with the
  // pointer-to-constant parameter replaced by a pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
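  // For illustration only: a ResourceVarInfo with (descriptor_set=0,
  // binding=1) for a storage-buffer argument ultimately becomes one
  // OpVariable decorated DescriptorSet 0 and Binding 1, shared by every
  // kernel that uses the same clspv.resource.var.* function; var_id is
  // filled in once that OpVariable has been emitted.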
557 // A list of resource var info. Each one correponds to a module-scope
558 // resource variable we will have to create. Resource var indices are
559 // indices into this vector.
560 SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
561 // This is a vector of pointers of all the resource vars, but ordered by
562 // kernel function, and then by argument.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500563 UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
David Neto862b7d82018-06-14 18:48:37 -0400564 // Map a function to the ordered list of resource variables it uses, one for
565 // each argument. If an argument does not use a resource variable, it
566 // will have a null pointer entry.
567 using FunctionToResourceVarsMapType =
568 DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
569 FunctionToResourceVarsMapType FunctionToResourceVarsMap;
570
571 // What LLVM types map to SPIR-V types needing layout? These are the
572 // arrays and structures supporting storage buffers and uniform buffers.
573 TypeList TypesNeedingLayout;
574 // What LLVM struct types map to a SPIR-V struct type with Block decoration?
575 UniqueVector<StructType *> StructTypesNeedingBlock;
576 // For a call that represents a load from an opaque type (samplers, images),
577 // map it to the variable id it should load from.
578 DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;
David Neto85082642018-03-24 06:55:20 -0700579
Alan Baker202c8c72018-08-13 13:47:44 -0400580 // One larger than the maximum used SpecId for pointer-to-local arguments.
581 int max_local_spec_id_;
David Netoc6f3ab22018-04-06 18:02:31 -0400582 // An ordered list of the kernel arguments of type pointer-to-local.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500583 using LocalArgList = SmallVector<Argument *, 8>;
David Netoc6f3ab22018-04-06 18:02:31 -0400584 LocalArgList LocalArgs;
585 // Information about a pointer-to-local argument.
586 struct LocalArgInfo {
587 // The SPIR-V ID of the array variable.
588 uint32_t variable_id;
589 // The element type of the
alan-bakerb6b09dc2018-11-08 16:59:28 -0500590 Type *elem_type;
David Netoc6f3ab22018-04-06 18:02:31 -0400591 // The ID of the array type.
592 uint32_t array_size_id;
593 // The ID of the array type.
594 uint32_t array_type_id;
595 // The ID of the pointer to the array type.
596 uint32_t ptr_array_type_id;
David Netoc6f3ab22018-04-06 18:02:31 -0400597 // The specialization constant ID of the array size.
598 int spec_id;
599 };
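  // For illustration only: a "__local float *scratch" kernel argument is
  // modelled as a Workgroup-storage array variable whose element count is a
  // specialization constant (identified by spec_id), so the host can pick the
  // local memory size at pipeline-creation time.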
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
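    // Reassemble each group of four little-endian bytes into a 32-bit word.
    // For example, leading bytes 0x03 0x02 0x23 0x07 become 0x07230203, the
    // SPIR-V magic number.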
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
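  // (Note: the generator word of the SPIR-V header carries the registered
  // vendor/tool ID in its upper 16 bits; the lower 16 bits hold a
  // tool-specific version, left as zero here.)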
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR needed only because we produce Vulkan
  // SPIR-V output, such as global variables for arguments, constants, and
  // pointer types for argument access. It runs ahead of FindType and
  // FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constant 0 and 1 so the constants are added
          // here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image types specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components = dim;
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

David Neto22f144c2017-06-12 14:26:21 -04001116bool SPIRVProducerPass::FindExtInst(Module &M) {
1117 LLVMContext &Context = M.getContext();
1118 bool HasExtInst = false;
1119
1120 for (Function &F : M) {
1121 for (BasicBlock &BB : F) {
1122 for (Instruction &I : BB) {
1123 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1124 Function *Callee = Call->getCalledFunction();
1125 // Check whether this call is for extended instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001126 auto callee_name = Callee->getName();
1127 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1128 const glsl::ExtInst IndirectEInst =
1129 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001130
David Neto3fbb4072017-10-16 11:28:14 -04001131 HasExtInst |=
1132 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1133
1134 if (IndirectEInst) {
1135 // Register extra constants if needed.
1136
1137 // Registers a type and constant for computing the result of the
1138 // given instruction. If the result of the instruction is a vector,
1139 // then make a splat vector constant with the same number of
1140 // elements.
1141 auto register_constant = [this, &I](Constant *constant) {
1142 FindType(constant->getType());
1143 FindConstant(constant);
1144 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1145 // Register the splat vector of the value with the same
1146 // width as the result of the instruction.
1147 auto *vec_constant = ConstantVector::getSplat(
1148 static_cast<unsigned>(vectorTy->getNumElements()),
1149 constant);
1150 FindConstant(vec_constant);
1151 FindType(vec_constant->getType());
1152 }
1153 };
1154 switch (IndirectEInst) {
1155 case glsl::ExtInstFindUMsb:
1156 // clz needs OpExtInst and OpISub with constant 31, or splat
1157 // vector of 31. Add it to the constant list here.
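              // Hedged sketch of the intended lowering (for nonzero 32-bit x):
              //   clz(x) == 31 - FindUMsb(x)
              // so the producer later pairs the FindUMsb OpExtInst with an
              // OpISub against this 31 constant (or a splat of 31 for vectors).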
1158 register_constant(
1159 ConstantInt::get(Type::getInt32Ty(Context), 31));
1160 break;
1161 case glsl::ExtInstAcos:
1162 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001163 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001164 case glsl::ExtInstAtan2:
1165 // We need 1/pi for acospi, asinpi, atanpi, and atan2pi.
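              // Hedged sketch of why: the *pi variants are presumably lowered
              // as, e.g., acospi(x) ~= acos(x) * (1/pi), i.e. the GLSL
              // ext-inst result is scaled by this constant afterwards.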
1166 register_constant(
1167 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1168 break;
1169 default:
1170 assert(false && "internally inconsistent");
1171 }
David Neto22f144c2017-06-12 14:26:21 -04001172 }
1173 }
1174 }
1175 }
1176 }
1177
1178 return HasExtInst;
1179}
1180
1181void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1182 // Investigate global variable's type.
1183 FindType(GV.getType());
1184}
1185
1186void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1187 // Investigate function's type.
1188 FunctionType *FTy = F.getFunctionType();
1189
1190 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1191 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001192 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001193 if (GlobalConstFuncTyMap.count(FTy)) {
1194 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1195 SmallVector<Type *, 4> NewFuncParamTys;
1196 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1197 Type *ParamTy = FTy->getParamType(i);
1198 if (i == GVCstArgIdx) {
1199 Type *EleTy = ParamTy->getPointerElementType();
1200 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1201 }
1202
1203 NewFuncParamTys.push_back(ParamTy);
1204 }
1205
1206 FunctionType *NewFTy =
1207 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1208 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1209 FTy = NewFTy;
1210 }
1211
1212 FindType(FTy);
1213 } else {
1214 // Kernel arguments have already been lowered away, so the kernel takes no
1215 // parameters; create the new parameterless function type and add it to the type map.
1216 SmallVector<Type *, 4> NewFuncParamTys;
1217 FunctionType *NewFTy =
1218 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1219 FindType(NewFTy);
1220 }
1221
1222 // Investigate instructions' type in function body.
1223 for (BasicBlock &BB : F) {
1224 for (Instruction &I : BB) {
1225 if (isa<ShuffleVectorInst>(I)) {
1226 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1227 // Ignore type for mask of shuffle vector instruction.
1228 if (i == 2) {
1229 continue;
1230 }
1231
1232 Value *Op = I.getOperand(i);
1233 if (!isa<MetadataAsValue>(Op)) {
1234 FindType(Op->getType());
1235 }
1236 }
1237
1238 FindType(I.getType());
1239 continue;
1240 }
1241
David Neto862b7d82018-06-14 18:48:37 -04001242 CallInst *Call = dyn_cast<CallInst>(&I);
1243
1244 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001245 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001246 // This is a fake call representing access to a resource variable.
1247 // We handle that elsewhere.
1248 continue;
1249 }
1250
Alan Baker202c8c72018-08-13 13:47:44 -04001251 if (Call && Call->getCalledFunction()->getName().startswith(
1252 clspv::WorkgroupAccessorFunction())) {
1253 // This is a fake call representing access to a workgroup variable.
1254 // We handle that elsewhere.
1255 continue;
1256 }
1257
David Neto22f144c2017-06-12 14:26:21 -04001258 // Work through the operands of the instruction.
1259 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1260 Value *const Op = I.getOperand(i);
1261 // If any of the operands is a constant, find the type!
1262 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1263 FindType(Op->getType());
1264 }
1265 }
1266
1267 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001268 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001269 // Don't check operand types for call instructions.
1270 break;
1271 }
Alan Baker202c8c72018-08-13 13:47:44 -04001272 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1273 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1274 clspv::WorkgroupAccessorFunction())) {
1275 // This is a fake call representing access to a workgroup variable.
1276 // We handle that elsewhere.
1277 continue;
1278 }
1279 }
David Neto22f144c2017-06-12 14:26:21 -04001280 if (!isa<MetadataAsValue>(&Op)) {
1281 FindType(Op->getType());
1282 continue;
1283 }
1284 }
1285
David Neto22f144c2017-06-12 14:26:21 -04001286 // We don't want to track the type of this call as we are going to replace
1287 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001288 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001289 Call->getCalledFunction()->getName())) {
1290 continue;
1291 }
1292
1293 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1294 // If gep's base operand has ModuleScopePrivate address space, make gep
1295 // return ModuleScopePrivate address space.
1296 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1297 // Add pointer type with private address space for global constant to
1298 // type list.
1299 Type *EleTy = I.getType()->getPointerElementType();
1300 Type *NewPTy =
1301 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1302
1303 FindType(NewPTy);
1304 continue;
1305 }
1306 }
1307
1308 FindType(I.getType());
1309 }
1310 }
1311}
1312
David Neto862b7d82018-06-14 18:48:37 -04001313void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1314 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001315 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001316 0 < getSamplerMap().size()) {
1317 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1318 if (!SamplerStructTy) {
1319 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1320 }
1321
1322 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1323
1324 FindType(SamplerTy);
1325 }
1326}
1327
1328void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1329 // Record types so they are generated.
1330 TypesNeedingLayout.reset();
1331 StructTypesNeedingBlock.reset();
1332
1333 // To match older clspv codegen, generate the float type first if required
1334 // for images.
1335 for (const auto *info : ModuleOrderedResourceVars) {
1336 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1337 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001338 if (IsIntImageType(info->var_fn->getReturnType())) {
1339 // Nothing for now...
1340 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1341 FindType(Type::getInt32Ty(M.getContext()));
1342 }
1343
1344 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001345 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001346 }
1347 }
1348
1349 for (const auto *info : ModuleOrderedResourceVars) {
1350 Type *type = info->var_fn->getReturnType();
1351
1352 switch (info->arg_kind) {
1353 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001354 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001355 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1356 StructTypesNeedingBlock.insert(sty);
1357 } else {
1358 errs() << *type << "\n";
1359 llvm_unreachable("Buffer arguments must map to structures!");
1360 }
1361 break;
1362 case clspv::ArgKind::Pod:
1363 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1364 StructTypesNeedingBlock.insert(sty);
1365 } else {
1366 errs() << *type << "\n";
1367 llvm_unreachable("POD arguments must map to structures!");
1368 }
1369 break;
1370 case clspv::ArgKind::ReadOnlyImage:
1371 case clspv::ArgKind::WriteOnlyImage:
1372 case clspv::ArgKind::Sampler:
1373 // Sampler and image types map to the pointee type but
1374 // in the uniform constant address space.
1375 type = PointerType::get(type->getPointerElementType(),
1376 clspv::AddressSpace::UniformConstant);
1377 break;
1378 default:
1379 break;
1380 }
1381
1382 // The converted type is the type of the OpVariable we will generate.
1383 // If the pointee type is an array of size zero, FindType will convert it
1384 // to a runtime array.
1385 FindType(type);
1386 }
1387
alan-bakerdcd97412019-09-16 15:32:30 -04001388 // If module constants are clustered in a storage buffer then that struct
1389 // needs layout decorations.
1390 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1391 for (GlobalVariable &GV : M.globals()) {
1392 PointerType *PTy = cast<PointerType>(GV.getType());
1393 const auto AS = PTy->getAddressSpace();
1394 const bool module_scope_constant_external_init =
1395 (AS == AddressSpace::Constant) && GV.hasInitializer();
1396 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1397 if (module_scope_constant_external_init &&
1398 spv::BuiltInMax == BuiltinType) {
1399 StructTypesNeedingBlock.insert(
1400 cast<StructType>(PTy->getPointerElementType()));
1401 }
1402 }
1403 }
1404
David Neto862b7d82018-06-14 18:48:37 -04001405 // Traverse the arrays and structures underneath each Block, and
1406 // mark them as needing layout.
1407 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1408 StructTypesNeedingBlock.end());
1409 while (!work_list.empty()) {
1410 Type *type = work_list.back();
1411 work_list.pop_back();
1412 TypesNeedingLayout.insert(type);
1413 switch (type->getTypeID()) {
1414 case Type::ArrayTyID:
1415 work_list.push_back(type->getArrayElementType());
1416 if (!Hack_generate_runtime_array_stride_early) {
1417 // Remember this array type for deferred decoration.
1418 TypesNeedingArrayStride.insert(type);
1419 }
1420 break;
1421 case Type::StructTyID:
1422 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1423 work_list.push_back(elem_ty);
1424 }
1425 default:
1426 // This type and its contained types don't get layout.
1427 break;
1428 }
1429 }
1430}
1431
Alan Baker202c8c72018-08-13 13:47:44 -04001432void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1433 // The SpecId assignment for pointer-to-local arguments is recorded in
1434 // module-level metadata. Translate that information into local argument
1435 // information.
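  // Hedged sketch of the metadata shape this loop expects, inferred from how
  // the operands are read below (names and values are illustrative only):
  //   !<local-spec-id-md> = !{!0}
  //   !0 = !{void (float addrspace(3)*)* @foo, i32 0, i32 3}
  //        ; operand 0: kernel, operand 1: argument index, operand 2: SpecId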
1436 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001437 if (!nmd)
1438 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001439 for (auto operand : nmd->operands()) {
1440 MDTuple *tuple = cast<MDTuple>(operand);
1441 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1442 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001443 ConstantAsMetadata *arg_index_md =
1444 cast<ConstantAsMetadata>(tuple->getOperand(1));
1445 int arg_index = static_cast<int>(
1446 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1447 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001448
1449 ConstantAsMetadata *spec_id_md =
1450 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001451 int spec_id = static_cast<int>(
1452 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001453
1454 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1455 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001456 if (LocalSpecIdInfoMap.count(spec_id))
1457 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001458
1459 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1460 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1461 nextID + 1, nextID + 2,
1462 nextID + 3, spec_id};
1463 LocalSpecIdInfoMap[spec_id] = info;
1464 nextID += 4;
1465
1466 // Ensure the types necessary for this argument get generated.
1467 Type *IdxTy = Type::getInt32Ty(M.getContext());
1468 FindConstant(ConstantInt::get(IdxTy, 0));
1469 FindType(IdxTy);
1470 FindType(arg->getType());
1471 }
1472}
1473
David Neto22f144c2017-06-12 14:26:21 -04001474void SPIRVProducerPass::FindType(Type *Ty) {
1475 TypeList &TyList = getTypeList();
1476
1477 if (0 != TyList.idFor(Ty)) {
1478 return;
1479 }
1480
1481 if (Ty->isPointerTy()) {
1482 auto AddrSpace = Ty->getPointerAddressSpace();
1483 if ((AddressSpace::Constant == AddrSpace) ||
1484 (AddressSpace::Global == AddrSpace)) {
1485 auto PointeeTy = Ty->getPointerElementType();
1486
1487 if (PointeeTy->isStructTy() &&
1488 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1489 FindType(PointeeTy);
1490 auto ActualPointerTy =
1491 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1492 FindType(ActualPointerTy);
1493 return;
1494 }
1495 }
1496 }
1497
David Neto862b7d82018-06-14 18:48:37 -04001498 // By convention, an LLVM array type with 0 elements maps to
1499 // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which
1500 // has a constant number of elements. We also need the i32 type used by
1501 // that length constant.
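  // For example (a sketch, not checked against real output): [0 x float]
  // becomes an OpTypeRuntimeArray of float, while [4 x float] becomes an
  // OpTypeArray whose length operand is an i32 OpConstant 4 -- hence the
  // need to register the i32 type here.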
1502 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1503 if (arrayTy->getNumElements() > 0) {
1504 LLVMContext &Context = Ty->getContext();
1505 FindType(Type::getInt32Ty(Context));
1506 }
David Neto22f144c2017-06-12 14:26:21 -04001507 }
1508
1509 for (Type *SubTy : Ty->subtypes()) {
1510 FindType(SubTy);
1511 }
1512
1513 TyList.insert(Ty);
1514}
1515
1516void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1517 // If the global variable has a (non-undef) initializer.
1518 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001519 // Generate the constant if it's not the initializer to a module scope
1520 // constant that we will expect in a storage buffer.
1521 const bool module_scope_constant_external_init =
1522 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1523 clspv::Option::ModuleConstantsInStorageBuffer();
1524 if (!module_scope_constant_external_init) {
1525 FindConstant(GV.getInitializer());
1526 }
David Neto22f144c2017-06-12 14:26:21 -04001527 }
1528}
1529
1530void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1531 // Investigate constants in function body.
1532 for (BasicBlock &BB : F) {
1533 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001534 if (auto *call = dyn_cast<CallInst>(&I)) {
1535 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001536 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001537 // We've handled these constants elsewhere, so skip it.
1538 continue;
1539 }
Alan Baker202c8c72018-08-13 13:47:44 -04001540 if (name.startswith(clspv::ResourceAccessorFunction())) {
1541 continue;
1542 }
1543 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001544 continue;
1545 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001546 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1547 // Skip the first operand that has the SPIR-V Opcode
1548 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1549 if (isa<Constant>(I.getOperand(i)) &&
1550 !isa<GlobalValue>(I.getOperand(i))) {
1551 FindConstant(I.getOperand(i));
1552 }
1553 }
1554 continue;
1555 }
David Neto22f144c2017-06-12 14:26:21 -04001556 }
1557
1558 if (isa<AllocaInst>(I)) {
1559 // An alloca instruction has a constant for its number of elements. Ignore it.
1560 continue;
1561 } else if (isa<ShuffleVectorInst>(I)) {
1562 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1563 // Ignore constant for mask of shuffle vector instruction.
1564 if (i == 2) {
1565 continue;
1566 }
1567
1568 if (isa<Constant>(I.getOperand(i)) &&
1569 !isa<GlobalValue>(I.getOperand(i))) {
1570 FindConstant(I.getOperand(i));
1571 }
1572 }
1573
1574 continue;
1575 } else if (isa<InsertElementInst>(I)) {
1576 // Handle InsertElement with <4 x i8> specially.
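        // Hedged sketch of the lowering this prepares for: with the vector
        // packed into one i32, inserting byte Idx is roughly
        //   packed = (packed & ~(0xFF << (Idx*8))) | ((val & 0xFF) << (Idx*8))
        // which is why the 0xFF mask and Idx*8 shift amounts are registered
        // as constants below.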
1577 Type *CompositeTy = I.getOperand(0)->getType();
1578 if (is4xi8vec(CompositeTy)) {
1579 LLVMContext &Context = CompositeTy->getContext();
1580 if (isa<Constant>(I.getOperand(0))) {
1581 FindConstant(I.getOperand(0));
1582 }
1583
1584 if (isa<Constant>(I.getOperand(1))) {
1585 FindConstant(I.getOperand(1));
1586 }
1587
1588 // Add mask constant 0xFF.
1589 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1590 FindConstant(CstFF);
1591
1592 // Add shift amount constant.
1593 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1594 uint64_t Idx = CI->getZExtValue();
1595 Constant *CstShiftAmount =
1596 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1597 FindConstant(CstShiftAmount);
1598 }
1599
1600 continue;
1601 }
1602
1603 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1604 // Ignore constant for index of InsertElement instruction.
1605 if (i == 2) {
1606 continue;
1607 }
1608
1609 if (isa<Constant>(I.getOperand(i)) &&
1610 !isa<GlobalValue>(I.getOperand(i))) {
1611 FindConstant(I.getOperand(i));
1612 }
1613 }
1614
1615 continue;
1616 } else if (isa<ExtractElementInst>(I)) {
1617 // Handle ExtractElement with <4 x i8> specially.
1618 Type *CompositeTy = I.getOperand(0)->getType();
1619 if (is4xi8vec(CompositeTy)) {
1620 LLVMContext &Context = CompositeTy->getContext();
1621 if (isa<Constant>(I.getOperand(0))) {
1622 FindConstant(I.getOperand(0));
1623 }
1624
1625 // Add mask constant 0xFF.
1626 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1627 FindConstant(CstFF);
1628
1629 // Add shift amount constant.
1630 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1631 uint64_t Idx = CI->getZExtValue();
1632 Constant *CstShiftAmount =
1633 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1634 FindConstant(CstShiftAmount);
1635 } else {
1636 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1637 FindConstant(Cst8);
1638 }
1639
1640 continue;
1641 }
1642
1643 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1644 // Ignore constant for index of ExtractElement instruction.
1645 if (i == 1) {
1646 continue;
1647 }
1648
1649 if (isa<Constant>(I.getOperand(i)) &&
1650 !isa<GlobalValue>(I.getOperand(i))) {
1651 FindConstant(I.getOperand(i));
1652 }
1653 }
1654
1655 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001656 } else if ((Instruction::Xor == I.getOpcode()) &&
1657 I.getType()->isIntegerTy(1)) {
1658 // We special-case Xor where the type is i1 and one of the arguments is
1659 // the constant 1 (true): this becomes an OpLogicalNot in SPIR-V, so we
1660 // don't need that constant.
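        // For example, IR like
        //   %not = xor i1 %p, true
        // is emitted as OpLogicalNot %p, so the "true" operand needs no
        // SPIR-V constant of its own (sketch; the emission happens elsewhere).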
David Neto22f144c2017-06-12 14:26:21 -04001661 bool foundConstantTrue = false;
1662 for (Use &Op : I.operands()) {
1663 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1664 auto CI = cast<ConstantInt>(Op);
1665
1666 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001667 // If we already found the true constant, we might (probably only
1668 // on -O0) have an OpLogicalNot which is taking a constant
1669 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001670 FindConstant(Op);
1671 } else {
1672 foundConstantTrue = true;
1673 }
1674 }
1675 }
1676
1677 continue;
David Netod2de94a2017-08-28 17:27:47 -04001678 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001679 // Special case if i8 is not generally handled.
1680 if (!clspv::Option::Int8Support()) {
1681 // For truncation to i8 we mask against 255.
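        // Sketch: without native i8 support, "trunc i32 %x to i8" is emitted
        // as a bitwise AND with this 255 constant (i.e. x & 0xff), keeping
        // the value in a 32-bit register.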
1682 Type *ToTy = I.getType();
1683 if (8u == ToTy->getPrimitiveSizeInBits()) {
1684 LLVMContext &Context = ToTy->getContext();
1685 Constant *Cst255 =
1686 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1687 FindConstant(Cst255);
1688 }
David Netod2de94a2017-08-28 17:27:47 -04001689 }
Neil Henning39672102017-09-29 14:33:13 +01001690 } else if (isa<AtomicRMWInst>(I)) {
1691 LLVMContext &Context = I.getContext();
1692
1693 FindConstant(
1694 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1695 FindConstant(ConstantInt::get(
1696 Type::getInt32Ty(Context),
1697 spv::MemorySemanticsUniformMemoryMask |
1698 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001699 }
1700
1701 for (Use &Op : I.operands()) {
1702 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1703 FindConstant(Op);
1704 }
1705 }
1706 }
1707 }
1708}
1709
1710void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001711 ValueList &CstList = getConstantList();
1712
David Netofb9a7972017-08-25 17:08:24 -04001713 // If V is already tracked, ignore it.
1714 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001715 return;
1716 }
1717
David Neto862b7d82018-06-14 18:48:37 -04001718 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1719 return;
1720 }
1721
David Neto22f144c2017-06-12 14:26:21 -04001722 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001723 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001724
1725 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001726 if (is4xi8vec(CstTy)) {
1727 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001728 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001729 }
1730 }
1731
1732 if (Cst->getNumOperands()) {
1733 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1734 ++I) {
1735 FindConstant(*I);
1736 }
1737
David Netofb9a7972017-08-25 17:08:24 -04001738 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001739 return;
1740 } else if (const ConstantDataSequential *CDS =
1741 dyn_cast<ConstantDataSequential>(Cst)) {
1742 // Add constants for each element to constant list.
1743 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1744 Constant *EleCst = CDS->getElementAsConstant(i);
1745 FindConstant(EleCst);
1746 }
1747 }
1748
1749 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001750 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001751 }
1752}
1753
1754spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1755 switch (AddrSpace) {
1756 default:
1757 llvm_unreachable("Unsupported OpenCL address space");
1758 case AddressSpace::Private:
1759 return spv::StorageClassFunction;
1760 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001761 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001762 case AddressSpace::Constant:
1763 return clspv::Option::ConstantArgsInUniformBuffer()
1764 ? spv::StorageClassUniform
1765 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001766 case AddressSpace::Input:
1767 return spv::StorageClassInput;
1768 case AddressSpace::Local:
1769 return spv::StorageClassWorkgroup;
1770 case AddressSpace::UniformConstant:
1771 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001772 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001773 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001774 case AddressSpace::ModuleScopePrivate:
1775 return spv::StorageClassPrivate;
1776 }
1777}
1778
David Neto862b7d82018-06-14 18:48:37 -04001779spv::StorageClass
1780SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1781 switch (arg_kind) {
1782 case clspv::ArgKind::Buffer:
1783 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001784 case clspv::ArgKind::BufferUBO:
1785 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001786 case clspv::ArgKind::Pod:
1787 return clspv::Option::PodArgsInUniformBuffer()
1788 ? spv::StorageClassUniform
1789 : spv::StorageClassStorageBuffer;
1790 case clspv::ArgKind::Local:
1791 return spv::StorageClassWorkgroup;
1792 case clspv::ArgKind::ReadOnlyImage:
1793 case clspv::ArgKind::WriteOnlyImage:
1794 case clspv::ArgKind::Sampler:
1795 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001796 default:
1797 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001798 }
1799}
1800
David Neto22f144c2017-06-12 14:26:21 -04001801spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1802 return StringSwitch<spv::BuiltIn>(Name)
1803 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1804 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1805 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1806 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1807 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1808 .Default(spv::BuiltInMax);
1809}
1810
1811void SPIRVProducerPass::GenerateExtInstImport() {
1812 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1813 uint32_t &ExtInstImportID = getOpExtInstImportID();
1814
1815 //
1816 // Generate OpExtInstImport.
1817 //
1818 // Ops[0] ... Ops[n] = Name (Literal String)
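  // The emitted instruction looks roughly like (sketch of the form only):
  //   %glsl = OpExtInstImport "GLSL.std.450"
  // and that id is then reused by every later OpExtInst.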
David Neto22f144c2017-06-12 14:26:21 -04001819 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001820 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1821 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001822}
1823
alan-bakerb6b09dc2018-11-08 16:59:28 -05001824void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1825 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001826 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1827 ValueMapType &VMap = getValueMap();
1828 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001829 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001830
1831 // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
1832 // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
1833 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1834
1835 for (Type *Ty : getTypeList()) {
1836 // Update TypeMap with nextID for reference later.
1837 TypeMap[Ty] = nextID;
1838
1839 switch (Ty->getTypeID()) {
1840 default: {
1841 Ty->print(errs());
1842 llvm_unreachable("Unsupported type???");
1843 break;
1844 }
1845 case Type::MetadataTyID:
1846 case Type::LabelTyID: {
1847 // Ignore these types.
1848 break;
1849 }
1850 case Type::PointerTyID: {
1851 PointerType *PTy = cast<PointerType>(Ty);
1852 unsigned AddrSpace = PTy->getAddressSpace();
1853
1854 // For the purposes of our Vulkan SPIR-V type system, constant and global
1855 // are conflated.
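      // Sketch of the effect: when constant args live in storage buffers,
      // "float addrspace(constant)*" and "float addrspace(global)*" end up
      // sharing one OpTypePointer StorageBuffer %float instead of getting
      // two distinct pointer types.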
1856 bool UseExistingOpTypePointer = false;
1857 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001858 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1859 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001860 // Check to see if we already created this type (for instance, if we
1861 // had a constant <type>* and a global <type>*, the type would be
1862 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001863 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1864 if (0 < TypeMap.count(GlobalTy)) {
1865 TypeMap[PTy] = TypeMap[GlobalTy];
1866 UseExistingOpTypePointer = true;
1867 break;
1868 }
David Neto22f144c2017-06-12 14:26:21 -04001869 }
1870 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001871 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1872 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001873
alan-bakerb6b09dc2018-11-08 16:59:28 -05001874 // Check to see if we already created this type (for instance, if we
1875 // had a constant <type>* and a global <type>*, the type would be
1876 // created by one of these types, and shared by both).
1877 auto ConstantTy =
1878 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001879 if (0 < TypeMap.count(ConstantTy)) {
1880 TypeMap[PTy] = TypeMap[ConstantTy];
1881 UseExistingOpTypePointer = true;
1882 }
David Neto22f144c2017-06-12 14:26:21 -04001883 }
1884 }
1885
David Neto862b7d82018-06-14 18:48:37 -04001886 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001887
David Neto862b7d82018-06-14 18:48:37 -04001888 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001889 //
1890 // Generate OpTypePointer.
1891 //
1892
1893 // OpTypePointer
1894 // Ops[0] = Storage Class
1895 // Ops[1] = Element Type ID
1896 SPIRVOperandList Ops;
1897
David Neto257c3892018-04-11 13:19:45 -04001898 Ops << MkNum(GetStorageClass(AddrSpace))
1899 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001900
David Neto87846742018-04-11 17:36:22 -04001901 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001902 SPIRVInstList.push_back(Inst);
1903 }
David Neto22f144c2017-06-12 14:26:21 -04001904 break;
1905 }
1906 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001907 StructType *STy = cast<StructType>(Ty);
1908
1909 // Handle sampler type.
1910 if (STy->isOpaque()) {
1911 if (STy->getName().equals("opencl.sampler_t")) {
1912 //
1913 // Generate OpTypeSampler
1914 //
1915 // Empty Ops.
1916 SPIRVOperandList Ops;
1917
David Neto87846742018-04-11 17:36:22 -04001918 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001919 SPIRVInstList.push_back(Inst);
1920 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001921 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1922 STy->getName().startswith("opencl.image1d_wo_t") ||
1923 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001924 STy->getName().startswith("opencl.image2d_wo_t") ||
1925 STy->getName().startswith("opencl.image3d_ro_t") ||
1926 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001927 //
1928 // Generate OpTypeImage
1929 //
1930 // Ops[0] = Sampled Type ID
1931 // Ops[1] = Dim ID
1932 // Ops[2] = Depth (Literal Number)
1933 // Ops[3] = Arrayed (Literal Number)
1934 // Ops[4] = MS (Literal Number)
1935 // Ops[5] = Sampled (Literal Number)
1936 // Ops[6] = Image Format ID
1937 //
1938 SPIRVOperandList Ops;
1939
alan-bakerf67468c2019-11-25 15:51:49 -05001940 uint32_t ImageTyID = nextID++;
1941 uint32_t SampledTyID = 0;
1942 if (STy->getName().contains(".float")) {
1943 SampledTyID = lookupType(Type::getFloatTy(Context));
1944 } else if (STy->getName().contains(".uint")) {
1945 SampledTyID = lookupType(Type::getInt32Ty(Context));
1946 } else if (STy->getName().contains(".int")) {
1947 // Generate a signed 32-bit integer if necessary.
1948 if (int32ID == 0) {
1949 int32ID = nextID++;
1950 SPIRVOperandList intOps;
1951 intOps << MkNum(32);
1952 intOps << MkNum(1);
1953 auto signed_int =
1954 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1955 SPIRVInstList.push_back(signed_int);
1956 }
1957 SampledTyID = int32ID;
1958
1959 // Generate a vec4 of the signed int if necessary.
1960 if (v4int32ID == 0) {
1961 v4int32ID = nextID++;
1962 SPIRVOperandList vecOps;
1963 vecOps << MkId(int32ID);
1964 vecOps << MkNum(4);
1965 auto int_vec =
1966 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
1967 SPIRVInstList.push_back(int_vec);
1968 }
1969 } else {
1970 // This was likely an UndefValue.
1971 SampledTyID = lookupType(Type::getFloatTy(Context));
1972 }
David Neto257c3892018-04-11 13:19:45 -04001973 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04001974
1975 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05001976 if (STy->getName().startswith("opencl.image1d_ro_t") ||
1977 STy->getName().startswith("opencl.image1d_wo_t")) {
1978 DimID = spv::Dim1D;
1979 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
1980 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001981 DimID = spv::Dim3D;
1982 }
David Neto257c3892018-04-11 13:19:45 -04001983 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04001984
1985 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04001986 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001987
1988 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04001989 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001990
1991 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04001992 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04001993
1994 // TODO: Set up Sampled.
1995 //
1996 // From Spec
1997 //
1998 // 0 indicates this is only known at run time, not at compile time
1999 // 1 indicates will be used with sampler
2000 // 2 indicates will be used without a sampler (a storage image)
2001 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002002 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002003 Sampled = 2;
2004 }
David Neto257c3892018-04-11 13:19:45 -04002005 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002006
2007 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002008 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002009
alan-bakerf67468c2019-11-25 15:51:49 -05002010 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002011 SPIRVInstList.push_back(Inst);
2012 break;
2013 }
2014 }
2015
2016 //
2017 // Generate OpTypeStruct
2018 //
2019 // Ops[0] ... Ops[n] = Member IDs
2020 SPIRVOperandList Ops;
2021
2022 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002023 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002024 }
2025
David Neto22f144c2017-06-12 14:26:21 -04002026 uint32_t STyID = nextID;
2027
alan-bakerb6b09dc2018-11-08 16:59:28 -05002028 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002029 SPIRVInstList.push_back(Inst);
2030
2031 // Generate OpMemberDecorate.
2032 auto DecoInsertPoint =
2033 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2034 [](SPIRVInstruction *Inst) -> bool {
2035 return Inst->getOpcode() != spv::OpDecorate &&
2036 Inst->getOpcode() != spv::OpMemberDecorate &&
2037 Inst->getOpcode() != spv::OpExtInstImport;
2038 });
2039
David Netoc463b372017-08-10 15:32:21 -04002040 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04002041 // Search for the correct offsets if this type was remapped.
2042 std::vector<uint32_t> *offsets = nullptr;
2043 auto iter = RemappedUBOTypeOffsets.find(STy);
2044 if (iter != RemappedUBOTypeOffsets.end()) {
2045 offsets = &iter->second;
2046 }
David Netoc463b372017-08-10 15:32:21 -04002047
David Neto862b7d82018-06-14 18:48:37 -04002048 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04002049 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2050 MemberIdx++) {
2051 // Ops[0] = Structure Type ID
2052 // Ops[1] = Member Index(Literal Number)
2053 // Ops[2] = Decoration (Offset)
2054 // Ops[3] = Byte Offset (Literal Number)
2055 Ops.clear();
2056
David Neto257c3892018-04-11 13:19:45 -04002057 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002058
alan-bakerb6b09dc2018-11-08 16:59:28 -05002059 auto ByteOffset =
2060 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04002061 if (offsets) {
2062 ByteOffset = (*offsets)[MemberIdx];
2063 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05002064 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04002065 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04002066 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04002067
David Neto87846742018-04-11 17:36:22 -04002068 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002069 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002070 }
2071
2072 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002073 if (StructTypesNeedingBlock.idFor(STy)) {
2074 Ops.clear();
2075 // Use Block decorations with StorageBuffer storage class.
2076 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002077
David Neto862b7d82018-06-14 18:48:37 -04002078 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2079 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002080 }
2081 break;
2082 }
2083 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002084 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002085
2086 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002087 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002088 SPIRVInstList.push_back(Inst);
2089 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002090 if (!clspv::Option::Int8Support()) {
2091 // i8 is added to TypeMap as i32.
2092 // No matter what LLVM type is requested first, always alias the
2093 // second one's SPIR-V type to be the same as the one we generated
2094 // first.
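        // Sketch: whichever of i8 or i32 is seen first gets the OpTypeInt 32 0,
        // and the other is aliased to the same SPIR-V id via TypeMap.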
2095 unsigned aliasToWidth = 0;
2096 if (BitWidth == 8) {
2097 aliasToWidth = 32;
2098 BitWidth = 32;
2099 } else if (BitWidth == 32) {
2100 aliasToWidth = 8;
2101 }
2102 if (aliasToWidth) {
2103 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2104 auto where = TypeMap.find(otherType);
2105 if (where == TypeMap.end()) {
2106 // Go ahead and make it, but also map the other type to it.
2107 TypeMap[otherType] = nextID;
2108 } else {
2109 // Alias this SPIR-V type the existing type.
2110 TypeMap[Ty] = where->second;
2111 break;
2112 }
David Neto391aeb12017-08-26 15:51:58 -04002113 }
David Neto22f144c2017-06-12 14:26:21 -04002114 }
2115
David Neto257c3892018-04-11 13:19:45 -04002116 SPIRVOperandList Ops;
2117 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002118
2119 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002120 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002121 }
2122 break;
2123 }
2124 case Type::HalfTyID:
2125 case Type::FloatTyID:
2126 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002127 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002128 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002129
2130 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002131 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002132 break;
2133 }
2134 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002135 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002136 const uint64_t Length = ArrTy->getArrayNumElements();
2137 if (Length == 0) {
2138 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002139
David Neto862b7d82018-06-14 18:48:37 -04002140 // Only generate the type once.
2141 // TODO(dneto): Can it ever be generated more than once?
2142 // Doesn't LLVM type uniqueness guarantee we'll only see this
2143 // once?
2144 Type *EleTy = ArrTy->getArrayElementType();
2145 if (OpRuntimeTyMap.count(EleTy) == 0) {
2146 uint32_t OpTypeRuntimeArrayID = nextID;
2147 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002148
David Neto862b7d82018-06-14 18:48:37 -04002149 //
2150 // Generate OpTypeRuntimeArray.
2151 //
David Neto22f144c2017-06-12 14:26:21 -04002152
David Neto862b7d82018-06-14 18:48:37 -04002153 // OpTypeRuntimeArray
2154 // Ops[0] = Element Type ID
2155 SPIRVOperandList Ops;
2156 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002157
David Neto862b7d82018-06-14 18:48:37 -04002158 SPIRVInstList.push_back(
2159 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002160
David Neto862b7d82018-06-14 18:48:37 -04002161 if (Hack_generate_runtime_array_stride_early) {
2162 // Generate OpDecorate.
2163 auto DecoInsertPoint = std::find_if(
2164 SPIRVInstList.begin(), SPIRVInstList.end(),
2165 [](SPIRVInstruction *Inst) -> bool {
2166 return Inst->getOpcode() != spv::OpDecorate &&
2167 Inst->getOpcode() != spv::OpMemberDecorate &&
2168 Inst->getOpcode() != spv::OpExtInstImport;
2169 });
David Neto22f144c2017-06-12 14:26:21 -04002170
David Neto862b7d82018-06-14 18:48:37 -04002171 // Ops[0] = Target ID
2172 // Ops[1] = Decoration (ArrayStride)
2173 // Ops[2] = Stride Number(Literal Number)
2174 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002175
David Neto862b7d82018-06-14 18:48:37 -04002176 Ops << MkId(OpTypeRuntimeArrayID)
2177 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002178 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002179
David Neto862b7d82018-06-14 18:48:37 -04002180 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2181 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2182 }
2183 }
David Neto22f144c2017-06-12 14:26:21 -04002184
David Neto862b7d82018-06-14 18:48:37 -04002185 } else {
David Neto22f144c2017-06-12 14:26:21 -04002186
David Neto862b7d82018-06-14 18:48:37 -04002187 //
2188 // Generate OpConstant and OpTypeArray.
2189 //
2190
2191 //
2192 // Generate OpConstant for array length.
2193 //
2194 // Ops[0] = Result Type ID
2195 // Ops[1] .. Ops[n] = Values LiteralNumber
2196 SPIRVOperandList Ops;
2197
2198 Type *LengthTy = Type::getInt32Ty(Context);
2199 uint32_t ResTyID = lookupType(LengthTy);
2200 Ops << MkId(ResTyID);
2201
2202 assert(Length < UINT32_MAX);
2203 Ops << MkNum(static_cast<uint32_t>(Length));
2204
2205 // Add constant for length to constant list.
2206 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2207 AllocatedVMap[CstLength] = nextID;
2208 VMap[CstLength] = nextID;
2209 uint32_t LengthID = nextID;
2210
2211 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2212 SPIRVInstList.push_back(CstInst);
2213
2214 // Remember to generate ArrayStride later
2215 getTypesNeedingArrayStride().insert(Ty);
2216
2217 //
2218 // Generate OpTypeArray.
2219 //
2220 // Ops[0] = Element Type ID
2221 // Ops[1] = Array Length Constant ID
2222 Ops.clear();
2223
2224 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2225 Ops << MkId(EleTyID) << MkId(LengthID);
2226
2227 // Update TypeMap with nextID.
2228 TypeMap[Ty] = nextID;
2229
2230 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2231 SPIRVInstList.push_back(ArrayInst);
2232 }
David Neto22f144c2017-06-12 14:26:21 -04002233 break;
2234 }
2235 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002236 // <4 x i8> is changed to i32 if i8 is not generally supported.
2237 if (!clspv::Option::Int8Support() &&
2238 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002239 if (Ty->getVectorNumElements() == 4) {
2240 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2241 break;
2242 } else {
2243 Ty->print(errs());
2244 llvm_unreachable("Support above i8 vector type");
2245 }
2246 }
2247
2248 // Ops[0] = Component Type ID
2249 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002250 SPIRVOperandList Ops;
2251 Ops << MkId(lookupType(Ty->getVectorElementType()))
2252 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002253
alan-bakerb6b09dc2018-11-08 16:59:28 -05002254 SPIRVInstruction *inst =
2255 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002256 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002257 break;
2258 }
2259 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002260 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002261 SPIRVInstList.push_back(Inst);
2262 break;
2263 }
2264 case Type::FunctionTyID: {
2265 // Generate SPIRV instruction for function type.
2266 FunctionType *FTy = cast<FunctionType>(Ty);
2267
2268 // Ops[0] = Return Type ID
2269 // Ops[1] ... Ops[n] = Parameter Type IDs
2270 SPIRVOperandList Ops;
2271
2272 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002273 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002274
2275 // Find SPIRV instructions for parameter types
2276 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2277 // Find SPIRV instruction for parameter type.
2278 auto ParamTy = FTy->getParamType(k);
2279 if (ParamTy->isPointerTy()) {
2280 auto PointeeTy = ParamTy->getPointerElementType();
2281 if (PointeeTy->isStructTy() &&
2282 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2283 ParamTy = PointeeTy;
2284 }
2285 }
2286
David Netoc6f3ab22018-04-06 18:02:31 -04002287 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002288 }
2289
David Neto87846742018-04-11 17:36:22 -04002290 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002291 SPIRVInstList.push_back(Inst);
2292 break;
2293 }
2294 }
2295 }
2296
2297 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002298 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002299 //
2300 // Generate OpTypeSampledImage.
2301 //
2302 // Ops[0] = Image Type ID
2303 //
2304 SPIRVOperandList Ops;
2305
David Netoc6f3ab22018-04-06 18:02:31 -04002306 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002307
alan-bakerabd82722019-12-03 17:14:51 -05002308 // Update the image type map.
2309 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002310
David Neto87846742018-04-11 17:36:22 -04002311 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002312 SPIRVInstList.push_back(Inst);
2313 }
David Netoc6f3ab22018-04-06 18:02:31 -04002314
2315 // Generate types for pointer-to-local arguments.
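  // Hedged sketch of what gets emitted per SpecId (ids are illustrative):
  //   %size = OpSpecConstant %uint 1          ; arg_info.array_size_id
  //   %arr  = OpTypeArray %elem %size         ; arg_info.array_type_id
  //   %ptr  = OpTypePointer Workgroup %arr    ; arg_info.ptr_array_type_id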
Alan Baker202c8c72018-08-13 13:47:44 -04002316 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2317 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002318 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002319
2320 // Generate the spec constant.
2321 SPIRVOperandList Ops;
2322 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002323 SPIRVInstList.push_back(
2324 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002325
2326 // Generate the array type.
2327 Ops.clear();
2328 // The element type must have been created.
2329 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2330 assert(elem_ty_id);
2331 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2332
2333 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002334 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002335
2336 Ops.clear();
2337 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002338 SPIRVInstList.push_back(new SPIRVInstruction(
2339 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002340 }
David Neto22f144c2017-06-12 14:26:21 -04002341}
2342
2343void SPIRVProducerPass::GenerateSPIRVConstants() {
2344 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2345 ValueMapType &VMap = getValueMap();
2346 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2347 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002348 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002349
2350 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002351 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002352 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002353
2354 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002355 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002356 continue;
2357 }
2358
David Netofb9a7972017-08-25 17:08:24 -04002359 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002360 VMap[Cst] = nextID;
2361
2362 //
2363 // Generate OpConstant.
2364 //
2365
2366 // Ops[0] = Result Type ID
2367 // Ops[1] .. Ops[n] = Values LiteralNumber
2368 SPIRVOperandList Ops;
2369
David Neto257c3892018-04-11 13:19:45 -04002370 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002371
2372 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002373 spv::Op Opcode = spv::OpNop;
2374
2375 if (isa<UndefValue>(Cst)) {
2376 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002377 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002378 if (hack_undef && IsTypeNullable(Cst->getType())) {
2379 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002380 }
David Neto22f144c2017-06-12 14:26:21 -04002381 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2382 unsigned BitWidth = CI->getBitWidth();
2383 if (BitWidth == 1) {
2384 // If the bitwidth of constant is 1, generate OpConstantTrue or
2385 // OpConstantFalse.
2386 if (CI->getZExtValue()) {
2387 // Ops[0] = Result Type ID
2388 Opcode = spv::OpConstantTrue;
2389 } else {
2390 // Ops[0] = Result Type ID
2391 Opcode = spv::OpConstantFalse;
2392 }
David Neto22f144c2017-06-12 14:26:21 -04002393 } else {
2394 auto V = CI->getZExtValue();
2395 LiteralNum.push_back(V & 0xFFFFFFFF);
2396
2397 if (BitWidth > 32) {
2398 LiteralNum.push_back(V >> 32);
2399 }
2400
2401 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002402
David Neto257c3892018-04-11 13:19:45 -04002403 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002404 }
2405 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2406 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2407 Type *CFPTy = CFP->getType();
2408 if (CFPTy->isFloatTy()) {
2409 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002410 } else if (CFPTy->isDoubleTy()) {
2411 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2412 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002413 } else if (CFPTy->isHalfTy()) {
2414 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002415 } else {
2416 CFPTy->print(errs());
2417 llvm_unreachable("Implement this ConstantFP Type");
2418 }
2419
2420 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002421
David Neto257c3892018-04-11 13:19:45 -04002422 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002423 } else if (isa<ConstantDataSequential>(Cst) &&
2424 cast<ConstantDataSequential>(Cst)->isString()) {
2425 Cst->print(errs());
2426 llvm_unreachable("Implement this Constant");
2427
2428 } else if (const ConstantDataSequential *CDS =
2429 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002430 // Let's convert <4 x i8> constant to int constant specially.
2431 // This case occurs when all the values are specified as constant
2432 // ints.
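      // Worked example (a sketch): the constant <4 x i8> <1, 2, 3, 4> folds,
      // via the loop below, into the single i32 0x01020304, with element 0
      // landing in the most significant byte.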
2433 Type *CstTy = Cst->getType();
2434 if (is4xi8vec(CstTy)) {
2435 LLVMContext &Context = CstTy->getContext();
2436
2437 //
2438 // Generate OpConstant with OpTypeInt 32 0.
2439 //
Neil Henning39672102017-09-29 14:33:13 +01002440 uint32_t IntValue = 0;
2441 for (unsigned k = 0; k < 4; k++) {
2442 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002443 IntValue = (IntValue << 8) | (Val & 0xffu);
2444 }
2445
2446 Type *i32 = Type::getInt32Ty(Context);
2447 Constant *CstInt = ConstantInt::get(i32, IntValue);
2448 // If this constant is already registered on VMap, use it.
2449 if (VMap.count(CstInt)) {
2450 uint32_t CstID = VMap[CstInt];
2451 VMap[Cst] = CstID;
2452 continue;
2453 }
2454
David Neto257c3892018-04-11 13:19:45 -04002455 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002456
David Neto87846742018-04-11 17:36:22 -04002457 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002458 SPIRVInstList.push_back(CstInst);
2459
2460 continue;
2461 }
2462
2463 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002464 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2465 Constant *EleCst = CDS->getElementAsConstant(k);
2466 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002467 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002468 }
2469
2470 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002471 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2472 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002473 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002474 Type *CstTy = Cst->getType();
2475 if (is4xi8vec(CstTy)) {
2476 LLVMContext &Context = CstTy->getContext();
2477
2478 //
2479 // Generate OpConstant with OpTypeInt 32 0.
2480 //
Neil Henning39672102017-09-29 14:33:13 +01002481 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002482 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2483 I != E; ++I) {
2484 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002485 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002486 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2487 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002488 }
David Neto49351ac2017-08-26 17:32:20 -04002489 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002490 }
2491
David Neto49351ac2017-08-26 17:32:20 -04002492 Type *i32 = Type::getInt32Ty(Context);
2493 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002494 // If this constant is already registered on VMap, use it.
2495 if (VMap.count(CstInt)) {
2496 uint32_t CstID = VMap[CstInt];
2497 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002498 continue;
David Neto22f144c2017-06-12 14:26:21 -04002499 }
2500
David Neto257c3892018-04-11 13:19:45 -04002501 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002502
David Neto87846742018-04-11 17:36:22 -04002503 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002504 SPIRVInstList.push_back(CstInst);
2505
David Neto19a1bad2017-08-25 15:01:41 -04002506 continue;
David Neto22f144c2017-06-12 14:26:21 -04002507 }
2508
2509 // We use a constant composite in SPIR-V for our constant aggregate in
2510 // LLVM.
2511 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002512
2513 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2514 // Look up the ID of the element of this aggregate (for which we will
2515 // already have created a constant).
2516 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2517
2518 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002519 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002520 }
2521 } else if (Cst->isNullValue()) {
2522 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002523 } else {
2524 Cst->print(errs());
2525 llvm_unreachable("Unsupported Constant???");
2526 }
2527
alan-baker5b86ed72019-02-15 08:26:50 -05002528 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2529 // Null pointer requires variable pointers.
2530 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2531 }
2532
David Neto87846742018-04-11 17:36:22 -04002533 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002534 SPIRVInstList.push_back(CstInst);
2535 }
2536}
2537
2538void SPIRVProducerPass::GenerateSamplers(Module &M) {
2539 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002540
alan-bakerb6b09dc2018-11-08 16:59:28 -05002541 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002542 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002543 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2544 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002545
David Neto862b7d82018-06-14 18:48:37 -04002546 // We might have samplers in the sampler map that are not used
2547 // in the translation unit. We need to allocate variables
2548 // for them and bindings too.
2549 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002550
Kévin Petitdf71de32019-04-09 14:09:50 +01002551 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002552 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002553 if (!var_fn)
2554 return;
alan-baker09cb9802019-12-10 13:16:27 -05002555
David Neto862b7d82018-06-14 18:48:37 -04002556 for (auto user : var_fn->users()) {
2557 // Populate SamplerLiteralToDescriptorSetMap and
2558 // SamplerLiteralToBindingMap.
2559 //
2560 // Look for calls like
2561 // call %opencl.sampler_t addrspace(2)*
2562 // @clspv.sampler.var.literal(
2563 // i32 descriptor,
2564 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002565 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002566 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002567 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002568 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002569 auto sampler_value = third_param;
2570 if (clspv::Option::UseSamplerMap()) {
2571 if (third_param >= sampler_map.size()) {
2572 errs() << "Out of bounds index to sampler map: " << third_param;
2573 llvm_unreachable("bad sampler init: out of bounds");
2574 }
2575 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002576 }
2577
David Neto862b7d82018-06-14 18:48:37 -04002578 const auto descriptor_set = static_cast<unsigned>(
2579 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2580 const auto binding = static_cast<unsigned>(
2581 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2582
2583 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2584 SamplerLiteralToBindingMap[sampler_value] = binding;
2585 used_bindings.insert(binding);
2586 }
2587 }
2588
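  // Second pass: emit one OpVariable per distinct literal sampler and decorate it with its descriptor set and binding.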
alan-baker09cb9802019-12-10 13:16:27 -05002589 DenseSet<size_t> seen;
2590 for (auto user : var_fn->users()) {
2591 if (!isa<CallInst>(user))
2592 continue;
2593
2594 auto call = cast<CallInst>(user);
2595 const unsigned third_param = static_cast<unsigned>(
2596 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2597
2598 // Already allocated a variable for this value.
2599 if (!seen.insert(third_param).second)
2600 continue;
2601
2602 auto sampler_value = third_param;
2603 if (clspv::Option::UseSamplerMap()) {
2604 sampler_value = sampler_map[third_param].first;
2605 }
2606
David Neto22f144c2017-06-12 14:26:21 -04002607 // Generate OpVariable.
2608 //
2609 // GIDOps[0] : Result Type ID
2610 // GIDOps[1] : Storage Class
2611 SPIRVOperandList Ops;
2612
David Neto257c3892018-04-11 13:19:45 -04002613 Ops << MkId(lookupType(SamplerTy))
2614 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002615
David Neto862b7d82018-06-14 18:48:37 -04002616 auto sampler_var_id = nextID++;
2617 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002618 SPIRVInstList.push_back(Inst);
2619
alan-baker09cb9802019-12-10 13:16:27 -05002620 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002621
2622 // Find Insert Point for OpDecorate.
2623 auto DecoInsertPoint =
2624 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2625 [](SPIRVInstruction *Inst) -> bool {
2626 return Inst->getOpcode() != spv::OpDecorate &&
2627 Inst->getOpcode() != spv::OpMemberDecorate &&
2628 Inst->getOpcode() != spv::OpExtInstImport;
2629 });
2630
2631 // Ops[0] = Target ID
2632 // Ops[1] = Decoration (DescriptorSet)
2633 // Ops[2] = LiteralNumber according to Decoration
2634 Ops.clear();
2635
David Neto862b7d82018-06-14 18:48:37 -04002636 unsigned descriptor_set;
2637 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002638 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002639 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002640 // This sampler is not actually used. Assign it the first unused binding.
2641 for (binding = 0; used_bindings.count(binding); binding++)
2642 ;
2643 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2644 used_bindings.insert(binding);
2645 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002646 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2647 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002648
alan-baker09cb9802019-12-10 13:16:27 -05002649 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002650 descriptorMapEntries->emplace_back(std::move(sampler_data),
2651 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002652 }
2653
2654 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2655 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002656
David Neto87846742018-04-11 17:36:22 -04002657 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002658 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2659
2660 // Ops[0] = Target ID
2661 // Ops[1] = Decoration (Binding)
2662 // Ops[2] = LiteralNumber according to Decoration
2663 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002664 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2665 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002666
David Neto87846742018-04-11 17:36:22 -04002667 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002668 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2669 }
David Neto862b7d82018-06-14 18:48:37 -04002670}
David Neto22f144c2017-06-12 14:26:21 -04002671
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002672void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002673 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2674 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002675
David Neto862b7d82018-06-14 18:48:37 -04002676 // Generate variables. Make one for each resource var info object.
2677 for (auto *info : ModuleOrderedResourceVars) {
2678 Type *type = info->var_fn->getReturnType();
2679 // Remap the address space for opaque types.
2680 switch (info->arg_kind) {
2681 case clspv::ArgKind::Sampler:
2682 case clspv::ArgKind::ReadOnlyImage:
2683 case clspv::ArgKind::WriteOnlyImage:
2684 type = PointerType::get(type->getPointerElementType(),
2685 clspv::AddressSpace::UniformConstant);
2686 break;
2687 default:
2688 break;
2689 }
David Neto22f144c2017-06-12 14:26:21 -04002690
David Neto862b7d82018-06-14 18:48:37 -04002691 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002692
David Neto862b7d82018-06-14 18:48:37 -04002693 const auto type_id = lookupType(type);
2694 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2695 SPIRVOperandList Ops;
2696 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002697
David Neto862b7d82018-06-14 18:48:37 -04002698 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2699 SPIRVInstList.push_back(Inst);
2700
2701 // Map calls to the variable-builtin-function.
2702 for (auto &U : info->var_fn->uses()) {
2703 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2704 const auto set = unsigned(
2705 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2706 const auto binding = unsigned(
2707 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2708 if (set == info->descriptor_set && binding == info->binding) {
2709 switch (info->arg_kind) {
2710 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002711 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002712 case clspv::ArgKind::Pod:
2713 // The call maps to the variable directly.
2714 VMap[call] = info->var_id;
2715 break;
2716 case clspv::ArgKind::Sampler:
2717 case clspv::ArgKind::ReadOnlyImage:
2718 case clspv::ArgKind::WriteOnlyImage:
2719 // The call maps to a load we generate later.
2720 ResourceVarDeferredLoadCalls[call] = info->var_id;
2721 break;
2722 default:
2723 llvm_unreachable("Unhandled arg kind");
2724 }
2725 }
David Neto22f144c2017-06-12 14:26:21 -04002726 }
David Neto862b7d82018-06-14 18:48:37 -04002727 }
2728 }
David Neto22f144c2017-06-12 14:26:21 -04002729
David Neto862b7d82018-06-14 18:48:37 -04002730 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002731
David Neto862b7d82018-06-14 18:48:37 -04002732 // Find Insert Point for OpDecorate.
2733 auto DecoInsertPoint =
2734 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2735 [](SPIRVInstruction *Inst) -> bool {
2736 return Inst->getOpcode() != spv::OpDecorate &&
2737 Inst->getOpcode() != spv::OpMemberDecorate &&
2738 Inst->getOpcode() != spv::OpExtInstImport;
2739 });
2740
2741 SPIRVOperandList Ops;
2742 for (auto *info : ModuleOrderedResourceVars) {
2743 // Decorate with DescriptorSet and Binding.
2744 Ops.clear();
2745 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2746 << MkNum(info->descriptor_set);
2747 SPIRVInstList.insert(DecoInsertPoint,
2748 new SPIRVInstruction(spv::OpDecorate, Ops));
2749
2750 Ops.clear();
2751 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2752 << MkNum(info->binding);
2753 SPIRVInstList.insert(DecoInsertPoint,
2754 new SPIRVInstruction(spv::OpDecorate, Ops));
2755
alan-bakere9308012019-03-15 10:25:13 -04002756 if (info->coherent) {
2757 // Decorate with Coherent if required for the variable.
2758 Ops.clear();
2759 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2760 SPIRVInstList.insert(DecoInsertPoint,
2761 new SPIRVInstruction(spv::OpDecorate, Ops));
2762 }
2763
David Neto862b7d82018-06-14 18:48:37 -04002764 // Generate NonWritable and NonReadable
2765 switch (info->arg_kind) {
2766 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002767 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002768 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2769 clspv::AddressSpace::Constant) {
2770 Ops.clear();
2771 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2772 SPIRVInstList.insert(DecoInsertPoint,
2773 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002774 }
David Neto862b7d82018-06-14 18:48:37 -04002775 break;
David Neto862b7d82018-06-14 18:48:37 -04002776 case clspv::ArgKind::WriteOnlyImage:
2777 Ops.clear();
2778 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2779 SPIRVInstList.insert(DecoInsertPoint,
2780 new SPIRVInstruction(spv::OpDecorate, Ops));
2781 break;
2782 default:
2783 break;
David Neto22f144c2017-06-12 14:26:21 -04002784 }
2785 }
2786}
2787
2788void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002789 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002790 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2791 ValueMapType &VMap = getValueMap();
2792 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002793 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002794
2795 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2796 Type *Ty = GV.getType();
2797 PointerType *PTy = cast<PointerType>(Ty);
2798
2799 uint32_t InitializerID = 0;
2800
2801 // Workgroup size is handled differently (it goes into a constant)
2802 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2803 std::vector<bool> HasMDVec;
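    // 0xFFFFFFFF acts as a sentinel meaning no reqd_work_group_size has been seen yet.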
2804 uint32_t PrevXDimCst = 0xFFFFFFFF;
2805 uint32_t PrevYDimCst = 0xFFFFFFFF;
2806 uint32_t PrevZDimCst = 0xFFFFFFFF;
2807 for (Function &Func : *GV.getParent()) {
2808 if (Func.isDeclaration()) {
2809 continue;
2810 }
2811
2812 // We only need to check kernels.
2813 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2814 continue;
2815 }
2816
2817 if (const MDNode *MD =
2818 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2819 uint32_t CurXDimCst = static_cast<uint32_t>(
2820 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2821 uint32_t CurYDimCst = static_cast<uint32_t>(
2822 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2823 uint32_t CurZDimCst = static_cast<uint32_t>(
2824 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2825
2826 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2827 PrevZDimCst == 0xFFFFFFFF) {
2828 PrevXDimCst = CurXDimCst;
2829 PrevYDimCst = CurYDimCst;
2830 PrevZDimCst = CurZDimCst;
2831 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2832 CurZDimCst != PrevZDimCst) {
2833 llvm_unreachable(
2834 "reqd_work_group_size must be the same across all kernels");
2835 } else {
2836 continue;
2837 }
2838
2839 //
2840 // Generate OpConstantComposite.
2841 //
2842 // Ops[0] : Result Type ID
2843 // Ops[1] : Constant size for x dimension.
2844 // Ops[2] : Constant size for y dimension.
2845 // Ops[3] : Constant size for z dimension.
2846 SPIRVOperandList Ops;
2847
2848 uint32_t XDimCstID =
2849 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2850 uint32_t YDimCstID =
2851 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2852 uint32_t ZDimCstID =
2853 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2854
2855 InitializerID = nextID;
2856
David Neto257c3892018-04-11 13:19:45 -04002857 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2858 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002859
David Neto87846742018-04-11 17:36:22 -04002860 auto *Inst =
2861 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002862 SPIRVInstList.push_back(Inst);
2863
2864 HasMDVec.push_back(true);
2865 } else {
2866 HasMDVec.push_back(false);
2867 }
2868 }
2869
2870 // Check that all kernels have the same definition of work_group_size.
2871 bool HasMD = false;
2872 if (!HasMDVec.empty()) {
2873 HasMD = HasMDVec[0];
2874 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2875 if (HasMD != HasMDVec[i]) {
2876 llvm_unreachable(
2877 "Kernels should have consistent work group size definition");
2878 }
2879 }
2880 }
2881
2882 // If no kernel has reqd_work_group_size metadata, generate
2883 // OpSpecConstants for the x/y/z dimensions.
2884 if (!HasMD) {
2885 //
2886 // Generate OpSpecConstants for x/y/z dimension.
2887 //
2888 // Ops[0] : Result Type ID
2889 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2890 uint32_t XDimCstID = 0;
2891 uint32_t YDimCstID = 0;
2892 uint32_t ZDimCstID = 0;
2893
David Neto22f144c2017-06-12 14:26:21 -04002894 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002895 uint32_t result_type_id =
2896 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002897
David Neto257c3892018-04-11 13:19:45 -04002898 // X Dimension
2899 Ops << MkId(result_type_id) << MkNum(1);
2900 XDimCstID = nextID++;
2901 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002902 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002903
2904 // Y Dimension
2905 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002906 Ops << MkId(result_type_id) << MkNum(1);
2907 YDimCstID = nextID++;
2908 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002909 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002910
2911 // Z Dimension
2912 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002913 Ops << MkId(result_type_id) << MkNum(1);
2914 ZDimCstID = nextID++;
2915 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002916 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002917
David Neto257c3892018-04-11 13:19:45 -04002918 BuiltinDimVec.push_back(XDimCstID);
2919 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002920 BuiltinDimVec.push_back(ZDimCstID);
2921
David Neto22f144c2017-06-12 14:26:21 -04002922 //
2923 // Generate OpSpecConstantComposite.
2924 //
2925 // Ops[0] : Result Type ID
2926 // Ops[1] : Constant size for x dimension.
2927 // Ops[2] : Constant size for y dimension.
2928 // Ops[3] : Constant size for z dimension.
2929 InitializerID = nextID;
2930
2931 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002932 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2933 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002934
David Neto87846742018-04-11 17:36:22 -04002935 auto *Inst =
2936 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002937 SPIRVInstList.push_back(Inst);
2938 }
2939 }
2940
David Neto22f144c2017-06-12 14:26:21 -04002941 VMap[&GV] = nextID;
2942
2943 //
2944 // Generate OpVariable.
2945 //
2946 // GIDOps[0] : Result Type ID
2947 // GIDOps[1] : Storage Class
2948 SPIRVOperandList Ops;
2949
David Neto85082642018-03-24 06:55:20 -07002950 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002951 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002952
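  // True when this module-scope constant will get its data from a host-provided storage buffer instead of an OpVariable initializer.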
David Neto85082642018-03-24 06:55:20 -07002953 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002954 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002955 clspv::Option::ModuleConstantsInStorageBuffer();
2956
Kévin Petit23d5f182019-08-13 16:21:29 +01002957 if (GV.hasInitializer()) {
2958 auto GVInit = GV.getInitializer();
2959 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2960 assert(VMap.count(GVInit) == 1);
2961 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002962 }
2963 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002964
2965 if (0 != InitializerID) {
2966 // Emit the ID of the initializer as part of the variable definition.
2967 Ops << MkId(InitializerID);
2968 }
David Neto85082642018-03-24 06:55:20 -07002969 const uint32_t var_id = nextID++;
2970
David Neto87846742018-04-11 17:36:22 -04002971 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002972 SPIRVInstList.push_back(Inst);
2973
2974 // If we have a builtin.
2975 if (spv::BuiltInMax != BuiltinType) {
2976 // Find Insert Point for OpDecorate.
2977 auto DecoInsertPoint =
2978 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2979 [](SPIRVInstruction *Inst) -> bool {
2980 return Inst->getOpcode() != spv::OpDecorate &&
2981 Inst->getOpcode() != spv::OpMemberDecorate &&
2982 Inst->getOpcode() != spv::OpExtInstImport;
2983 });
2984 //
2985 // Generate OpDecorate.
2986 //
2987 // DOps[0] = Target ID
2988 // DOps[1] = Decoration (Builtin)
2989 // DOps[2] = BuiltIn ID
2990 uint32_t ResultID;
2991
2992 // WorkgroupSize is different: we decorate the constant composite that has
2993 // its value, rather than the variable that we use to access the value.
2994 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2995 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002996 // Save both the value and variable IDs for later.
2997 WorkgroupSizeValueID = InitializerID;
2998 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04002999 } else {
3000 ResultID = VMap[&GV];
3001 }
3002
3003 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003004 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3005 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003006
David Neto87846742018-04-11 17:36:22 -04003007 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003008 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003009 } else if (module_scope_constant_external_init) {
3010 // This module scope constant is initialized from a storage buffer with data
3011 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003012 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003013
David Neto862b7d82018-06-14 18:48:37 -04003014 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003015 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3016 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003017 std::string hexbytes;
3018 llvm::raw_string_ostream str(hexbytes);
3019 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003020 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3021 str.str()};
3022 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3023 0);
David Neto85082642018-03-24 06:55:20 -07003024
3025 // Find Insert Point for OpDecorate.
3026 auto DecoInsertPoint =
3027 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3028 [](SPIRVInstruction *Inst) -> bool {
3029 return Inst->getOpcode() != spv::OpDecorate &&
3030 Inst->getOpcode() != spv::OpMemberDecorate &&
3031 Inst->getOpcode() != spv::OpExtInstImport;
3032 });
3033
David Neto257c3892018-04-11 13:19:45 -04003034 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003035 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003036 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3037 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003038 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003039
3040 // OpDecorate %var DescriptorSet <descriptor_set>
3041 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003042 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3043 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003044 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003045 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003046 }
3047}
3048
David Netoc6f3ab22018-04-06 18:02:31 -04003049void SPIRVProducerPass::GenerateWorkgroupVars() {
3050 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003051 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3052 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003053 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003054
3055 // Generate OpVariable.
3056 //
3057 // GIDOps[0] : Result Type ID
3058 // GIDOps[1] : Storage Class
3059 SPIRVOperandList Ops;
3060 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3061
3062 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003063 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003064 }
3065}
3066
David Neto862b7d82018-06-14 18:48:37 -04003067void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3068 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003069 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3070 return;
3071 }
David Neto862b7d82018-06-14 18:48:37 -04003072 // Gather the list of resources that are used by this function's arguments.
3073 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3074
alan-bakerf5e5f692018-11-27 08:33:24 -05003075 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3076 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003077 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003078 std::string kind =
3079 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3080 ? "pod_ubo"
3081 : argKind;
3082 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003083 };
3084
3085 auto *fty = F.getType()->getPointerElementType();
3086 auto *func_ty = dyn_cast<FunctionType>(fty);
3087
alan-baker038e9242019-04-19 22:14:41 -04003088 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003089 // If an argument maps to a resource variable, then get descriptor set and
3090 // binding from the resource variable. Other info comes from the metadata.
3091 const auto *arg_map = F.getMetadata("kernel_arg_map");
3092 if (arg_map) {
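    // Each kernel_arg_map entry has 7 operands: name, old index, new index, offset, argument size, argument kind, and spec id.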
3093 for (const auto &arg : arg_map->operands()) {
3094 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003095 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003096 const auto name =
3097 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3098 const auto old_index =
3099 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3100 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003101 const size_t new_index = static_cast<size_t>(
3102 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003103 const auto offset =
3104 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003105 const auto arg_size =
3106 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003107 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003108 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003109 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003110 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003111
3112 uint32_t descriptor_set = 0;
3113 uint32_t binding = 0;
3114 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003115 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003116 static_cast<uint32_t>(spec_id),
3117 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003118 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003119 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003120 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3121 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3122 DL));
David Neto862b7d82018-06-14 18:48:37 -04003123 } else {
3124 auto *info = resource_var_at_index[new_index];
3125 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003126 descriptor_set = info->descriptor_set;
3127 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003128 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003129 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3130 binding);
David Neto862b7d82018-06-14 18:48:37 -04003131 }
3132 } else {
3133 // There is no argument map.
3134 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003135 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003136
3137 SmallVector<Argument *, 4> arguments;
3138 for (auto &arg : F.args()) {
3139 arguments.push_back(&arg);
3140 }
3141
3142 unsigned arg_index = 0;
3143 for (auto *info : resource_var_at_index) {
3144 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003145 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003146 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003147 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003148 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003149 }
3150
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003151 // Local pointer arguments are unused in this case. Offset is always
3152 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003153 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3154 F.getName(), arg->getName(),
3155 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3156 0, 0,
3157 0, arg_size};
3158 descriptorMapEntries->emplace_back(std::move(kernel_data),
3159 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003160 }
3161 arg_index++;
3162 }
3163 // Generate mappings for pointer-to-local arguments.
3164 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3165 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003166 auto where = LocalArgSpecIds.find(arg);
3167 if (where != LocalArgSpecIds.end()) {
3168 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003169 // Pod argument members are unused in this case.
3170 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3171 F.getName(),
3172 arg->getName(),
3173 arg_index,
3174 ArgKind::Local,
3175 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003176 static_cast<uint32_t>(
3177 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003178 0,
3179 0};
3180 // Pointer-to-local arguments do not utilize descriptor set and binding.
3181 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003182 }
3183 }
3184 }
3185}
3186
David Neto22f144c2017-06-12 14:26:21 -04003187void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3188 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3189 ValueMapType &VMap = getValueMap();
3190 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003191 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3192 auto &GlobalConstArgSet = getGlobalConstArgSet();
3193
3194 FunctionType *FTy = F.getFunctionType();
3195
3196 //
David Neto22f144c2017-06-12 14:26:21 -04003197 // Generate OpFunction.
3198 //
3199
3200 // FOps[0] : Result Type ID
3201 // FOps[1] : Function Control
3202 // FOps[2] : Function Type ID
3203 SPIRVOperandList FOps;
3204
3205 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003206 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003207
3208 // Check function attributes for SPIRV Function Control.
3209 uint32_t FuncControl = spv::FunctionControlMaskNone;
3210 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3211 FuncControl |= spv::FunctionControlInlineMask;
3212 }
3213 if (F.hasFnAttribute(Attribute::NoInline)) {
3214 FuncControl |= spv::FunctionControlDontInlineMask;
3215 }
3216 // TODO: Check llvm attribute for Function Control Pure.
3217 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3218 FuncControl |= spv::FunctionControlPureMask;
3219 }
3220 // TODO: Check llvm attribute for Function Control Const.
3221 if (F.hasFnAttribute(Attribute::ReadNone)) {
3222 FuncControl |= spv::FunctionControlConstMask;
3223 }
3224
David Neto257c3892018-04-11 13:19:45 -04003225 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003226
3227 uint32_t FTyID;
3228 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
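    // The function type for a kernel entry point takes no parameters; kernel arguments are accessed through resource variables instead.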
3229 SmallVector<Type *, 4> NewFuncParamTys;
3230 FunctionType *NewFTy =
3231 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3232 FTyID = lookupType(NewFTy);
3233 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003234 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003235 if (GlobalConstFuncTyMap.count(FTy)) {
3236 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3237 } else {
3238 FTyID = lookupType(FTy);
3239 }
3240 }
3241
David Neto257c3892018-04-11 13:19:45 -04003242 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003243
3244 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3245 EntryPoints.push_back(std::make_pair(&F, nextID));
3246 }
3247
3248 VMap[&F] = nextID;
3249
David Neto482550a2018-03-24 05:21:07 -07003250 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003251 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3252 }
David Neto22f144c2017-06-12 14:26:21 -04003253 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003254 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003255 SPIRVInstList.push_back(FuncInst);
3256
3257 //
3258 // Generate OpFunctionParameter for Normal function.
3259 //
3260
3261 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003262
3263 // Find Insert Point for OpDecorate.
3264 auto DecoInsertPoint =
3265 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3266 [](SPIRVInstruction *Inst) -> bool {
3267 return Inst->getOpcode() != spv::OpDecorate &&
3268 Inst->getOpcode() != spv::OpMemberDecorate &&
3269 Inst->getOpcode() != spv::OpExtInstImport;
3270 });
3271
David Neto22f144c2017-06-12 14:26:21 -04003272 // Iterate Argument for name instead of param type from function type.
3273 unsigned ArgIdx = 0;
3274 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003275 uint32_t param_id = nextID++;
3276 VMap[&Arg] = param_id;
3277
3278 if (CalledWithCoherentResource(Arg)) {
3279 // If the arg is passed a coherent resource ever, then decorate this
3280 // parameter with Coherent too.
3281 SPIRVOperandList decoration_ops;
3282 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003283 SPIRVInstList.insert(
3284 DecoInsertPoint,
3285 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003286 }
David Neto22f144c2017-06-12 14:26:21 -04003287
3288 // ParamOps[0] : Result Type ID
3289 SPIRVOperandList ParamOps;
3290
3291 // Find SPIRV instruction for parameter type.
3292 uint32_t ParamTyID = lookupType(Arg.getType());
3293 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3294 if (GlobalConstFuncTyMap.count(FTy)) {
3295 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3296 Type *EleTy = PTy->getPointerElementType();
3297 Type *ArgTy =
3298 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3299 ParamTyID = lookupType(ArgTy);
3300 GlobalConstArgSet.insert(&Arg);
3301 }
3302 }
3303 }
David Neto257c3892018-04-11 13:19:45 -04003304 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003305
3306 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003307 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003308 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003309 SPIRVInstList.push_back(ParamInst);
3310
3311 ArgIdx++;
3312 }
3313 }
3314}
3315
alan-bakerb6b09dc2018-11-08 16:59:28 -05003316void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003317 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3318 EntryPointVecType &EntryPoints = getEntryPointVec();
3319 ValueMapType &VMap = getValueMap();
3320 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3321 uint32_t &ExtInstImportID = getOpExtInstImportID();
3322 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3323
3324 // Set up insert point.
3325 auto InsertPoint = SPIRVInstList.begin();
3326
3327 //
3328 // Generate OpCapability
3329 //
3330 // TODO: Which LLVM information is mapped to a SPIR-V Capability?
3331
3332 // Ops[0] = Capability
3333 SPIRVOperandList Ops;
3334
David Neto87846742018-04-11 17:36:22 -04003335 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003336 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003337 SPIRVInstList.insert(InsertPoint, CapInst);
3338
alan-bakerf906d2b2019-12-10 11:26:23 -05003339 bool write_without_format = false;
3340 bool sampled_1d = false;
3341 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003342 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003343 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3344 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003345 SPIRVInstList.insert(
3346 InsertPoint,
3347 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003348 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003349 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003350 SPIRVInstList.insert(
3351 InsertPoint,
3352 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003353 } else if (Ty->isIntegerTy(64)) {
3354 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003355 SPIRVInstList.insert(
3356 InsertPoint,
3357 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003358 } else if (Ty->isHalfTy()) {
3359 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003360 SPIRVInstList.insert(InsertPoint,
3361 new SPIRVInstruction(spv::OpCapability,
3362 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003363 } else if (Ty->isDoubleTy()) {
3364 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003365 SPIRVInstList.insert(InsertPoint,
3366 new SPIRVInstruction(spv::OpCapability,
3367 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003368 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3369 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003370 if (STy->getName().startswith("opencl.image1d_wo_t") ||
3371 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003372 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003373 write_without_format = true;
3374 }
3375 if (STy->getName().startswith("opencl.image1d_ro_t") ||
3376 STy->getName().startswith("opencl.image1d_wo_t")) {
3377 if (STy->getName().contains(".sampled"))
3378 sampled_1d = true;
3379 else
3380 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003381 }
3382 }
3383 }
3384 }
3385
alan-bakerf906d2b2019-12-10 11:26:23 -05003386 if (write_without_format) {
3387 // Generate OpCapability for write only image type.
3388 SPIRVInstList.insert(
3389 InsertPoint,
3390 new SPIRVInstruction(
3391 spv::OpCapability,
3392 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3393 }
3394 if (image_1d) {
3395 // Generate OpCapability for unsampled 1D image type.
3396 SPIRVInstList.insert(InsertPoint,
3397 new SPIRVInstruction(spv::OpCapability,
3398 {MkNum(spv::CapabilityImage1D)}));
3399 } else if (sampled_1d) {
3400 // Generate OpCapability for sampled 1D image type.
3401 SPIRVInstList.insert(
3402 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3403 {MkNum(spv::CapabilitySampled1D)}));
3404 }
3405
David Neto5c22a252018-03-15 16:07:41 -04003406 { // OpCapability ImageQuery
3407 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003408 for (const auto &SymVal : module.getValueSymbolTable()) {
3409 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003410 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003411 hasImageQuery = true;
3412 break;
3413 }
David Neto5c22a252018-03-15 16:07:41 -04003414 }
3415 }
alan-bakerf67468c2019-11-25 15:51:49 -05003416
David Neto5c22a252018-03-15 16:07:41 -04003417 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003418 auto *ImageQueryCapInst = new SPIRVInstruction(
3419 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003420 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3421 }
3422 }
3423
David Neto22f144c2017-06-12 14:26:21 -04003424 if (hasVariablePointers()) {
3425 //
David Neto22f144c2017-06-12 14:26:21 -04003426 // Generate OpCapability.
3427 //
3428 // Ops[0] = Capability
3429 //
3430 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003431 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003432
David Neto87846742018-04-11 17:36:22 -04003433 SPIRVInstList.insert(InsertPoint,
3434 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003435 } else if (hasVariablePointersStorageBuffer()) {
3436 //
3437 // Generate OpCapability.
3438 //
3439 // Ops[0] = Capability
3440 //
3441 Ops.clear();
3442 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003443
alan-baker5b86ed72019-02-15 08:26:50 -05003444 SPIRVInstList.insert(InsertPoint,
3445 new SPIRVInstruction(spv::OpCapability, Ops));
3446 }
3447
3448 // Always add the storage buffer extension
3449 {
David Neto22f144c2017-06-12 14:26:21 -04003450 //
3451 // Generate OpExtension.
3452 //
3453 // Ops[0] = Name (Literal String)
3454 //
alan-baker5b86ed72019-02-15 08:26:50 -05003455 auto *ExtensionInst = new SPIRVInstruction(
3456 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3457 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3458 }
David Neto22f144c2017-06-12 14:26:21 -04003459
alan-baker5b86ed72019-02-15 08:26:50 -05003460 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3461 //
3462 // Generate OpExtension.
3463 //
3464 // Ops[0] = Name (Literal String)
3465 //
3466 auto *ExtensionInst = new SPIRVInstruction(
3467 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3468 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003469 }
3470
3471 if (ExtInstImportID) {
3472 ++InsertPoint;
3473 }
3474
3475 //
3476 // Generate OpMemoryModel
3477 //
3478 // Memory model for Vulkan will always be GLSL450.
3479
3480 // Ops[0] = Addressing Model
3481 // Ops[1] = Memory Model
3482 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003483 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003484
David Neto87846742018-04-11 17:36:22 -04003485 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003486 SPIRVInstList.insert(InsertPoint, MemModelInst);
3487
3488 //
3489 // Generate OpEntryPoint
3490 //
3491 for (auto EntryPoint : EntryPoints) {
3492 // Ops[0] = Execution Model
3493 // Ops[1] = EntryPoint ID
3494 // Ops[2] = Name (Literal String)
3495 // ...
3496 //
3497 // TODO: Do we need to consider Interface ID for forward references???
3498 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003499 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003500 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3501 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003502
David Neto22f144c2017-06-12 14:26:21 -04003503 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003504 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003505 }
3506
David Neto87846742018-04-11 17:36:22 -04003507 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003508 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3509 }
3510
3511 for (auto EntryPoint : EntryPoints) {
3512 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3513 ->getMetadata("reqd_work_group_size")) {
3514
3515 if (!BuiltinDimVec.empty()) {
3516 llvm_unreachable(
3517 "Kernels should have consistent work group size definition");
3518 }
3519
3520 //
3521 // Generate OpExecutionMode
3522 //
3523
3524 // Ops[0] = Entry Point ID
3525 // Ops[1] = Execution Mode
3526 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3527 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003528 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003529
3530 uint32_t XDim = static_cast<uint32_t>(
3531 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3532 uint32_t YDim = static_cast<uint32_t>(
3533 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3534 uint32_t ZDim = static_cast<uint32_t>(
3535 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3536
David Neto257c3892018-04-11 13:19:45 -04003537 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003538
David Neto87846742018-04-11 17:36:22 -04003539 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003540 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3541 }
3542 }
3543
3544 //
3545 // Generate OpSource.
3546 //
3547 // Ops[0] = SourceLanguage ID
3548 // Ops[1] = Version (LiteralNum)
3549 //
3550 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003551 switch (clspv::Option::Language()) {
3552 case clspv::Option::SourceLanguage::OpenCL_C_10:
3553 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3554 break;
3555 case clspv::Option::SourceLanguage::OpenCL_C_11:
3556 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3557 break;
3558 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003559 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003560 break;
3561 case clspv::Option::SourceLanguage::OpenCL_C_20:
3562 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3563 break;
3564 case clspv::Option::SourceLanguage::OpenCL_CPP:
3565 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3566 break;
3567 default:
3568 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3569 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003570 }
David Neto22f144c2017-06-12 14:26:21 -04003571
David Neto87846742018-04-11 17:36:22 -04003572 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003573 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3574
3575 if (!BuiltinDimVec.empty()) {
3576 //
3577 // Generate OpDecorates for x/y/z dimension.
3578 //
3579 // Ops[0] = Target ID
3580 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003581 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003582
3583 // X Dimension
3584 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003585 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003586 SPIRVInstList.insert(InsertPoint,
3587 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003588
3589 // Y Dimension
3590 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003591 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003592 SPIRVInstList.insert(InsertPoint,
3593 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003594
3595 // Z Dimension
3596 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003597 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003598 SPIRVInstList.insert(InsertPoint,
3599 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003600 }
3601}
3602
David Netob6e2e062018-04-25 10:32:06 -04003603void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3604 // Work around a driver bug. Initializers on Private variables might not
3605 // work. So the start of the kernel should store the initializer value to the
3606 // variables. Yes, *every* entry point pays this cost if *any* entry point
3607 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3608 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003609 // TODO(dneto): Remove this at some point once fixed drivers are widely
3610 // available.
David Netob6e2e062018-04-25 10:32:06 -04003611 if (WorkgroupSizeVarID) {
3612 assert(WorkgroupSizeValueID);
3613
3614 SPIRVOperandList Ops;
3615 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3616
3617 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3618 getSPIRVInstList().push_back(Inst);
3619 }
3620}
3621
David Neto22f144c2017-06-12 14:26:21 -04003622void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3623 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3624 ValueMapType &VMap = getValueMap();
3625
David Netob6e2e062018-04-25 10:32:06 -04003626 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003627
3628 for (BasicBlock &BB : F) {
3629 // Register BasicBlock to ValueMap.
3630 VMap[&BB] = nextID;
3631
3632 //
3633 // Generate OpLabel for Basic Block.
3634 //
3635 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003636 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003637 SPIRVInstList.push_back(Inst);
3638
David Neto6dcd4712017-06-23 11:06:47 -04003639 // OpVariable instructions must come first.
3640 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003641 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3642 // Allocating a pointer requires variable pointers.
3643 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003644 setVariablePointersCapabilities(
3645 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003646 }
David Neto6dcd4712017-06-23 11:06:47 -04003647 GenerateInstruction(I);
3648 }
3649 }
3650
David Neto22f144c2017-06-12 14:26:21 -04003651 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003652 if (clspv::Option::HackInitializers()) {
3653 GenerateEntryPointInitialStores();
3654 }
David Neto22f144c2017-06-12 14:26:21 -04003655 }
3656
3657 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003658 if (!isa<AllocaInst>(I)) {
3659 GenerateInstruction(I);
3660 }
David Neto22f144c2017-06-12 14:26:21 -04003661 }
3662 }
3663}
3664
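// Map an LLVM integer or floating-point comparison predicate to the corresponding SPIR-V opcode.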
3665spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3666 const std::map<CmpInst::Predicate, spv::Op> Map = {
3667 {CmpInst::ICMP_EQ, spv::OpIEqual},
3668 {CmpInst::ICMP_NE, spv::OpINotEqual},
3669 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3670 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3671 {CmpInst::ICMP_ULT, spv::OpULessThan},
3672 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3673 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3674 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3675 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3676 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3677 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3678 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3679 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3680 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3681 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3682 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3683 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3684 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3685 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3686 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3687 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3688 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3689
3690 assert(0 != Map.count(I->getPredicate()));
3691
3692 return Map.at(I->getPredicate());
3693}
3694
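// Map an LLVM cast opcode to the corresponding SPIR-V conversion opcode.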
3695spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3696 const std::map<unsigned, spv::Op> Map{
3697 {Instruction::Trunc, spv::OpUConvert},
3698 {Instruction::ZExt, spv::OpUConvert},
3699 {Instruction::SExt, spv::OpSConvert},
3700 {Instruction::FPToUI, spv::OpConvertFToU},
3701 {Instruction::FPToSI, spv::OpConvertFToS},
3702 {Instruction::UIToFP, spv::OpConvertUToF},
3703 {Instruction::SIToFP, spv::OpConvertSToF},
3704 {Instruction::FPTrunc, spv::OpFConvert},
3705 {Instruction::FPExt, spv::OpFConvert},
3706 {Instruction::BitCast, spv::OpBitcast}};
3707
3708 assert(0 != Map.count(I.getOpcode()));
3709
3710 return Map.at(I.getOpcode());
3711}
3712
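// Map an LLVM binary operator to the corresponding SPIR-V opcode. Boolean (i1) operands use the logical opcodes rather than the bitwise ones.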
3713spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003714 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003715 switch (I.getOpcode()) {
3716 default:
3717 break;
3718 case Instruction::Or:
3719 return spv::OpLogicalOr;
3720 case Instruction::And:
3721 return spv::OpLogicalAnd;
3722 case Instruction::Xor:
3723 return spv::OpLogicalNotEqual;
3724 }
3725 }
3726
alan-bakerb6b09dc2018-11-08 16:59:28 -05003727 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003728 {Instruction::Add, spv::OpIAdd},
3729 {Instruction::FAdd, spv::OpFAdd},
3730 {Instruction::Sub, spv::OpISub},
3731 {Instruction::FSub, spv::OpFSub},
3732 {Instruction::Mul, spv::OpIMul},
3733 {Instruction::FMul, spv::OpFMul},
3734 {Instruction::UDiv, spv::OpUDiv},
3735 {Instruction::SDiv, spv::OpSDiv},
3736 {Instruction::FDiv, spv::OpFDiv},
3737 {Instruction::URem, spv::OpUMod},
3738 {Instruction::SRem, spv::OpSRem},
3739 {Instruction::FRem, spv::OpFRem},
3740 {Instruction::Or, spv::OpBitwiseOr},
3741 {Instruction::Xor, spv::OpBitwiseXor},
3742 {Instruction::And, spv::OpBitwiseAnd},
3743 {Instruction::Shl, spv::OpShiftLeftLogical},
3744 {Instruction::LShr, spv::OpShiftRightLogical},
3745 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3746
3747 assert(0 != Map.count(I.getOpcode()));
3748
3749 return Map.at(I.getOpcode());
3750}
3751
3752void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3753 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3754 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003755 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3756 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3757
3758 // Register Instruction to ValueMap.
3759 if (0 == VMap[&I]) {
3760 VMap[&I] = nextID;
3761 }
3762
3763 switch (I.getOpcode()) {
3764 default: {
3765 if (Instruction::isCast(I.getOpcode())) {
3766 //
3767 // Generate SPIRV instructions for cast operators.
3768 //
3769
David Netod2de94a2017-08-28 17:27:47 -04003770 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003771 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003772 auto toI8 = Ty == Type::getInt8Ty(Context);
3773 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003774 // Handle zext, sext and uitofp with i1 type specially.
3775 if ((I.getOpcode() == Instruction::ZExt ||
3776 I.getOpcode() == Instruction::SExt ||
3777 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003778 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003779 //
3780 // Generate OpSelect.
3781 //
3782
3783 // Ops[0] = Result Type ID
3784 // Ops[1] = Condition ID
3785 // Ops[2] = True Constant ID
3786 // Ops[3] = False Constant ID
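        //
        // Illustrative shape (IDs are hypothetical): `%r = zext i1 %c to i32`
        // becomes roughly `%r = OpSelect %uint %c %uint_1 %uint_0`.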
3787 SPIRVOperandList Ops;
3788
David Neto257c3892018-04-11 13:19:45 -04003789 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003790
David Neto22f144c2017-06-12 14:26:21 -04003791 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003792 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003793
3794 uint32_t TrueID = 0;
3795 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003796 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003797 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003798 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003799 } else {
3800 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3801 }
David Neto257c3892018-04-11 13:19:45 -04003802 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003803
3804 uint32_t FalseID = 0;
3805 if (I.getOpcode() == Instruction::ZExt) {
3806 FalseID = VMap[Constant::getNullValue(I.getType())];
3807 } else if (I.getOpcode() == Instruction::SExt) {
3808 FalseID = VMap[Constant::getNullValue(I.getType())];
3809 } else {
3810 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3811 }
David Neto257c3892018-04-11 13:19:45 -04003812 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003813
David Neto87846742018-04-11 17:36:22 -04003814 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003815 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003816 } else if (!clspv::Option::Int8Support() &&
3817 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003818 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3819 // 8 bits.
3820 // Before:
3821 // %result = trunc i32 %a to i8
3822         // After:
3823 // %result = OpBitwiseAnd %uint %a %uint_255
3824
3825 SPIRVOperandList Ops;
3826
David Neto257c3892018-04-11 13:19:45 -04003827 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003828
3829 Type *UintTy = Type::getInt32Ty(Context);
3830 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003831 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003832
David Neto87846742018-04-11 17:36:22 -04003833 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003834 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003835 } else {
3836 // Ops[0] = Result Type ID
3837 // Ops[1] = Source Value ID
3838 SPIRVOperandList Ops;
3839
David Neto257c3892018-04-11 13:19:45 -04003840 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003841
David Neto87846742018-04-11 17:36:22 -04003842 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003843 SPIRVInstList.push_back(Inst);
3844 }
3845 } else if (isa<BinaryOperator>(I)) {
3846 //
3847 // Generate SPIRV instructions for binary operators.
3848 //
3849
3850 // Handle xor with i1 type specially.
3851 if (I.getOpcode() == Instruction::Xor &&
3852 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003853 ((isa<ConstantInt>(I.getOperand(0)) &&
3854 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3855 (isa<ConstantInt>(I.getOperand(1)) &&
3856 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003857 //
3858 // Generate OpLogicalNot.
3859 //
3860 // Ops[0] = Result Type ID
3861 // Ops[1] = Operand
3862 SPIRVOperandList Ops;
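        //
        // Illustrative shape (IDs are hypothetical): `%r = xor i1 %p, true`
        // becomes roughly `%r = OpLogicalNot %bool %p`.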
3863
David Neto257c3892018-04-11 13:19:45 -04003864 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003865
3866 Value *CondV = I.getOperand(0);
3867 if (isa<Constant>(I.getOperand(0))) {
3868 CondV = I.getOperand(1);
3869 }
David Neto257c3892018-04-11 13:19:45 -04003870 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003871
David Neto87846742018-04-11 17:36:22 -04003872 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003873 SPIRVInstList.push_back(Inst);
3874 } else {
3875 // Ops[0] = Result Type ID
3876 // Ops[1] = Operand 0
3877 // Ops[2] = Operand 1
3878 SPIRVOperandList Ops;
3879
David Neto257c3892018-04-11 13:19:45 -04003880 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3881 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003882
David Neto87846742018-04-11 17:36:22 -04003883 auto *Inst =
3884 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003885 SPIRVInstList.push_back(Inst);
3886 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003887 } else if (I.getOpcode() == Instruction::FNeg) {
3888 // The only unary operator.
3889 //
3890 // Ops[0] = Result Type ID
3891 // Ops[1] = Operand 0
3892 SPIRVOperandList ops;
3893
3894 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3895 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3896 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003897 } else {
3898 I.print(errs());
3899 llvm_unreachable("Unsupported instruction???");
3900 }
3901 break;
3902 }
3903 case Instruction::GetElementPtr: {
3904 auto &GlobalConstArgSet = getGlobalConstArgSet();
3905
3906 //
3907 // Generate OpAccessChain.
3908 //
3909 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3914
3915 // Ops[0] = Result Type ID
3916 // Ops[1] = Base ID
3917 // Ops[2] ... Ops[n] = Indexes ID
3918 SPIRVOperandList Ops;
3919
alan-bakerb6b09dc2018-11-08 16:59:28 -05003920 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003921 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3922 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3923 // Use pointer type with private address space for global constant.
3924 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003925 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003926 }
David Neto257c3892018-04-11 13:19:45 -04003927
3928 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003929
David Neto862b7d82018-06-14 18:48:37 -04003930 // Generate the base pointer.
3931 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003932
David Neto862b7d82018-06-14 18:48:37 -04003933 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003934
3935 //
3936     // The gep is lowered according to the following rules:
3937 //
David Neto862b7d82018-06-14 18:48:37 -04003938 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3939 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003940 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3941 // first index.
3942 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3943 // use gep's first index.
3944     // 4. In any other case, generate OpAccessChain and use gep's first
3945     //    index.
3946 //
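    // Illustrative example (names and IDs are hypothetical): with a leading
    // constant-zero index,
    //   %p = getelementptr %S, %S* %base, i32 0, i32 2
    // lowers roughly to
    //   %p = OpAccessChain %ptr_ty %base %uint_2
    // while a non-zero or non-constant first index is kept as the Element
    // operand of an OpPtrAccessChain.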
3947 spv::Op Opcode = spv::OpAccessChain;
3948 unsigned offset = 0;
3949 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003950 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003951 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003952       } else {
David Neto22f144c2017-06-12 14:26:21 -04003953 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003954 }
David Neto862b7d82018-06-14 18:48:37 -04003955 } else {
David Neto22f144c2017-06-12 14:26:21 -04003956 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003957 }
3958
3959 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003960 // Do we need to generate ArrayStride? Check against the GEP result type
3961 // rather than the pointer type of the base because when indexing into
3962 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3963 // for something else in the SPIR-V.
3964 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003965 auto address_space = ResultType->getAddressSpace();
3966 setVariablePointersCapabilities(address_space);
3967 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003968 case spv::StorageClassStorageBuffer:
3969 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003970 // Save the need to generate an ArrayStride decoration. But defer
3971 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003972 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003973 break;
3974 default:
3975 break;
David Neto1a1a0582017-07-07 12:01:44 -04003976 }
David Neto22f144c2017-06-12 14:26:21 -04003977 }
3978
3979 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04003980 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04003981 }
3982
David Neto87846742018-04-11 17:36:22 -04003983 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003984 SPIRVInstList.push_back(Inst);
3985 break;
3986 }
3987 case Instruction::ExtractValue: {
3988 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3989 // Ops[0] = Result Type ID
3990 // Ops[1] = Composite ID
3991 // Ops[2] ... Ops[n] = Indexes (Literal Number)
3992 SPIRVOperandList Ops;
3993
David Neto257c3892018-04-11 13:19:45 -04003994 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003995
3996 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04003997 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003998
3999 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004000 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004001 }
4002
David Neto87846742018-04-11 17:36:22 -04004003 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004004 SPIRVInstList.push_back(Inst);
4005 break;
4006 }
4007 case Instruction::InsertValue: {
4008 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4009 // Ops[0] = Result Type ID
4010 // Ops[1] = Object ID
4011 // Ops[2] = Composite ID
4012 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4013 SPIRVOperandList Ops;
4014
4015 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004016 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004017
4018 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004019 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004020
4021 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004022 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004023
4024 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004025 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004026 }
4027
David Neto87846742018-04-11 17:36:22 -04004028 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004029 SPIRVInstList.push_back(Inst);
4030 break;
4031 }
4032 case Instruction::Select: {
4033 //
4034 // Generate OpSelect.
4035 //
4036
4037 // Ops[0] = Result Type ID
4038 // Ops[1] = Condition ID
4039     // Ops[2] = True Value ID
4040     // Ops[3] = False Value ID
4041 SPIRVOperandList Ops;
4042
4043     // Find the SPIR-V type to use for the result.
4044 auto Ty = I.getType();
4045 if (Ty->isPointerTy()) {
4046 auto PointeeTy = Ty->getPointerElementType();
4047 if (PointeeTy->isStructTy() &&
4048 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4049 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004050 } else {
4051 // Selecting between pointers requires variable pointers.
4052 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4053 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4054 setVariablePointers(true);
4055 }
David Neto22f144c2017-06-12 14:26:21 -04004056 }
4057 }
4058
David Neto257c3892018-04-11 13:19:45 -04004059 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4060 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004061
David Neto87846742018-04-11 17:36:22 -04004062 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004063 SPIRVInstList.push_back(Inst);
4064 break;
4065 }
4066 case Instruction::ExtractElement: {
4067 // Handle <4 x i8> type manually.
4068 Type *CompositeTy = I.getOperand(0)->getType();
4069 if (is4xi8vec(CompositeTy)) {
4070 //
4071 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4072 // <4 x i8>.
4073 //
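      // Illustrative sketch (IDs are hypothetical): extracting byte i from the
      // packed 32-bit word %v becomes roughly
      //   %shifted = OpShiftRightLogical %uint %v %shift   ; shift = i * 8
      //   %result  = OpBitwiseAnd %uint %shifted %uint_255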
4074
4075 //
4076 // Generate OpShiftRightLogical
4077 //
4078 // Ops[0] = Result Type ID
4079 // Ops[1] = Operand 0
4080 // Ops[2] = Operand 1
4081 //
4082 SPIRVOperandList Ops;
4083
David Neto257c3892018-04-11 13:19:45 -04004084 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004085
4086 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004087 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004088
4089 uint32_t Op1ID = 0;
4090 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4091 // Handle constant index.
4092 uint64_t Idx = CI->getZExtValue();
4093 Value *ShiftAmount =
4094 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4095 Op1ID = VMap[ShiftAmount];
4096 } else {
4097 // Handle variable index.
4098 SPIRVOperandList TmpOps;
4099
David Neto257c3892018-04-11 13:19:45 -04004100 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4101 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004102
4103 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004104 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004105
4106 Op1ID = nextID;
4107
David Neto87846742018-04-11 17:36:22 -04004108 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004109 SPIRVInstList.push_back(TmpInst);
4110 }
David Neto257c3892018-04-11 13:19:45 -04004111 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004112
4113 uint32_t ShiftID = nextID;
4114
David Neto87846742018-04-11 17:36:22 -04004115 auto *Inst =
4116 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004117 SPIRVInstList.push_back(Inst);
4118
4119 //
4120 // Generate OpBitwiseAnd
4121 //
4122 // Ops[0] = Result Type ID
4123 // Ops[1] = Operand 0
4124 // Ops[2] = Operand 1
4125 //
4126 Ops.clear();
4127
David Neto257c3892018-04-11 13:19:45 -04004128 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004129
4130 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004131 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004132
David Neto9b2d6252017-09-06 15:47:37 -04004133 // Reset mapping for this value to the result of the bitwise and.
4134 VMap[&I] = nextID;
4135
David Neto87846742018-04-11 17:36:22 -04004136 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004137 SPIRVInstList.push_back(Inst);
4138 break;
4139 }
4140
4141 // Ops[0] = Result Type ID
4142 // Ops[1] = Composite ID
4143 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4144 SPIRVOperandList Ops;
4145
David Neto257c3892018-04-11 13:19:45 -04004146 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004147
4148 spv::Op Opcode = spv::OpCompositeExtract;
4149 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004150 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004151 } else {
David Neto257c3892018-04-11 13:19:45 -04004152 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004153 Opcode = spv::OpVectorExtractDynamic;
4154 }
4155
David Neto87846742018-04-11 17:36:22 -04004156 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004157 SPIRVInstList.push_back(Inst);
4158 break;
4159 }
4160 case Instruction::InsertElement: {
4161 // Handle <4 x i8> type manually.
4162 Type *CompositeTy = I.getOperand(0)->getType();
4163 if (is4xi8vec(CompositeTy)) {
4164 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4165 uint32_t CstFFID = VMap[CstFF];
4166
4167 uint32_t ShiftAmountID = 0;
4168 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4169 // Handle constant index.
4170 uint64_t Idx = CI->getZExtValue();
4171 Value *ShiftAmount =
4172 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4173 ShiftAmountID = VMap[ShiftAmount];
4174 } else {
4175 // Handle variable index.
4176 SPIRVOperandList TmpOps;
4177
David Neto257c3892018-04-11 13:19:45 -04004178 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4179 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004180
4181 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004182 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004183
4184 ShiftAmountID = nextID;
4185
David Neto87846742018-04-11 17:36:22 -04004186 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004187 SPIRVInstList.push_back(TmpInst);
4188 }
4189
4190 //
4191 // Generate mask operations.
4192 //
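      // Illustrative sketch (IDs are hypothetical): inserting byte %b at index
      // i into the packed word %v becomes roughly
      //   %mask     = OpShiftLeftLogical %uint %uint_255 %shift   ; shift = i * 8
      //   %inv_mask = OpNot %uint %mask
      //   %cleared  = OpBitwiseAnd %uint %v %inv_mask
      //   %shifted  = OpShiftLeftLogical %uint %b %shift
      //   %result   = OpBitwiseOr %uint %cleared %shifted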
4193
4194 // ShiftLeft mask according to index of insertelement.
4195 SPIRVOperandList Ops;
4196
David Neto257c3892018-04-11 13:19:45 -04004197 const uint32_t ResTyID = lookupType(CompositeTy);
4198 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004199
4200 uint32_t MaskID = nextID;
4201
David Neto87846742018-04-11 17:36:22 -04004202 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004203 SPIRVInstList.push_back(Inst);
4204
4205 // Inverse mask.
4206 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004207 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004208
4209 uint32_t InvMaskID = nextID;
4210
David Neto87846742018-04-11 17:36:22 -04004211 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004212 SPIRVInstList.push_back(Inst);
4213
4214 // Apply mask.
4215 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004216 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004217
4218 uint32_t OrgValID = nextID;
4219
David Neto87846742018-04-11 17:36:22 -04004220 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004221 SPIRVInstList.push_back(Inst);
4222
4223 // Create correct value according to index of insertelement.
4224 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004225 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4226 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004227
4228 uint32_t InsertValID = nextID;
4229
David Neto87846742018-04-11 17:36:22 -04004230 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004231 SPIRVInstList.push_back(Inst);
4232
4233 // Insert value to original value.
4234 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004235 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004236
David Netoa394f392017-08-26 20:45:29 -04004237 VMap[&I] = nextID;
4238
David Neto87846742018-04-11 17:36:22 -04004239 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004240 SPIRVInstList.push_back(Inst);
4241
4242 break;
4243 }
4244
David Neto22f144c2017-06-12 14:26:21 -04004245 SPIRVOperandList Ops;
4246
James Priced26efea2018-06-09 23:28:32 +01004247 // Ops[0] = Result Type ID
4248 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004249
4250 spv::Op Opcode = spv::OpCompositeInsert;
4251 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004252 const auto value = CI->getZExtValue();
4253 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004254 // Ops[1] = Object ID
4255 // Ops[2] = Composite ID
4256 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004257 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004258 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004259 } else {
James Priced26efea2018-06-09 23:28:32 +01004260 // Ops[1] = Composite ID
4261 // Ops[2] = Object ID
4262 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004263 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004264 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004265 Opcode = spv::OpVectorInsertDynamic;
4266 }
4267
David Neto87846742018-04-11 17:36:22 -04004268 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004269 SPIRVInstList.push_back(Inst);
4270 break;
4271 }
4272 case Instruction::ShuffleVector: {
4273 // Ops[0] = Result Type ID
4274 // Ops[1] = Vector 1 ID
4275 // Ops[2] = Vector 2 ID
4276 // Ops[3] ... Ops[n] = Components (Literal Number)
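    // Note: each mask component is emitted as a literal; an undef lane is
    // emitted as 0xFFFFFFFF, which SPIR-V treats as an undefined component.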
4277 SPIRVOperandList Ops;
4278
David Neto257c3892018-04-11 13:19:45 -04004279 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4280 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004281
4282 uint64_t NumElements = 0;
4283 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4284 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4285
4286 if (Cst->isNullValue()) {
4287 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004288 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004289 }
4290 } else if (const ConstantDataSequential *CDS =
4291 dyn_cast<ConstantDataSequential>(Cst)) {
4292 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4293 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004294 const auto value = CDS->getElementAsInteger(i);
4295 assert(value <= UINT32_MAX);
4296 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004297 }
4298 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4299 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4300 auto Op = CV->getOperand(i);
4301
4302 uint32_t literal = 0;
4303
4304 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4305 literal = static_cast<uint32_t>(CI->getZExtValue());
4306           } else if (isa<UndefValue>(Op)) {
4307 literal = 0xFFFFFFFFu;
4308 } else {
4309 Op->print(errs());
4310 llvm_unreachable("Unsupported element in ConstantVector!");
4311 }
4312
David Neto257c3892018-04-11 13:19:45 -04004313 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004314 }
4315 } else {
4316 Cst->print(errs());
4317 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4318 }
4319 }
4320
David Neto87846742018-04-11 17:36:22 -04004321 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004322 SPIRVInstList.push_back(Inst);
4323 break;
4324 }
4325 case Instruction::ICmp:
4326 case Instruction::FCmp: {
4327 CmpInst *CmpI = cast<CmpInst>(&I);
4328
David Netod4ca2e62017-07-06 18:47:35 -04004329 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004330 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004331 if (isa<PointerType>(ArgTy)) {
4332 CmpI->print(errs());
4333 std::string name = I.getParent()->getParent()->getName();
4334 errs()
4335 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4336 << "in function " << name << "\n";
4337 llvm_unreachable("Pointer equality check is invalid");
4338 break;
4339 }
4340
David Neto257c3892018-04-11 13:19:45 -04004341 // Ops[0] = Result Type ID
4342 // Ops[1] = Operand 1 ID
4343 // Ops[2] = Operand 2 ID
4344 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004345
David Neto257c3892018-04-11 13:19:45 -04004346 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4347 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004348
4349 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004350 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004351 SPIRVInstList.push_back(Inst);
4352 break;
4353 }
4354 case Instruction::Br: {
4355     // A branch instruction is deferred because it needs the label's ID. Record the slot's
4356 // location on SPIRVInstructionList.
4357 DeferredInsts.push_back(
4358 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4359 break;
4360 }
4361 case Instruction::Switch: {
4362 I.print(errs());
4363 llvm_unreachable("Unsupported instruction???");
4364 break;
4365 }
4366 case Instruction::IndirectBr: {
4367 I.print(errs());
4368 llvm_unreachable("Unsupported instruction???");
4369 break;
4370 }
4371 case Instruction::PHI: {
4372 // Branch instrucion is deferred because it needs label's ID. Record slot's
4373     // A PHI instruction is deferred because it needs its incoming blocks' label IDs. Record the slot's
4374 DeferredInsts.push_back(
4375 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4376 break;
4377 }
4378 case Instruction::Alloca: {
4379 //
4380 // Generate OpVariable.
4381 //
4382 // Ops[0] : Result Type ID
4383 // Ops[1] : Storage Class
4384 SPIRVOperandList Ops;
4385
David Neto257c3892018-04-11 13:19:45 -04004386 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004387
David Neto87846742018-04-11 17:36:22 -04004388 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004389 SPIRVInstList.push_back(Inst);
4390 break;
4391 }
4392 case Instruction::Load: {
4393 LoadInst *LD = cast<LoadInst>(&I);
4394 //
4395 // Generate OpLoad.
4396 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004397
alan-baker5b86ed72019-02-15 08:26:50 -05004398 if (LD->getType()->isPointerTy()) {
4399 // Loading a pointer requires variable pointers.
4400 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4401 }
David Neto22f144c2017-06-12 14:26:21 -04004402
David Neto0a2f98d2017-09-15 19:38:40 -04004403 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004404 uint32_t PointerID = VMap[LD->getPointerOperand()];
4405
4406 // This is a hack to work around what looks like a driver bug.
4407 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004408 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4409 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004410 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004411 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004412 // Generate a bitwise-and of the original value with itself.
4413 // We should have been able to get away with just an OpCopyObject,
4414 // but we need something more complex to get past certain driver bugs.
4415 // This is ridiculous, but necessary.
4416 // TODO(dneto): Revisit this once drivers fix their bugs.
4417
4418 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004419 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4420 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004421
David Neto87846742018-04-11 17:36:22 -04004422 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004423 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004424 break;
4425 }
4426
4427 // This is the normal path. Generate a load.
4428
David Neto22f144c2017-06-12 14:26:21 -04004429 // Ops[0] = Result Type ID
4430 // Ops[1] = Pointer ID
4431 // Ops[2] ... Ops[n] = Optional Memory Access
4432 //
4433 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004434
David Neto22f144c2017-06-12 14:26:21 -04004435 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004436 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004437
David Neto87846742018-04-11 17:36:22 -04004438 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004439 SPIRVInstList.push_back(Inst);
4440 break;
4441 }
4442 case Instruction::Store: {
4443 StoreInst *ST = cast<StoreInst>(&I);
4444 //
4445 // Generate OpStore.
4446 //
4447
alan-baker5b86ed72019-02-15 08:26:50 -05004448 if (ST->getValueOperand()->getType()->isPointerTy()) {
4449 // Storing a pointer requires variable pointers.
4450 setVariablePointersCapabilities(
4451 ST->getValueOperand()->getType()->getPointerAddressSpace());
4452 }
4453
David Neto22f144c2017-06-12 14:26:21 -04004454 // Ops[0] = Pointer ID
4455 // Ops[1] = Object ID
4456 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4457 //
4458 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004459 SPIRVOperandList Ops;
4460 Ops << MkId(VMap[ST->getPointerOperand()])
4461 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004462
David Neto87846742018-04-11 17:36:22 -04004463 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004464 SPIRVInstList.push_back(Inst);
4465 break;
4466 }
4467 case Instruction::AtomicCmpXchg: {
4468 I.print(errs());
4469 llvm_unreachable("Unsupported instruction???");
4470 break;
4471 }
4472 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004473 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4474
4475 spv::Op opcode;
4476
4477 switch (AtomicRMW->getOperation()) {
4478 default:
4479 I.print(errs());
4480 llvm_unreachable("Unsupported instruction???");
4481 case llvm::AtomicRMWInst::Add:
4482 opcode = spv::OpAtomicIAdd;
4483 break;
4484 case llvm::AtomicRMWInst::Sub:
4485 opcode = spv::OpAtomicISub;
4486 break;
4487 case llvm::AtomicRMWInst::Xchg:
4488 opcode = spv::OpAtomicExchange;
4489 break;
4490 case llvm::AtomicRMWInst::Min:
4491 opcode = spv::OpAtomicSMin;
4492 break;
4493 case llvm::AtomicRMWInst::Max:
4494 opcode = spv::OpAtomicSMax;
4495 break;
4496 case llvm::AtomicRMWInst::UMin:
4497 opcode = spv::OpAtomicUMin;
4498 break;
4499 case llvm::AtomicRMWInst::UMax:
4500 opcode = spv::OpAtomicUMax;
4501 break;
4502 case llvm::AtomicRMWInst::And:
4503 opcode = spv::OpAtomicAnd;
4504 break;
4505 case llvm::AtomicRMWInst::Or:
4506 opcode = spv::OpAtomicOr;
4507 break;
4508 case llvm::AtomicRMWInst::Xor:
4509 opcode = spv::OpAtomicXor;
4510 break;
4511 }
4512
4513 //
4514 // Generate OpAtomic*.
4515 //
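    // Ops[0] = Result Type ID
    // Ops[1] = Pointer ID
    // Ops[2] = Scope ID (Device)
    // Ops[3] = Memory Semantics ID
    // Ops[4] = Value ID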
4516 SPIRVOperandList Ops;
4517
David Neto257c3892018-04-11 13:19:45 -04004518 Ops << MkId(lookupType(I.getType()))
4519 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004520
4521 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004522 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004523 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004524
4525 const auto ConstantMemorySemantics = ConstantInt::get(
4526 IntTy, spv::MemorySemanticsUniformMemoryMask |
4527 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004528 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004529
David Neto257c3892018-04-11 13:19:45 -04004530 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004531
4532 VMap[&I] = nextID;
4533
David Neto87846742018-04-11 17:36:22 -04004534 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004535 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004536 break;
4537 }
4538 case Instruction::Fence: {
4539 I.print(errs());
4540 llvm_unreachable("Unsupported instruction???");
4541 break;
4542 }
4543 case Instruction::Call: {
4544 CallInst *Call = dyn_cast<CallInst>(&I);
4545 Function *Callee = Call->getCalledFunction();
4546
Alan Baker202c8c72018-08-13 13:47:44 -04004547 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004548 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4549 // Generate an OpLoad
4550 SPIRVOperandList Ops;
4551 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004552
David Neto862b7d82018-06-14 18:48:37 -04004553 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4554 << MkId(ResourceVarDeferredLoadCalls[Call]);
4555
4556 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4557 SPIRVInstList.push_back(Inst);
4558 VMap[Call] = load_id;
4559 break;
4560
4561 } else {
4562 // This maps to an OpVariable we've already generated.
4563 // No code is generated for the call.
4564 }
4565 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004566 } else if (Callee->getName().startswith(
4567 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004568 // Don't codegen an instruction here, but instead map this call directly
4569 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004570 int spec_id = static_cast<int>(
4571 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004572 const auto &info = LocalSpecIdInfoMap[spec_id];
4573 VMap[Call] = info.variable_id;
4574 break;
David Neto862b7d82018-06-14 18:48:37 -04004575 }
4576
4577 // Sampler initializers become a load of the corresponding sampler.
4578
Kévin Petitdf71de32019-04-09 14:09:50 +01004579 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004580 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004581 const auto third_param = static_cast<unsigned>(
4582 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4583 auto sampler_value = third_param;
4584 if (clspv::Option::UseSamplerMap()) {
4585 sampler_value = getSamplerMap()[third_param].first;
4586 }
David Neto862b7d82018-06-14 18:48:37 -04004587
4588 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004589 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004590 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004591
David Neto257c3892018-04-11 13:19:45 -04004592 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004593 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004594
David Neto862b7d82018-06-14 18:48:37 -04004595 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004596 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004597 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004598 break;
4599 }
4600
Kévin Petit349c9502019-03-28 17:24:14 +00004601 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004602 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4603 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4604 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004605
Kévin Petit617a76d2019-04-04 13:54:16 +01004606     // If the switch above didn't have an entry, maybe the intrinsic
4607 // is using the name mangling logic.
4608 bool usesMangler = false;
4609 if (opcode == spv::OpNop) {
4610 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4611 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4612 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4613 usesMangler = true;
4614 }
4615 }
4616
Kévin Petit349c9502019-03-28 17:24:14 +00004617 if (opcode != spv::OpNop) {
4618
David Neto22f144c2017-06-12 14:26:21 -04004619 SPIRVOperandList Ops;
4620
Kévin Petit349c9502019-03-28 17:24:14 +00004621 if (!I.getType()->isVoidTy()) {
4622 Ops << MkId(lookupType(I.getType()));
4623 }
David Neto22f144c2017-06-12 14:26:21 -04004624
Kévin Petit617a76d2019-04-04 13:54:16 +01004625 unsigned firstOperand = usesMangler ? 1 : 0;
4626 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004627 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004628 }
4629
Kévin Petit349c9502019-03-28 17:24:14 +00004630 if (!I.getType()->isVoidTy()) {
4631 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004632 }
4633
Kévin Petit349c9502019-03-28 17:24:14 +00004634 SPIRVInstruction *Inst;
4635 if (!I.getType()->isVoidTy()) {
4636 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4637 } else {
4638 Inst = new SPIRVInstruction(opcode, Ops);
4639 }
Kévin Petit8a560882019-03-21 15:24:34 +00004640 SPIRVInstList.push_back(Inst);
4641 break;
4642 }
4643
David Neto22f144c2017-06-12 14:26:21 -04004644     // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4645 if (Callee->getName().startswith("spirv.copy_memory")) {
4646 //
4647 // Generate OpCopyMemory.
4648 //
4649
4650 // Ops[0] = Dst ID
4651 // Ops[1] = Src ID
4652 // Ops[2] = Memory Access
4653 // Ops[3] = Alignment
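      //
      // Illustrative shape (IDs are hypothetical):
      //   OpCopyMemory %dst %src Aligned 4
      // with Volatile OR'd into the memory access mask when requested.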
4654
4655 auto IsVolatile =
4656 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4657
4658 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4659 : spv::MemoryAccessMaskNone;
4660
4661 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4662
4663 auto Alignment =
4664 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4665
David Neto257c3892018-04-11 13:19:45 -04004666 SPIRVOperandList Ops;
4667 Ops << MkId(VMap[Call->getArgOperand(0)])
4668 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4669 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004670
David Neto87846742018-04-11 17:36:22 -04004671 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004672
4673 SPIRVInstList.push_back(Inst);
4674
4675 break;
4676 }
4677
David Neto22f144c2017-06-12 14:26:21 -04004678 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4679 // Additionally, OpTypeSampledImage is generated.
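    // Illustrative shape for a float sampled read (IDs are hypothetical):
    //   %sampled = OpSampledImage %sampled_image_ty %image %sampler
    //   %texel   = OpImageSampleExplicitLod %v4float %sampled %coord Lod %float_0
    // Integer image reads additionally bitcast the result (see below).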
alan-bakerf67468c2019-11-25 15:51:49 -05004680 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004681 //
4682 // Generate OpSampledImage.
4683 //
4684 // Ops[0] = Result Type ID
4685 // Ops[1] = Image ID
4686 // Ops[2] = Sampler ID
4687 //
4688 SPIRVOperandList Ops;
4689
4690 Value *Image = Call->getArgOperand(0);
4691 Value *Sampler = Call->getArgOperand(1);
4692 Value *Coordinate = Call->getArgOperand(2);
4693
4694 TypeMapType &OpImageTypeMap = getImageTypeMap();
4695 Type *ImageTy = Image->getType()->getPointerElementType();
4696 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004697 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004698 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004699
4700 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004701
4702 uint32_t SampledImageID = nextID;
4703
David Neto87846742018-04-11 17:36:22 -04004704 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004705 SPIRVInstList.push_back(Inst);
4706
4707 //
4708 // Generate OpImageSampleExplicitLod.
4709 //
4710 // Ops[0] = Result Type ID
4711 // Ops[1] = Sampled Image ID
4712 // Ops[2] = Coordinate ID
4713 // Ops[3] = Image Operands Type ID
4714 // Ops[4] ... Ops[n] = Operands ID
4715 //
4716 Ops.clear();
4717
alan-bakerf67468c2019-11-25 15:51:49 -05004718 const bool is_int_image = IsIntImageType(Image->getType());
4719 uint32_t result_type = 0;
4720 if (is_int_image) {
4721 result_type = v4int32ID;
4722 } else {
4723 result_type = lookupType(Call->getType());
4724 }
4725
4726 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4727 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004728
4729 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004730 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004731
alan-bakerf67468c2019-11-25 15:51:49 -05004732 uint32_t final_id = nextID++;
4733 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004734
alan-bakerf67468c2019-11-25 15:51:49 -05004735 uint32_t image_id = final_id;
4736 if (is_int_image) {
4737 // Int image requires a bitcast from v4int to v4uint.
4738 image_id = nextID++;
4739 }
4740
4741 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004742 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004743
4744 if (is_int_image) {
4745 // Generate the bitcast.
4746 Ops.clear();
4747 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4748 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4749 SPIRVInstList.push_back(Inst);
4750 }
David Neto22f144c2017-06-12 14:26:21 -04004751 break;
4752 }
4753
alan-bakerf67468c2019-11-25 15:51:49 -05004754 // write_image is mapped to OpImageWrite.
4755 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004756 //
4757 // Generate OpImageWrite.
4758 //
4759 // Ops[0] = Image ID
4760 // Ops[1] = Coordinate ID
4761 // Ops[2] = Texel ID
4762 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4763 // Ops[4] ... Ops[n] = (Optional) Operands ID
4764 //
4765 SPIRVOperandList Ops;
4766
4767 Value *Image = Call->getArgOperand(0);
4768 Value *Coordinate = Call->getArgOperand(1);
4769 Value *Texel = Call->getArgOperand(2);
4770
4771 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004772 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004773 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004774
4775 const bool is_int_image = IsIntImageType(Image->getType());
4776 if (is_int_image) {
4777 // Generate a bitcast to v4int and use it as the texel value.
4778 uint32_t castID = nextID++;
4779 Ops << MkId(v4int32ID) << MkId(TexelID);
4780 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4781 SPIRVInstList.push_back(cast);
4782 Ops.clear();
4783 TexelID = castID;
4784 }
David Neto257c3892018-04-11 13:19:45 -04004785 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004786
David Neto87846742018-04-11 17:36:22 -04004787 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004788 SPIRVInstList.push_back(Inst);
4789 break;
4790 }
4791
alan-bakerce179f12019-12-06 19:02:22 -05004792 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4793 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004794 //
alan-bakerce179f12019-12-06 19:02:22 -05004795 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004796 //
4797 // Ops[0] = Image ID
4798 //
alan-bakerce179f12019-12-06 19:02:22 -05004799 // Result type has components equal to the dimensionality of the image,
4800 // plus 1 if the image is arrayed.
4801 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004802 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04004803 SPIRVOperandList Ops;
4804
4805 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004806 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4807 uint32_t SizesTypeID = 0;
4808
David Neto5c22a252018-03-15 16:07:41 -04004809 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004810 const uint32_t dim = ImageDimensionality(Image->getType());
alan-bakerf906d2b2019-12-10 11:26:23 -05004811 // TODO(alan-baker): fix component calculation when arrayed images are
4812 // supported.
alan-bakerce179f12019-12-06 19:02:22 -05004813 const uint32_t components = dim;
4814 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004815 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4816 } else {
4817 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4818 }
David Neto5c22a252018-03-15 16:07:41 -04004819 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004820 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004821 spv::Op query_opcode = spv::OpImageQuerySize;
4822 if (clspv::IsSampledImageType(Image->getType())) {
4823 query_opcode = spv::OpImageQuerySizeLod;
4824 // Need explicit 0 for Lod operand.
4825 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4826 Ops << MkId(VMap[CstInt0]);
4827 }
David Neto5c22a252018-03-15 16:07:41 -04004828
4829 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004830 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004831 SPIRVInstList.push_back(QueryInst);
4832
alan-bakerce179f12019-12-06 19:02:22 -05004833 // May require an extra instruction to create the appropriate result of
4834 // the builtin function.
4835 if (clspv::IsGetImageDim(Callee)) {
4836 if (dim == 3) {
4837 // get_image_dim returns an int4 for 3D images.
4838 //
4839 // Reset value map entry since we generated an intermediate
4840 // instruction.
4841 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004842
alan-bakerce179f12019-12-06 19:02:22 -05004843 // Implement:
4844 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4845 Ops.clear();
4846 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4847 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004848
alan-bakerce179f12019-12-06 19:02:22 -05004849 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4850 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004851
alan-bakerce179f12019-12-06 19:02:22 -05004852 auto *Inst =
4853 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4854 SPIRVInstList.push_back(Inst);
4855 } else if (dim != components) {
4856         // get_image_dim returns an int2 regardless of the arrayedness of the
4857 // image. If the image is arrayed an element must be dropped from the
4858 // query result.
4859 //
4860 // Reset value map entry since we generated an intermediate
4861 // instruction.
4862 VMap[&I] = nextID;
4863
4864 // Implement:
4865 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4866 Ops.clear();
4867 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4868 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4869
4870 auto *Inst =
4871 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4872 SPIRVInstList.push_back(Inst);
4873 }
4874 } else if (components > 1) {
4875 // Reset value map entry since we generated an intermediate instruction.
4876 VMap[&I] = nextID;
4877
4878 // Implement:
4879 // %result = OpCompositeExtract %uint %sizes <component number>
4880 Ops.clear();
4881 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4882
4883 uint32_t component = 0;
4884 if (IsGetImageHeight(Callee))
4885 component = 1;
4886 else if (IsGetImageDepth(Callee))
4887 component = 2;
4888 Ops << MkNum(component);
4889
4890 auto *Inst =
4891 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4892 SPIRVInstList.push_back(Inst);
4893 }
David Neto5c22a252018-03-15 16:07:41 -04004894 break;
4895 }
4896
David Neto22f144c2017-06-12 14:26:21 -04004897     // A call instruction is deferred because it needs the function's ID. Record the
4898 // slot's location on SPIRVInstructionList.
4899 DeferredInsts.push_back(
4900 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4901
David Neto3fbb4072017-10-16 11:28:14 -04004902 // Check whether the implementation of this call uses an extended
4903 // instruction plus one more value-producing instruction. If so, then
4904 // reserve the id for the extra value-producing slot.
4905 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4906 if (EInst != kGlslExtInstBad) {
4907 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004908 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004909 VMap[&I] = nextID;
4910 nextID++;
4911 }
4912 break;
4913 }
4914 case Instruction::Ret: {
4915 unsigned NumOps = I.getNumOperands();
4916 if (NumOps == 0) {
4917 //
4918 // Generate OpReturn.
4919 //
David Netoef5ba2b2019-12-20 08:35:54 -05004920 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04004921 } else {
4922 //
4923 // Generate OpReturnValue.
4924 //
4925
4926 // Ops[0] = Return Value ID
4927 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004928
4929 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004930
David Neto87846742018-04-11 17:36:22 -04004931 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004932 SPIRVInstList.push_back(Inst);
4933 break;
4934 }
4935 break;
4936 }
4937 }
4938}
4939
4940void SPIRVProducerPass::GenerateFuncEpilogue() {
4941 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4942
4943 //
4944 // Generate OpFunctionEnd
4945 //
4946
David Netoef5ba2b2019-12-20 08:35:54 -05004947 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004948 SPIRVInstList.push_back(Inst);
4949}
4950
4951bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004952 // Don't specialize <4 x i8> if i8 is generally supported.
4953 if (clspv::Option::Int8Support())
4954 return false;
4955
David Neto22f144c2017-06-12 14:26:21 -04004956 LLVMContext &Context = Ty->getContext();
4957 if (Ty->isVectorTy()) {
4958 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4959 Ty->getVectorNumElements() == 4) {
4960 return true;
4961 }
4962 }
4963
4964 return false;
4965}
4966
4967void SPIRVProducerPass::HandleDeferredInstruction() {
4968 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4969 ValueMapType &VMap = getValueMap();
4970 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4971
4972 for (auto DeferredInst = DeferredInsts.rbegin();
4973 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4974 Value *Inst = std::get<0>(*DeferredInst);
4975 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
4976 if (InsertPoint != SPIRVInstList.end()) {
4977 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4978 ++InsertPoint;
4979 }
4980 }
4981
4982 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004983       // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004984 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004985 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004986 //
4987 // Generate OpLoopMerge.
4988 //
4989 // Ops[0] = Merge Block ID
4990 // Ops[1] = Continue Target ID
4991         // Ops[2] = Loop Control
4992 SPIRVOperandList Ops;
4993
alan-baker06cad652019-12-03 17:56:47 -05004994 auto MergeBB = MergeBlocks[BrBB];
4995 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04004996 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04004997 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04004998 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004999 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005000
David Neto87846742018-04-11 17:36:22 -04005001 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005002 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005003 } else if (MergeBlocks.count(BrBB)) {
5004 //
5005 // Generate OpSelectionMerge.
5006 //
5007 // Ops[0] = Merge Block ID
5008 // Ops[1] = Selection Control
5009 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005010
alan-baker06cad652019-12-03 17:56:47 -05005011 auto MergeBB = MergeBlocks[BrBB];
5012 uint32_t MergeBBID = VMap[MergeBB];
5013 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005014
alan-baker06cad652019-12-03 17:56:47 -05005015 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5016 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005017 }
5018
5019 if (Br->isConditional()) {
5020 //
5021 // Generate OpBranchConditional.
5022 //
5023 // Ops[0] = Condition ID
5024 // Ops[1] = True Label ID
5025 // Ops[2] = False Label ID
5026 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5027 SPIRVOperandList Ops;
5028
5029 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005030 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005031 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005032
5033 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005034
David Neto87846742018-04-11 17:36:22 -04005035 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005036 SPIRVInstList.insert(InsertPoint, BrInst);
5037 } else {
5038 //
5039 // Generate OpBranch.
5040 //
5041 // Ops[0] = Target Label ID
5042 SPIRVOperandList Ops;
5043
5044 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005045 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005046
David Neto87846742018-04-11 17:36:22 -04005047 SPIRVInstList.insert(InsertPoint,
5048 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005049 }
5050 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005051 if (PHI->getType()->isPointerTy()) {
5052 // OpPhi on pointers requires variable pointers.
5053 setVariablePointersCapabilities(
5054 PHI->getType()->getPointerAddressSpace());
5055 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5056 setVariablePointers(true);
5057 }
5058 }
5059
David Neto22f144c2017-06-12 14:26:21 -04005060 //
5061 // Generate OpPhi.
5062 //
5063 // Ops[0] = Result Type ID
5064 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5065 SPIRVOperandList Ops;
5066
David Neto257c3892018-04-11 13:19:45 -04005067 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005068
David Neto22f144c2017-06-12 14:26:21 -04005069 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5070 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005071 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005072 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005073 }
5074
5075 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005076 InsertPoint,
5077 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005078 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5079 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005080 auto callee_name = Callee->getName();
5081 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005082
5083 if (EInst) {
5084 uint32_t &ExtInstImportID = getOpExtInstImportID();
5085
5086 //
5087 // Generate OpExtInst.
5088 //
5089
5090 // Ops[0] = Result Type ID
5091 // Ops[1] = Set ID (OpExtInstImport ID)
5092 // Ops[2] = Instruction Number (Literal Number)
5093 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5094 SPIRVOperandList Ops;
5095
David Neto862b7d82018-06-14 18:48:37 -04005096 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5097 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005098
David Neto22f144c2017-06-12 14:26:21 -04005099 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5100 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005101 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005102 }
5103
David Neto87846742018-04-11 17:36:22 -04005104 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5105 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005106 SPIRVInstList.insert(InsertPoint, ExtInst);
5107
David Neto3fbb4072017-10-16 11:28:14 -04005108 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5109 if (IndirectExtInst != kGlslExtInstBad) {
5110 // Generate one more instruction that uses the result of the extended
5111 // instruction. Its result id is one more than the id of the
5112 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005113 LLVMContext &Context =
5114 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005115
David Neto3fbb4072017-10-16 11:28:14 -04005116 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5117 &VMap, &SPIRVInstList, &InsertPoint](
5118 spv::Op opcode, Constant *constant) {
5119 //
5120 // Generate instruction like:
5121 // result = opcode constant <extinst-result>
5122 //
5123 // Ops[0] = Result Type ID
5124 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5125 // Ops[2] = Operand 1 ;; the result of the extended instruction
5126 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005127
David Neto3fbb4072017-10-16 11:28:14 -04005128 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005129 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005130
5131 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5132 constant = ConstantVector::getSplat(
5133 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5134 }
David Neto257c3892018-04-11 13:19:45 -04005135 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005136
5137 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005138 InsertPoint, new SPIRVInstruction(
5139 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005140 };
5141
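          // Identities behind the extra instruction (a sketch of the
          // intent): the GLSL set has no clz, but for a 32-bit value
          // clz(x) == 31 - FindUMsb(x), hence the OpISub against 31; the
          // *pi variants are produced by scaling the plain acos/asin/atan/
          // atan2 result by 1/pi with an OpFMul of kOneOverPi.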
5142 switch (IndirectExtInst) {
5143 case glsl::ExtInstFindUMsb: // Implementing clz
5144 generate_extra_inst(
5145 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5146 break;
5147 case glsl::ExtInstAcos: // Implementing acospi
5148 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005149 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005150 case glsl::ExtInstAtan2: // Implementing atan2pi
5151 generate_extra_inst(
5152 spv::OpFMul,
5153 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5154 break;
5155
5156 default:
5157 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005158 }
David Neto22f144c2017-06-12 14:26:21 -04005159 }
David Neto3fbb4072017-10-16 11:28:14 -04005160
alan-bakerb39c8262019-03-08 14:03:37 -05005161 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005162 //
5163 // Generate OpBitCount
5164 //
5165 // Ops[0] = Result Type ID
5166 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005167 SPIRVOperandList Ops;
5168 Ops << MkId(lookupType(Call->getType()))
5169 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005170
5171 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005172 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005173 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005174
David Neto862b7d82018-06-14 18:48:37 -04005175 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005176
5177 // Generate an OpCompositeConstruct
5178 SPIRVOperandList Ops;
5179
5180 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005181 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005182
5183 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005184 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005185 }
5186
5187 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005188 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5189 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005190
Alan Baker202c8c72018-08-13 13:47:44 -04005191 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5192
5193 // We have already mapped the call's result value to an ID.
5194 // Don't generate any code now.
5195
5196 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005197
5198 // We have already mapped the call's result value to an ID.
5199 // Don't generate any code now.
5200
David Neto22f144c2017-06-12 14:26:21 -04005201 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005202 if (Call->getType()->isPointerTy()) {
5203 // Functions returning pointers require variable pointers.
5204 setVariablePointersCapabilities(
5205 Call->getType()->getPointerAddressSpace());
5206 }
5207
David Neto22f144c2017-06-12 14:26:21 -04005208 //
5209 // Generate OpFunctionCall.
5210 //
5211
5212 // Ops[0] = Result Type ID
5213 // Ops[1] = Callee Function ID
5214 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5215 SPIRVOperandList Ops;
5216
David Neto862b7d82018-06-14 18:48:37 -04005217 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005218
5219 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005220 if (CalleeID == 0) {
5221 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005222 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005223 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5224 // causes an infinite loop. Instead, go ahead and generate
5225 // the bad function call. A validator will catch the 0-Id.
5226 // llvm_unreachable("Can't translate function call");
5227 }
David Neto22f144c2017-06-12 14:26:21 -04005228
David Neto257c3892018-04-11 13:19:45 -04005229 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005230
David Neto22f144c2017-06-12 14:26:21 -04005231 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5232 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005233 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005234 auto *operand_type = operand->getType();
5235 // Images and samplers can be passed as function parameters without
5236 // variable pointers.
5237 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5238 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005239 auto sc =
5240 GetStorageClass(operand->getType()->getPointerAddressSpace());
5241 if (sc == spv::StorageClassStorageBuffer) {
5242 // Passing SSBO by reference requires variable pointers storage
5243 // buffer.
5244 setVariablePointersStorageBuffer(true);
5245 } else if (sc == spv::StorageClassWorkgroup) {
5246 // Workgroup references require variable pointers if they are not
5247 // memory object declarations.
5248 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5249 // Workgroup accessor represents a variable reference.
5250 if (!operand_call->getCalledFunction()->getName().startswith(
5251 clspv::WorkgroupAccessorFunction()))
5252 setVariablePointers(true);
5253 } else {
5254 // Arguments are function parameters.
5255 if (!isa<Argument>(operand))
5256 setVariablePointers(true);
5257 }
5258 }
5259 }
5260 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005261 }
5262
David Neto87846742018-04-11 17:36:22 -04005263 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5264 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005265 SPIRVInstList.insert(InsertPoint, CallInst);
5266 }
5267 }
5268 }
5269}
5270
David Neto1a1a0582017-07-07 12:01:44 -04005271void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005272 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005273 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005274 }
David Neto1a1a0582017-07-07 12:01:44 -04005275
5276 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005277
5278 // Find an iterator pointing just past the last decoration.
5279 bool seen_decorations = false;
5280 auto DecoInsertPoint =
5281 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5282 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5283 const bool is_decoration =
5284 Inst->getOpcode() == spv::OpDecorate ||
5285 Inst->getOpcode() == spv::OpMemberDecorate;
5286 if (is_decoration) {
5287 seen_decorations = true;
5288 return false;
5289 } else {
5290 return seen_decorations;
5291 }
5292 });
5293
David Netoc6f3ab22018-04-06 18:02:31 -04005294 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5295 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005296 for (auto *type : getTypesNeedingArrayStride()) {
5297 Type *elemTy = nullptr;
5298 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5299 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005300 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005301 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005302 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005303 elemTy = seqTy->getSequentialElementType();
5304 } else {
5305 errs() << "Unhandled strided type " << *type << "\n";
5306 llvm_unreachable("Unhandled strided type");
5307 }
David Neto1a1a0582017-07-07 12:01:44 -04005308
5309 // Ops[0] = Target ID
5310 // Ops[1] = Decoration (ArrayStride)
5311 // Ops[2] = Stride number (Literal Number)
5312 SPIRVOperandList Ops;
5313
David Neto85082642018-03-24 06:55:20 -07005314 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005315 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005316
5317 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5318 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005319
David Neto87846742018-04-11 17:36:22 -04005320 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005321 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5322 }
David Netoc6f3ab22018-04-06 18:02:31 -04005323
5324 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005325 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5326 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005327 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005328 SPIRVOperandList Ops;
5329 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5330 << MkNum(arg_info.spec_id);
5331 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005332 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005333 }
David Neto1a1a0582017-07-07 12:01:44 -04005334}
5335
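// The names below are Itanium-mangled OpenCL builtin signatures, e.g.
// "_Z3absi" is abs(int) and "_Z5clampDv4_fS_S_" is clamp(float4, float4,
// float4). As a rough reading key (not exhaustive): c=char, h=uchar,
// s=short, t=ushort, i=int, j=uint, l=long, m=ulong, f=float, Dv<N>_<T>
// is an N-element vector of T, and S_ repeats an earlier type.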
David Neto22f144c2017-06-12 14:26:21 -04005336glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5337 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005338 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5339 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5340 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5341 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005342 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5343 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5344 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5345 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005346 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5347 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5348 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5349 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005350 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5351 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5352 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5353 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005354 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5355 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5356 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5357 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5358 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5359 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5360 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5361 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005362 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5363 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5364 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5365 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5366 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5367 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5368 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5369 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005370 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5371 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5372 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5373 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5374 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5375 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5376 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5377 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005378 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5379 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5380 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5381 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5382 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5383 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5384 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5385 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005386 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5387 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5388 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5389 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005390 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5391 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5392 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5393 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5394 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5395 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5396 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5397 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005398 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5399 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5400 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5401 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5402 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5403 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5404 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5405 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005406 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5407 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5408 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5409 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5410 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5411 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5412 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5413 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005414 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5415 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5416 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5417 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5418 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5419 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5420 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5421 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005422 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5423 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5424 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5425 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
5426 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005427 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5428 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5429 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5430 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5431 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5432 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5433 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5434 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005435 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5436 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5437 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5438 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5439 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5440 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5441 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5442 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005443 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5444 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5445 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5446 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5447 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5448 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5449 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5450 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005451 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5452 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5453 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5454 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5455 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5456 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5457 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5458 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005459 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5460 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5461 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5462 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
5463 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5464 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5465 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5466 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5467 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5468 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5469 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5470 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5471 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5472 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5473 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5474 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5475 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5476 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5477 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5478 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5479 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5480 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5481 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5482 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5483 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5484 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5485 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5486 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5487 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5488 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5489 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5490 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5491 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5492 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5493 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5494 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5495 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5496 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5497 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5498 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5499 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005500 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005501 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5502 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5503 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5504 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5505 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5506 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5507 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5508 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5509 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5510 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5511 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5512 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5513 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5514 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5515 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5516 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5517 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005518 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005519 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005520 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005521 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005522 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005523 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5524 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005525 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005526 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5527 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5528 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005529 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5530 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5531 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5532 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005533 .Default(kGlslExtInstBad);
5534}
5535
5536glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5537 // Check indirect cases.
5538 return StringSwitch<glsl::ExtInst>(Name)
5539 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5540 // Use exact match on float arg because these need a multiply
5541 // of a constant of the right floating point type.
5542 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5543 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5544 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5545 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5546 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5547 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5548 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5549 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005550 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5551 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5552 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5553 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005554 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5555 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5556 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5557 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5558 .Default(kGlslExtInstBad);
5559}
5560
alan-bakerb6b09dc2018-11-08 16:59:28 -05005561glsl::ExtInst
5562SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005563 auto direct = getExtInstEnum(Name);
5564 if (direct != kGlslExtInstBad)
5565 return direct;
5566 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005567}
5568
David Neto22f144c2017-06-12 14:26:21 -04005569void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005570 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005571}
5572
5573void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5574 WriteOneWord(Inst->getResultID());
5575}
5576
5577void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5578 // High 16 bit : Word Count
5579 // Low 16 bit : Opcode
5580 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005581 const uint32_t count = Inst->getWordCount();
5582 if (count > 65535) {
5583 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5584 llvm_unreachable("Word count too high");
5585 }
David Neto22f144c2017-06-12 14:26:21 -04005586 Word |= Inst->getWordCount() << 16;
5587 WriteOneWord(Word);
5588}
5589
David Netoef5ba2b2019-12-20 08:35:54 -05005590void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005591 SPIRVOperandType OpTy = Op->getType();
5592 switch (OpTy) {
5593 default: {
5594 llvm_unreachable("Unsupported SPIRV Operand Type???");
5595 break;
5596 }
5597 case SPIRVOperandType::NUMBERID: {
5598 WriteOneWord(Op->getNumID());
5599 break;
5600 }
5601 case SPIRVOperandType::LITERAL_STRING: {
5602 std::string Str = Op->getLiteralStr();
5603 const char *Data = Str.c_str();
5604 size_t WordSize = Str.size() / 4;
5605 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5606 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5607 }
5608
5609 uint32_t Remainder = Str.size() % 4;
5610 uint32_t LastWord = 0;
5611 if (Remainder) {
5612 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5613 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5614 }
5615 }
5616
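    // The final word is written unconditionally: SPIR-V literal strings are
    // nul-terminated UTF-8 padded to a word boundary, so when the length is
    // an exact multiple of four an all-zero word is still needed to carry
    // the terminator.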
5617 WriteOneWord(LastWord);
5618 break;
5619 }
5620 case SPIRVOperandType::LITERAL_INTEGER:
5621 case SPIRVOperandType::LITERAL_FLOAT: {
5622 auto LiteralNum = Op->getLiteralNum();
5623    // TODO: Handle LiteralNum carefully.
5624 for (auto Word : LiteralNum) {
5625 WriteOneWord(Word);
5626 }
5627 break;
5628 }
5629 }
5630}
5631
5632void SPIRVProducerPass::WriteSPIRVBinary() {
5633 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5634
5635 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005636 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005637 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5638
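    // The cases below are grouped by operand layout rather than by meaning:
    // instructions with no result ID write every operand as-is; type
    // declarations (plus OpLabel and OpExtInstImport) write their result ID
    // immediately after the opcode word; and value-producing instructions
    // write their result type (operand 0), then the result ID, then the
    // remaining operands.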
5639 switch (Opcode) {
5640 default: {
David Neto5c22a252018-03-15 16:07:41 -04005641 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005642 llvm_unreachable("Unsupported SPIRV instruction");
5643 break;
5644 }
5645 case spv::OpCapability:
5646 case spv::OpExtension:
5647 case spv::OpMemoryModel:
5648 case spv::OpEntryPoint:
5649 case spv::OpExecutionMode:
5650 case spv::OpSource:
5651 case spv::OpDecorate:
5652 case spv::OpMemberDecorate:
5653 case spv::OpBranch:
5654 case spv::OpBranchConditional:
5655 case spv::OpSelectionMerge:
5656 case spv::OpLoopMerge:
5657 case spv::OpStore:
5658 case spv::OpImageWrite:
5659 case spv::OpReturnValue:
5660 case spv::OpControlBarrier:
5661 case spv::OpMemoryBarrier:
5662 case spv::OpReturn:
5663 case spv::OpFunctionEnd:
5664 case spv::OpCopyMemory: {
5665 WriteWordCountAndOpcode(Inst);
5666 for (uint32_t i = 0; i < Ops.size(); i++) {
5667 WriteOperand(Ops[i]);
5668 }
5669 break;
5670 }
5671 case spv::OpTypeBool:
5672 case spv::OpTypeVoid:
5673 case spv::OpTypeSampler:
5674 case spv::OpLabel:
5675 case spv::OpExtInstImport:
5676 case spv::OpTypePointer:
5677 case spv::OpTypeRuntimeArray:
5678 case spv::OpTypeStruct:
5679 case spv::OpTypeImage:
5680 case spv::OpTypeSampledImage:
5681 case spv::OpTypeInt:
5682 case spv::OpTypeFloat:
5683 case spv::OpTypeArray:
5684 case spv::OpTypeVector:
5685 case spv::OpTypeFunction: {
5686 WriteWordCountAndOpcode(Inst);
5687 WriteResultID(Inst);
5688 for (uint32_t i = 0; i < Ops.size(); i++) {
5689 WriteOperand(Ops[i]);
5690 }
5691 break;
5692 }
5693 case spv::OpFunction:
5694 case spv::OpFunctionParameter:
5695 case spv::OpAccessChain:
5696 case spv::OpPtrAccessChain:
5697 case spv::OpInBoundsAccessChain:
5698 case spv::OpUConvert:
5699 case spv::OpSConvert:
5700 case spv::OpConvertFToU:
5701 case spv::OpConvertFToS:
5702 case spv::OpConvertUToF:
5703 case spv::OpConvertSToF:
5704 case spv::OpFConvert:
5705 case spv::OpConvertPtrToU:
5706 case spv::OpConvertUToPtr:
5707 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005708 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005709 case spv::OpIAdd:
5710 case spv::OpFAdd:
5711 case spv::OpISub:
5712 case spv::OpFSub:
5713 case spv::OpIMul:
5714 case spv::OpFMul:
5715 case spv::OpUDiv:
5716 case spv::OpSDiv:
5717 case spv::OpFDiv:
5718 case spv::OpUMod:
5719 case spv::OpSRem:
5720 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005721 case spv::OpUMulExtended:
5722 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005723 case spv::OpBitwiseOr:
5724 case spv::OpBitwiseXor:
5725 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005726 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005727 case spv::OpShiftLeftLogical:
5728 case spv::OpShiftRightLogical:
5729 case spv::OpShiftRightArithmetic:
5730 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005731 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005732 case spv::OpCompositeExtract:
5733 case spv::OpVectorExtractDynamic:
5734 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005735 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005736 case spv::OpVectorInsertDynamic:
5737 case spv::OpVectorShuffle:
5738 case spv::OpIEqual:
5739 case spv::OpINotEqual:
5740 case spv::OpUGreaterThan:
5741 case spv::OpUGreaterThanEqual:
5742 case spv::OpULessThan:
5743 case spv::OpULessThanEqual:
5744 case spv::OpSGreaterThan:
5745 case spv::OpSGreaterThanEqual:
5746 case spv::OpSLessThan:
5747 case spv::OpSLessThanEqual:
5748 case spv::OpFOrdEqual:
5749 case spv::OpFOrdGreaterThan:
5750 case spv::OpFOrdGreaterThanEqual:
5751 case spv::OpFOrdLessThan:
5752 case spv::OpFOrdLessThanEqual:
5753 case spv::OpFOrdNotEqual:
5754 case spv::OpFUnordEqual:
5755 case spv::OpFUnordGreaterThan:
5756 case spv::OpFUnordGreaterThanEqual:
5757 case spv::OpFUnordLessThan:
5758 case spv::OpFUnordLessThanEqual:
5759 case spv::OpFUnordNotEqual:
5760 case spv::OpExtInst:
5761 case spv::OpIsInf:
5762 case spv::OpIsNan:
5763 case spv::OpAny:
5764 case spv::OpAll:
5765 case spv::OpUndef:
5766 case spv::OpConstantNull:
5767 case spv::OpLogicalOr:
5768 case spv::OpLogicalAnd:
5769 case spv::OpLogicalNot:
5770 case spv::OpLogicalNotEqual:
5771 case spv::OpConstantComposite:
5772 case spv::OpSpecConstantComposite:
5773 case spv::OpConstantTrue:
5774 case spv::OpConstantFalse:
5775 case spv::OpConstant:
5776 case spv::OpSpecConstant:
5777 case spv::OpVariable:
5778 case spv::OpFunctionCall:
5779 case spv::OpSampledImage:
5780 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005781 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005782 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005783 case spv::OpSelect:
5784 case spv::OpPhi:
5785 case spv::OpLoad:
5786 case spv::OpAtomicIAdd:
5787 case spv::OpAtomicISub:
5788 case spv::OpAtomicExchange:
5789 case spv::OpAtomicIIncrement:
5790 case spv::OpAtomicIDecrement:
5791 case spv::OpAtomicCompareExchange:
5792 case spv::OpAtomicUMin:
5793 case spv::OpAtomicSMin:
5794 case spv::OpAtomicUMax:
5795 case spv::OpAtomicSMax:
5796 case spv::OpAtomicAnd:
5797 case spv::OpAtomicOr:
5798 case spv::OpAtomicXor:
5799 case spv::OpDot: {
5800 WriteWordCountAndOpcode(Inst);
5801 WriteOperand(Ops[0]);
5802 WriteResultID(Inst);
5803 for (uint32_t i = 1; i < Ops.size(); i++) {
5804 WriteOperand(Ops[i]);
5805 }
5806 break;
5807 }
5808 }
5809 }
5810}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005811
alan-bakerb6b09dc2018-11-08 16:59:28 -05005812bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005813 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005814 case Type::HalfTyID:
5815 case Type::FloatTyID:
5816 case Type::DoubleTyID:
5817 case Type::IntegerTyID:
5818 case Type::VectorTyID:
5819 return true;
5820 case Type::PointerTyID: {
5821 const PointerType *pointer_type = cast<PointerType>(type);
5822 if (pointer_type->getPointerAddressSpace() !=
5823 AddressSpace::UniformConstant) {
5824 auto pointee_type = pointer_type->getPointerElementType();
5825 if (pointee_type->isStructTy() &&
5826 cast<StructType>(pointee_type)->isOpaque()) {
5827 // Images and samplers are not nullable.
5828 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005829 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005830 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005831 return true;
5832 }
5833 case Type::ArrayTyID:
5834 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5835 case Type::StructTyID: {
5836 const StructType *struct_type = cast<StructType>(type);
5837 // Images and samplers are not nullable.
5838 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005839 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005840 for (const auto element : struct_type->elements()) {
5841 if (!IsTypeNullable(element))
5842 return false;
5843 }
5844 return true;
5845 }
5846 default:
5847 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005848 }
5849}
Alan Bakerfcda9482018-10-02 17:09:59 -04005850
5851void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5852 if (auto *offsets_md =
5853 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5854    // Metadata is stored as key-value pair operands. The first element of each
5855 // operand is the type and the second is a vector of offsets.
5856 for (const auto *operand : offsets_md->operands()) {
5857 const auto *pair = cast<MDTuple>(operand);
5858 auto *type =
5859 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5860 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5861 std::vector<uint32_t> offsets;
5862 for (const Metadata *offset_md : offset_vector->operands()) {
5863 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005864 offsets.push_back(static_cast<uint32_t>(
5865 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005866 }
5867 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5868 }
5869 }
5870
5871 if (auto *sizes_md =
5872 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5873 // Metadata is stored as key-value pair operands. The first element of each
5874 // operand is the type and the second is a triple of sizes: type size in
5875 // bits, store size and alloc size.
5876 for (const auto *operand : sizes_md->operands()) {
5877 const auto *pair = cast<MDTuple>(operand);
5878 auto *type =
5879 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5880 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5881 uint64_t type_size_in_bits =
5882 cast<ConstantInt>(
5883 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5884 ->getZExtValue();
5885 uint64_t type_store_size =
5886 cast<ConstantInt>(
5887 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5888 ->getZExtValue();
5889 uint64_t type_alloc_size =
5890 cast<ConstantInt>(
5891 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5892 ->getZExtValue();
5893 RemappedUBOTypeSizes.insert(std::make_pair(
5894 type, std::make_tuple(type_size_in_bits, type_store_size,
5895 type_alloc_size)));
5896 }
5897 }
5898}
5899
5900uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5901 const DataLayout &DL) {
5902 auto iter = RemappedUBOTypeSizes.find(type);
5903 if (iter != RemappedUBOTypeSizes.end()) {
5904 return std::get<0>(iter->second);
5905 }
5906
5907 return DL.getTypeSizeInBits(type);
5908}
5909
5910uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5911 auto iter = RemappedUBOTypeSizes.find(type);
5912 if (iter != RemappedUBOTypeSizes.end()) {
5913 return std::get<1>(iter->second);
5914 }
5915
5916 return DL.getTypeStoreSize(type);
5917}
5918
5919uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5920 auto iter = RemappedUBOTypeSizes.find(type);
5921 if (iter != RemappedUBOTypeSizes.end()) {
5922 return std::get<2>(iter->second);
5923 }
5924
5925 return DL.getTypeAllocSize(type);
5926}
alan-baker5b86ed72019-02-15 08:26:50 -05005927
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005928void SPIRVProducerPass::setVariablePointersCapabilities(
5929 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005930 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5931 setVariablePointersStorageBuffer(true);
5932 } else {
5933 setVariablePointers(true);
5934 }
5935}
5936
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005937Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005938 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5939 return GetBasePointer(gep->getPointerOperand());
5940 }
5941
5942 // Conservatively return |v|.
5943 return v;
5944}
5945
5946bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5947 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5948 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5949 if (lhs_call->getCalledFunction()->getName().startswith(
5950 clspv::ResourceAccessorFunction()) &&
5951 rhs_call->getCalledFunction()->getName().startswith(
5952 clspv::ResourceAccessorFunction())) {
5953 // For resource accessors, match descriptor set and binding.
5954 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5955 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5956 return true;
5957 } else if (lhs_call->getCalledFunction()->getName().startswith(
5958 clspv::WorkgroupAccessorFunction()) &&
5959 rhs_call->getCalledFunction()->getName().startswith(
5960 clspv::WorkgroupAccessorFunction())) {
5961 // For workgroup resources, match spec id.
5962 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5963 return true;
5964 }
5965 }
5966 }
5967
5968 return false;
5969}
5970
5971bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5972 assert(inst->getType()->isPointerTy());
5973 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5974 spv::StorageClassStorageBuffer);
5975 const bool hack_undef = clspv::Option::HackUndef();
5976 if (auto *select = dyn_cast<SelectInst>(inst)) {
5977 auto *true_base = GetBasePointer(select->getTrueValue());
5978 auto *false_base = GetBasePointer(select->getFalseValue());
5979
5980 if (true_base == false_base)
5981 return true;
5982
5983 // If either the true or false operand is a null, then we satisfy the same
5984 // object constraint.
5985 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5986 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5987 return true;
5988 }
5989
5990 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5991 if (false_cst->isNullValue() ||
5992 (hack_undef && isa<UndefValue>(false_base)))
5993 return true;
5994 }
5995
5996 if (sameResource(true_base, false_base))
5997 return true;
5998 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5999 Value *value = nullptr;
6000 bool ok = true;
6001 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6002 auto *base = GetBasePointer(phi->getIncomingValue(i));
6003      // Null values satisfy the constraint of selecting from the
6004 // same object.
6005 if (!value) {
6006 if (auto *cst = dyn_cast<Constant>(base)) {
6007 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6008 value = base;
6009 } else {
6010 value = base;
6011 }
6012 } else if (base != value) {
6013 if (auto *base_cst = dyn_cast<Constant>(base)) {
6014 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6015 continue;
6016 }
6017
6018 if (sameResource(value, base))
6019 continue;
6020
6021 // Values don't represent the same base.
6022 ok = false;
6023 }
6024 }
6025
6026 return ok;
6027 }
6028
6029 // Conservatively return false.
6030 return false;
6031}
alan-bakere9308012019-03-15 10:25:13 -04006032
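// Walks backwards from every value passed for |Arg| at its call sites,
// tracing through function arguments (to their callers) and through global
// address space pointer operands, looking for a resource accessor call whose
// coherent operand (operand 5) is set; if one is reachable, the argument is
// treated as coherent.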
6033bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6034 if (!Arg.getType()->isPointerTy() ||
6035 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6036 // Only SSBOs need to be annotated as coherent.
6037 return false;
6038 }
6039
6040 DenseSet<Value *> visited;
6041 std::vector<Value *> stack;
6042 for (auto *U : Arg.getParent()->users()) {
6043 if (auto *call = dyn_cast<CallInst>(U)) {
6044 stack.push_back(call->getOperand(Arg.getArgNo()));
6045 }
6046 }
6047
6048 while (!stack.empty()) {
6049 Value *v = stack.back();
6050 stack.pop_back();
6051
6052 if (!visited.insert(v).second)
6053 continue;
6054
6055 auto *resource_call = dyn_cast<CallInst>(v);
6056 if (resource_call &&
6057 resource_call->getCalledFunction()->getName().startswith(
6058 clspv::ResourceAccessorFunction())) {
6059 // If this is a resource accessor function, check if the coherent operand
6060 // is set.
6061 const auto coherent =
6062 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6063 ->getZExtValue());
6064 if (coherent == 1)
6065 return true;
6066 } else if (auto *arg = dyn_cast<Argument>(v)) {
6067 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006068 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006069 if (auto *call = dyn_cast<CallInst>(U)) {
6070 stack.push_back(call->getOperand(arg->getArgNo()));
6071 }
6072 }
6073 } else if (auto *user = dyn_cast<User>(v)) {
6074 // If this is a user, traverse all operands that could lead to resource
6075 // variables.
6076 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6077 Value *operand = user->getOperand(i);
6078 if (operand->getType()->isPointerTy() &&
6079 operand->getType()->getPointerAddressSpace() ==
6080 clspv::AddressSpace::Global) {
6081 stack.push_back(operand);
6082 }
6083 }
6084 }
6085 }
6086
6087 // No coherent resource variables encountered.
6088 return false;
6089}
alan-baker06cad652019-12-03 17:56:47 -05006090
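// Builds the MergeBlocks/ContinueBlocks maps consumed when the deferred
// branches are emitted. Both maps are keyed by the block whose terminating
// branch needs a merge instruction: loop headers get an OpLoopMerge pair
// (merge block and continue target), while other conditional branches that
// are not loop back-edges get only an OpSelectionMerge merge block.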
6091void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6092 // First, track loop merges and continues.
6093 DenseSet<BasicBlock *> LoopMergesAndContinues;
6094 for (auto &F : module) {
6095 if (F.isDeclaration())
6096 continue;
6097
6098 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6099 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6100 std::deque<BasicBlock *> order;
6101 DenseSet<BasicBlock *> visited;
6102 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6103
6104 for (auto BB : order) {
6105 auto terminator = BB->getTerminator();
6106 auto branch = dyn_cast<BranchInst>(terminator);
6107 if (LI.isLoopHeader(BB)) {
6108 auto L = LI.getLoopFor(BB);
6109 BasicBlock *ContinueBB = nullptr;
6110 BasicBlock *MergeBB = nullptr;
6111
6112 MergeBB = L->getExitBlock();
6113 if (!MergeBB) {
6114          // The StructurizeCFG pass converts the CFG into a triangle shape
6115          // with single-entry, single-exit regions, so a loop should not
6116          // have multiple exits.
6117 llvm_unreachable("Loop has multiple exits???");
6118 }
6119
6120 if (L->isLoopLatch(BB)) {
6121 ContinueBB = BB;
6122 } else {
6123          // From SPIR-V spec section 2.11, the Continue Target must dominate
6124          // the back-edge block.
6125 BasicBlock *Header = L->getHeader();
6126 BasicBlock *Latch = L->getLoopLatch();
6127 for (auto *loop_block : L->blocks()) {
6128 if (loop_block == Header) {
6129 continue;
6130 }
6131
6132            // Check whether this block dominates the block with the back-edge.
6133            // The loop latch is the single block with a back-edge. If it was
6134            // possible, StructurizeCFG made the loop conform to this
6135            // requirement; otherwise |Latch| is a nullptr.
6136 if (DT.dominates(loop_block, Latch)) {
6137 ContinueBB = loop_block;
6138 }
6139 }
6140
6141 if (!ContinueBB) {
6142 llvm_unreachable("Wrong continue block from loop");
6143 }
6144 }
6145
6146 // Record the continue and merge blocks.
6147 MergeBlocks[BB] = MergeBB;
6148 ContinueBlocks[BB] = ContinueBB;
6149 LoopMergesAndContinues.insert(MergeBB);
6150 LoopMergesAndContinues.insert(ContinueBB);
6151 } else if (branch && branch->isConditional()) {
6152 auto L = LI.getLoopFor(BB);
6153 bool HasBackedge = false;
6154 while (L && !HasBackedge) {
6155 if (L->isLoopLatch(BB)) {
6156 HasBackedge = true;
6157 }
6158 L = L->getParentLoop();
6159 }
6160
6161 if (!HasBackedge) {
6162 // Only need a merge if the branch doesn't include a loop break or
6163 // continue.
6164 auto true_bb = branch->getSuccessor(0);
6165 auto false_bb = branch->getSuccessor(1);
6166 if (!LoopMergesAndContinues.count(true_bb) &&
6167 !LoopMergesAndContinues.count(false_bb)) {
6168            // The StructurizeCFG pass has already manipulated the CFG. Just use
6169            // the false block of the branch instruction as the merge block.
6170 MergeBlocks[BB] = false_bb;
6171 }
6172 }
6173 }
6174 }
6175 }
6176}