// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
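// Note: 0 is not a valid GLSL.std.450 extended instruction, so the value
// below serves as a "no extended instruction" sentinel.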
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

// SPIRV Module Sections (per 2.4 of the SPIRV spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  kSectionCount
};

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
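      // size() + 4 rounds up to a whole number of 32-bit words once the
      // terminator is included, e.g. a 4-character string occupies 2 words.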
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
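// Illustrative usage sketch (not taken from the original sources; the ids and
// literal values are made up): operands are streamed into a list with
// operator<< and then handed to a SPIRVInstruction, which takes ownership of
// them and computes its own word count.
//
//   SPIRVOperandList Ops;
//   Ops << MkId(target_id) << MkNum(spv::DecorationArrayStride) << MkNum(16);
//   auto *Inst = new SPIRVInstruction(spv::OpDecorate, Ops);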

struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the single
  // operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (int i = 0; i < kSectionCount; ++i) {
      for (auto *Inst : SPIRVSections[i]) {
        delete Inst;
      }
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GeneratePushConstantDescriptormapEntries(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ..., 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V ID.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  if (ShowProducerIR) {
    llvm::outs() << module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate descriptor map entries for all push constants.
  GeneratePushConstantDescriptormapEntries(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
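    // Reassemble the little-endian byte stream into 32-bit words so that the
    // C initializer list is emitted one word per element.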
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
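  // The SPIR-V header is five words: magic number, version, generator,
  // bound, and schema. The bound is patched later, once nextID is final.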
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates artificial LLVM IR, such as global variables for
  // arguments, constants, and pointer types for argument access. It is
  // artificial in the sense that it exists only to support the Vulkan SPIR-V
  // output. It runs ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.
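          // For example, (zext i1 %b to i32) will later be lowered to
          // something like OpSelect %b, 1, 0; the sext form selects -1, and
          // uitofp selects 1.0 / 0.0.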

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          } else if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          } else if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_array_wo_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
Kévin Petitbbbda972020-03-03 19:16:31 +00001094  // the same clspv.resource.var.X function is seen with distinct
David Neto862b7d82018-06-14 18:48:37 -04001095 // (set,binding) values.
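  // Illustrative shape of such an accessor call (operand names are
  // descriptive only, not necessarily the exact signature clspv emits):
  //   %p = call @clspv.resource.var.N(i32 <set>, i32 <binding>,
  //            i32 <arg kind>, i32 <arg index>, ..., i32 <coherent>)
  // The loop below reads operands 0, 1, 2, 3 and 5 in those roles.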
1096 const bool always_distinct_sets =
1097 clspv::Option::DistinctKernelDescriptorSets();
1098 for (Function &F : M) {
1099 // Rely on the fact the resource var functions have a stable ordering
1100 // in the module.
Alan Baker202c8c72018-08-13 13:47:44 -04001101 if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001102 // Find all calls to this function with distinct set and binding pairs.
1103 // Save them in ResourceVarInfoList.
1104
1105      // Determine uniqueness of the (set,binding) pairs only within this
1106 // one resource-var builtin function.
1107 using SetAndBinding = std::pair<unsigned, unsigned>;
1108 // Maps set and binding to the resource var info.
1109 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1110 bool first_use = true;
1111 for (auto &U : F.uses()) {
1112 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1113 const auto set = unsigned(
1114 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1115 const auto binding = unsigned(
1116 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1117 const auto arg_kind = clspv::ArgKind(
1118 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1119 const auto arg_index = unsigned(
1120 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001121 const auto coherent = unsigned(
1122 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001123
1124 // Find or make the resource var info for this combination.
1125 ResourceVarInfo *rv = nullptr;
1126 if (always_distinct_sets) {
1127 // Make a new resource var any time we see a different
1128 // (set,binding) pair.
1129 SetAndBinding key{set, binding};
1130 auto where = set_and_binding_map.find(key);
1131 if (where == set_and_binding_map.end()) {
1132 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001133 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001134 ResourceVarInfoList.emplace_back(rv);
1135 set_and_binding_map[key] = rv;
1136 } else {
1137 rv = where->second;
1138 }
1139 } else {
1140 // The default is to make exactly one resource for each
1141 // clspv.resource.var.* function.
1142 if (first_use) {
1143 first_use = false;
1144 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001145 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001146 ResourceVarInfoList.emplace_back(rv);
1147 } else {
1148 rv = ResourceVarInfoList.back().get();
1149 }
1150 }
1151
1152 // Now populate FunctionToResourceVarsMap.
1153 auto &mapping =
1154 FunctionToResourceVarsMap[call->getParent()->getParent()];
1155 while (mapping.size() <= arg_index) {
1156 mapping.push_back(nullptr);
1157 }
1158 mapping[arg_index] = rv;
1159 }
1160 }
1161 }
1162 }
1163
1164 // Populate ModuleOrderedResourceVars.
1165 for (Function &F : M) {
1166 auto where = FunctionToResourceVarsMap.find(&F);
1167 if (where != FunctionToResourceVarsMap.end()) {
1168 for (auto &rv : where->second) {
1169 if (rv != nullptr) {
1170 ModuleOrderedResourceVars.insert(rv);
1171 }
1172 }
1173 }
1174 }
1175 if (ShowResourceVars) {
1176 for (auto *info : ModuleOrderedResourceVars) {
1177 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1178 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1179 << "\n";
1180 }
1181 }
1182}
1183
David Neto22f144c2017-06-12 14:26:21 -04001184bool SPIRVProducerPass::FindExtInst(Module &M) {
1185 LLVMContext &Context = M.getContext();
1186 bool HasExtInst = false;
1187
1188 for (Function &F : M) {
1189 for (BasicBlock &BB : F) {
1190 for (Instruction &I : BB) {
1191 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1192 Function *Callee = Call->getCalledFunction();
1193 // Check whether this call is for extend instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001194 auto callee_name = Callee->getName();
1195 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1196 const glsl::ExtInst IndirectEInst =
1197 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001198
David Neto3fbb4072017-10-16 11:28:14 -04001199 HasExtInst |=
1200 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1201
1202 if (IndirectEInst) {
1203 // Register extra constants if needed.
1204
1205 // Registers a type and constant for computing the result of the
1206 // given instruction. If the result of the instruction is a vector,
1207 // then make a splat vector constant with the same number of
1208 // elements.
1209 auto register_constant = [this, &I](Constant *constant) {
1210 FindType(constant->getType());
1211 FindConstant(constant);
1212 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1213 // Register the splat vector of the value with the same
1214 // width as the result of the instruction.
1215 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001216 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001217 constant);
1218 FindConstant(vec_constant);
1219 FindType(vec_constant->getType());
1220 }
1221 };
1222 switch (IndirectEInst) {
1223 case glsl::ExtInstFindUMsb:
1224 // clz needs OpExtInst and OpISub with constant 31, or splat
1225 // vector of 31. Add it to the constant list here.
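            // Sketch of the eventual expansion (scalar case): clz(x) is
            // emitted roughly as 31 - FindUMsb(x), so the constant 31 (or a
            // splat of 31 for vectors) must already be registered.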
1226 register_constant(
1227 ConstantInt::get(Type::getInt32Ty(Context), 31));
1228 break;
1229 case glsl::ExtInstAcos:
1230 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001231 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001232 case glsl::ExtInstAtan2:
1233 // We need 1/pi for acospi, asinpi, atan2pi.
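            // e.g. acospi(x) is later lowered roughly as acos(x) * (1/pi);
            // this is only a sketch, but it shows why kOneOverPi must be
            // registered as a constant here.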
1234 register_constant(
1235 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1236 break;
1237 default:
1238 assert(false && "internally inconsistent");
1239 }
David Neto22f144c2017-06-12 14:26:21 -04001240 }
1241 }
1242 }
1243 }
1244 }
1245
1246 return HasExtInst;
1247}
1248
1249void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1250 // Investigate global variable's type.
1251 FindType(GV.getType());
1252}
1253
1254void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1255 // Investigate function's type.
1256 FunctionType *FTy = F.getFunctionType();
1257
1258 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1259 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001260 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001261 if (GlobalConstFuncTyMap.count(FTy)) {
1262 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1263 SmallVector<Type *, 4> NewFuncParamTys;
1264 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1265 Type *ParamTy = FTy->getParamType(i);
1266 if (i == GVCstArgIdx) {
1267 Type *EleTy = ParamTy->getPointerElementType();
1268 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1269 }
1270
1271 NewFuncParamTys.push_back(ParamTy);
1272 }
1273
1274 FunctionType *NewFTy =
1275 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1276 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1277 FTy = NewFTy;
1278 }
1279
1280 FindType(FTy);
1281 } else {
1282 // As kernel functions do not have parameters, create new function type and
1283    // Kernel functions take no parameters in the generated SPIR-V, so create a
1284    // new parameterless function type and add it to the type map.
1285 FunctionType *NewFTy =
1286 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1287 FindType(NewFTy);
1288 }
1289
1290 // Investigate instructions' type in function body.
1291 for (BasicBlock &BB : F) {
1292 for (Instruction &I : BB) {
1293 if (isa<ShuffleVectorInst>(I)) {
1294 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1295 // Ignore type for mask of shuffle vector instruction.
1296 if (i == 2) {
1297 continue;
1298 }
1299
1300 Value *Op = I.getOperand(i);
1301 if (!isa<MetadataAsValue>(Op)) {
1302 FindType(Op->getType());
1303 }
1304 }
1305
1306 FindType(I.getType());
1307 continue;
1308 }
1309
David Neto862b7d82018-06-14 18:48:37 -04001310 CallInst *Call = dyn_cast<CallInst>(&I);
1311
1312 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001313 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001314 // This is a fake call representing access to a resource variable.
1315 // We handle that elsewhere.
1316 continue;
1317 }
1318
Alan Baker202c8c72018-08-13 13:47:44 -04001319 if (Call && Call->getCalledFunction()->getName().startswith(
1320 clspv::WorkgroupAccessorFunction())) {
1321 // This is a fake call representing access to a workgroup variable.
1322 // We handle that elsewhere.
1323 continue;
1324 }
1325
alan-bakerf083bed2020-01-29 08:15:42 -05001326 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1327      // OpCompositeExtract, which take literal values for indices. As a result,
1328      // don't map the type of the indices.
1329 if (I.getOpcode() == Instruction::ExtractValue) {
1330 FindType(I.getOperand(0)->getType());
1331 continue;
1332 }
1333 if (I.getOpcode() == Instruction::InsertValue) {
1334 FindType(I.getOperand(0)->getType());
1335 FindType(I.getOperand(1)->getType());
1336 continue;
1337 }
1338
1339      // #497: InsertElement and ExtractElement map to OpCompositeInsert and
1340      // OpCompositeExtract if the index is a constant. In such a case don't map the index type.
1341 if (I.getOpcode() == Instruction::ExtractElement) {
1342 FindType(I.getOperand(0)->getType());
1343 Value *op1 = I.getOperand(1);
1344 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1345 FindType(op1->getType());
1346 }
1347 continue;
1348 }
1349 if (I.getOpcode() == Instruction::InsertElement) {
1350 FindType(I.getOperand(0)->getType());
1351 FindType(I.getOperand(1)->getType());
1352 Value *op2 = I.getOperand(2);
1353 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1354 FindType(op2->getType());
1355 }
1356 continue;
1357 }
1358
David Neto22f144c2017-06-12 14:26:21 -04001359 // Work through the operands of the instruction.
1360 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1361 Value *const Op = I.getOperand(i);
1362 // If any of the operands is a constant, find the type!
1363 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1364 FindType(Op->getType());
1365 }
1366 }
1367
1368 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001369 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001370          // Don't examine operand types for call instructions here.
1371 break;
1372 }
Alan Baker202c8c72018-08-13 13:47:44 -04001373 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1374 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1375 clspv::WorkgroupAccessorFunction())) {
1376 // This is a fake call representing access to a workgroup variable.
1377 // We handle that elsewhere.
1378 continue;
1379 }
1380 }
David Neto22f144c2017-06-12 14:26:21 -04001381 if (!isa<MetadataAsValue>(&Op)) {
1382 FindType(Op->getType());
1383 continue;
1384 }
1385 }
1386
David Neto22f144c2017-06-12 14:26:21 -04001387 // We don't want to track the type of this call as we are going to replace
1388 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001389 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001390 Call->getCalledFunction()->getName())) {
1391 continue;
1392 }
1393
1394 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1395 // If gep's base operand has ModuleScopePrivate address space, make gep
1396 // return ModuleScopePrivate address space.
1397 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1398 // Add pointer type with private address space for global constant to
1399 // type list.
1400 Type *EleTy = I.getType()->getPointerElementType();
1401 Type *NewPTy =
1402 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1403
1404 FindType(NewPTy);
1405 continue;
1406 }
1407 }
1408
1409 FindType(I.getType());
1410 }
1411 }
1412}
1413
David Neto862b7d82018-06-14 18:48:37 -04001414void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1415 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001416 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001417 0 < getSamplerMap().size()) {
1418 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1419 if (!SamplerStructTy) {
1420 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1421 }
1422
1423 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1424
1425 FindType(SamplerTy);
1426 }
1427}
1428
1429void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1430 // Record types so they are generated.
1431 TypesNeedingLayout.reset();
1432 StructTypesNeedingBlock.reset();
1433
1434 // To match older clspv codegen, generate the float type first if required
1435 // for images.
1436 for (const auto *info : ModuleOrderedResourceVars) {
1437 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1438 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001439 if (IsIntImageType(info->var_fn->getReturnType())) {
1440 // Nothing for now...
1441 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1442 FindType(Type::getInt32Ty(M.getContext()));
1443 }
1444
1445 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001446 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001447 }
1448 }
1449
1450 for (const auto *info : ModuleOrderedResourceVars) {
1451 Type *type = info->var_fn->getReturnType();
1452
1453 switch (info->arg_kind) {
1454 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001455 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001456 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1457 StructTypesNeedingBlock.insert(sty);
1458 } else {
1459 errs() << *type << "\n";
1460 llvm_unreachable("Buffer arguments must map to structures!");
1461 }
1462 break;
1463 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001464 case clspv::ArgKind::PodUBO:
1465 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001466 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1467 StructTypesNeedingBlock.insert(sty);
1468 } else {
1469 errs() << *type << "\n";
1470 llvm_unreachable("POD arguments must map to structures!");
1471 }
1472 break;
1473 case clspv::ArgKind::ReadOnlyImage:
1474 case clspv::ArgKind::WriteOnlyImage:
1475 case clspv::ArgKind::Sampler:
1476 // Sampler and image types map to the pointee type but
1477 // in the uniform constant address space.
1478 type = PointerType::get(type->getPointerElementType(),
1479 clspv::AddressSpace::UniformConstant);
1480 break;
1481 default:
1482 break;
1483 }
1484
1485 // The converted type is the type of the OpVariable we will generate.
1486 // If the pointee type is an array of size zero, FindType will convert it
1487 // to a runtime array.
1488 FindType(type);
1489 }
1490
alan-bakerdcd97412019-09-16 15:32:30 -04001491 // If module constants are clustered in a storage buffer then that struct
1492 // needs layout decorations.
1493 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1494 for (GlobalVariable &GV : M.globals()) {
1495 PointerType *PTy = cast<PointerType>(GV.getType());
1496 const auto AS = PTy->getAddressSpace();
1497 const bool module_scope_constant_external_init =
1498 (AS == AddressSpace::Constant) && GV.hasInitializer();
1499 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1500 if (module_scope_constant_external_init &&
1501 spv::BuiltInMax == BuiltinType) {
1502 StructTypesNeedingBlock.insert(
1503 cast<StructType>(PTy->getPointerElementType()));
1504 }
1505 }
1506 }
1507
Kévin Petitbbbda972020-03-03 19:16:31 +00001508 for (const GlobalVariable &GV : M.globals()) {
1509 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1510 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1511 assert(Ty->isStructTy() && "Push constants have to be structures.");
1512 auto STy = cast<StructType>(Ty);
1513 StructTypesNeedingBlock.insert(STy);
1514 }
1515 }
1516
David Neto862b7d82018-06-14 18:48:37 -04001517 // Traverse the arrays and structures underneath each Block, and
1518 // mark them as needing layout.
1519 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1520 StructTypesNeedingBlock.end());
1521 while (!work_list.empty()) {
1522 Type *type = work_list.back();
1523 work_list.pop_back();
1524 TypesNeedingLayout.insert(type);
1525 switch (type->getTypeID()) {
1526 case Type::ArrayTyID:
1527 work_list.push_back(type->getArrayElementType());
1528 if (!Hack_generate_runtime_array_stride_early) {
1529 // Remember this array type for deferred decoration.
1530 TypesNeedingArrayStride.insert(type);
1531 }
1532 break;
1533 case Type::StructTyID:
1534 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1535 work_list.push_back(elem_ty);
1536 }
1537 default:
1538 // This type and its contained types don't get layout.
1539 break;
1540 }
1541 }
1542}
1543
Alan Baker202c8c72018-08-13 13:47:44 -04001544void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1545 // The SpecId assignment for pointer-to-local arguments is recorded in
1546 // module-level metadata. Translate that information into local argument
1547 // information.
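  // Each operand of that named metadata node is a tuple whose shape, as read
  // below, is roughly:
  //   !{<kernel function>, i32 <argument index>, i32 <SpecId>}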
1548 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001549 if (!nmd)
1550 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001551 for (auto operand : nmd->operands()) {
1552 MDTuple *tuple = cast<MDTuple>(operand);
1553 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1554 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001555 ConstantAsMetadata *arg_index_md =
1556 cast<ConstantAsMetadata>(tuple->getOperand(1));
1557 int arg_index = static_cast<int>(
1558 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1559 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001560
1561 ConstantAsMetadata *spec_id_md =
1562 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001563 int spec_id = static_cast<int>(
1564 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001565
1566 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1567 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001568 if (LocalSpecIdInfoMap.count(spec_id))
1569 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001570
1571 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1572 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1573 nextID + 1, nextID + 2,
1574 nextID + 3, spec_id};
1575 LocalSpecIdInfoMap[spec_id] = info;
1576 nextID += 4;
1577
1578 // Ensure the types necessary for this argument get generated.
1579 Type *IdxTy = Type::getInt32Ty(M.getContext());
1580 FindConstant(ConstantInt::get(IdxTy, 0));
1581 FindType(IdxTy);
1582 FindType(arg->getType());
1583 }
1584}
1585
David Neto22f144c2017-06-12 14:26:21 -04001586void SPIRVProducerPass::FindType(Type *Ty) {
1587 TypeList &TyList = getTypeList();
1588
1589 if (0 != TyList.idFor(Ty)) {
1590 return;
1591 }
1592
1593 if (Ty->isPointerTy()) {
1594 auto AddrSpace = Ty->getPointerAddressSpace();
1595 if ((AddressSpace::Constant == AddrSpace) ||
1596 (AddressSpace::Global == AddrSpace)) {
1597 auto PointeeTy = Ty->getPointerElementType();
1598
1599 if (PointeeTy->isStructTy() &&
1600 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1601 FindType(PointeeTy);
1602 auto ActualPointerTy =
1603 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1604 FindType(ActualPointerTy);
1605 return;
1606 }
1607 }
1608 }
1609
David Neto862b7d82018-06-14 18:48:37 -04001610  // By convention, an LLVM array type with 0 elements maps to
1611  // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which
1612  // has a constant number of elements. We need to support the type of
1613  // that length constant.
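  // For example (illustrative mapping only):
  //   [0 x float] -> OpTypeRuntimeArray %float
  //   [4 x float] -> OpTypeArray %float %uint_4   ; needs the i32 constant 4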
1614 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1615 if (arrayTy->getNumElements() > 0) {
1616 LLVMContext &Context = Ty->getContext();
1617 FindType(Type::getInt32Ty(Context));
1618 }
David Neto22f144c2017-06-12 14:26:21 -04001619 }
1620
1621 for (Type *SubTy : Ty->subtypes()) {
1622 FindType(SubTy);
1623 }
1624
1625 TyList.insert(Ty);
1626}
1627
1628void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1629 // If the global variable has a (non undef) initializer.
1630 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001631 // Generate the constant if it's not the initializer to a module scope
1632 // constant that we will expect in a storage buffer.
1633 const bool module_scope_constant_external_init =
1634 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1635 clspv::Option::ModuleConstantsInStorageBuffer();
1636 if (!module_scope_constant_external_init) {
1637 FindConstant(GV.getInitializer());
1638 }
David Neto22f144c2017-06-12 14:26:21 -04001639 }
1640}
1641
1642void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1643 // Investigate constants in function body.
1644 for (BasicBlock &BB : F) {
1645 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001646 if (auto *call = dyn_cast<CallInst>(&I)) {
1647 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001648 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001649 // We've handled these constants elsewhere, so skip it.
1650 continue;
1651 }
Alan Baker202c8c72018-08-13 13:47:44 -04001652 if (name.startswith(clspv::ResourceAccessorFunction())) {
1653 continue;
1654 }
1655 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001656 continue;
1657 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001658 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1659 // Skip the first operand that has the SPIR-V Opcode
1660 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1661 if (isa<Constant>(I.getOperand(i)) &&
1662 !isa<GlobalValue>(I.getOperand(i))) {
1663 FindConstant(I.getOperand(i));
1664 }
1665 }
1666 continue;
1667 }
David Neto22f144c2017-06-12 14:26:21 -04001668 }
1669
1670 if (isa<AllocaInst>(I)) {
1671        // An alloca instruction has a constant for the number of elements. Ignore it.
1672 continue;
1673 } else if (isa<ShuffleVectorInst>(I)) {
1674 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1675 // Ignore constant for mask of shuffle vector instruction.
1676 if (i == 2) {
1677 continue;
1678 }
1679
1680 if (isa<Constant>(I.getOperand(i)) &&
1681 !isa<GlobalValue>(I.getOperand(i))) {
1682 FindConstant(I.getOperand(i));
1683 }
1684 }
1685
1686 continue;
1687 } else if (isa<InsertElementInst>(I)) {
1688 // Handle InsertElement with <4 x i8> specially.
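        // A <4 x i8> value is modeled as a packed i32, so a later insertion of
        // element i needs the mask 0xFF and the shift amount i*8 (a sketch of
        // the downstream codegen); register those constants now.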
1689 Type *CompositeTy = I.getOperand(0)->getType();
1690 if (is4xi8vec(CompositeTy)) {
1691 LLVMContext &Context = CompositeTy->getContext();
1692 if (isa<Constant>(I.getOperand(0))) {
1693 FindConstant(I.getOperand(0));
1694 }
1695
1696 if (isa<Constant>(I.getOperand(1))) {
1697 FindConstant(I.getOperand(1));
1698 }
1699
1700 // Add mask constant 0xFF.
1701 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1702 FindConstant(CstFF);
1703
1704 // Add shift amount constant.
1705 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1706 uint64_t Idx = CI->getZExtValue();
1707 Constant *CstShiftAmount =
1708 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1709 FindConstant(CstShiftAmount);
1710 }
1711
1712 continue;
1713 }
1714
1715 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1716 // Ignore constant for index of InsertElement instruction.
1717 if (i == 2) {
1718 continue;
1719 }
1720
1721 if (isa<Constant>(I.getOperand(i)) &&
1722 !isa<GlobalValue>(I.getOperand(i))) {
1723 FindConstant(I.getOperand(i));
1724 }
1725 }
1726
1727 continue;
1728 } else if (isa<ExtractElementInst>(I)) {
1729 // Handle ExtractElement with <4 x i8> specially.
1730 Type *CompositeTy = I.getOperand(0)->getType();
1731 if (is4xi8vec(CompositeTy)) {
1732 LLVMContext &Context = CompositeTy->getContext();
1733 if (isa<Constant>(I.getOperand(0))) {
1734 FindConstant(I.getOperand(0));
1735 }
1736
1737 // Add mask constant 0xFF.
1738 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1739 FindConstant(CstFF);
1740
1741 // Add shift amount constant.
1742 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1743 uint64_t Idx = CI->getZExtValue();
1744 Constant *CstShiftAmount =
1745 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1746 FindConstant(CstShiftAmount);
1747 } else {
1748 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1749 FindConstant(Cst8);
1750 }
1751
1752 continue;
1753 }
1754
1755 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1756 // Ignore constant for index of ExtractElement instruction.
1757 if (i == 1) {
1758 continue;
1759 }
1760
1761 if (isa<Constant>(I.getOperand(i)) &&
1762 !isa<GlobalValue>(I.getOperand(i))) {
1763 FindConstant(I.getOperand(i));
1764 }
1765 }
1766
1767 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001768 } else if ((Instruction::Xor == I.getOpcode()) &&
1769 I.getType()->isIntegerTy(1)) {
1770 // We special case for Xor where the type is i1 and one of the arguments
1771 // is a constant 1 (true), this is an OpLogicalNot in SPIR-V, and we
1772 // don't need the constant
David Neto22f144c2017-06-12 14:26:21 -04001773 bool foundConstantTrue = false;
1774 for (Use &Op : I.operands()) {
1775 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1776 auto CI = cast<ConstantInt>(Op);
1777
1778 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001779 // If we already found the true constant, we might (probably only
1780 // on -O0) have an OpLogicalNot which is taking a constant
1781 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001782 FindConstant(Op);
1783 } else {
1784 foundConstantTrue = true;
1785 }
1786 }
1787 }
1788
1789 continue;
David Netod2de94a2017-08-28 17:27:47 -04001790 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001791 // Special case if i8 is not generally handled.
1792 if (!clspv::Option::Int8Support()) {
1793 // For truncation to i8 we mask against 255.
1794 Type *ToTy = I.getType();
1795 if (8u == ToTy->getPrimitiveSizeInBits()) {
1796 LLVMContext &Context = ToTy->getContext();
1797 Constant *Cst255 =
1798 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1799 FindConstant(Cst255);
1800 }
David Netod2de94a2017-08-28 17:27:47 -04001801 }
Neil Henning39672102017-09-29 14:33:13 +01001802 } else if (isa<AtomicRMWInst>(I)) {
1803 LLVMContext &Context = I.getContext();
1804
1805 FindConstant(
1806 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1807 FindConstant(ConstantInt::get(
1808 Type::getInt32Ty(Context),
1809 spv::MemorySemanticsUniformMemoryMask |
1810 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001811 }
1812
1813 for (Use &Op : I.operands()) {
1814 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1815 FindConstant(Op);
1816 }
1817 }
1818 }
1819 }
1820}
1821
1822void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001823 ValueList &CstList = getConstantList();
1824
David Netofb9a7972017-08-25 17:08:24 -04001825 // If V is already tracked, ignore it.
1826 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001827 return;
1828 }
1829
David Neto862b7d82018-06-14 18:48:37 -04001830 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1831 return;
1832 }
1833
David Neto22f144c2017-06-12 14:26:21 -04001834 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001835 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001836
1837 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001838 if (is4xi8vec(CstTy)) {
1839 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001840 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001841 }
1842 }
1843
1844 if (Cst->getNumOperands()) {
1845 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1846 ++I) {
1847 FindConstant(*I);
1848 }
1849
David Netofb9a7972017-08-25 17:08:24 -04001850 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001851 return;
1852 } else if (const ConstantDataSequential *CDS =
1853 dyn_cast<ConstantDataSequential>(Cst)) {
1854 // Add constants for each element to constant list.
1855 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1856 Constant *EleCst = CDS->getElementAsConstant(i);
1857 FindConstant(EleCst);
1858 }
1859 }
1860
1861 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001862 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001863 }
1864}
1865
1866spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1867 switch (AddrSpace) {
1868 default:
1869 llvm_unreachable("Unsupported OpenCL address space");
1870 case AddressSpace::Private:
1871 return spv::StorageClassFunction;
1872 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001873 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001874 case AddressSpace::Constant:
1875 return clspv::Option::ConstantArgsInUniformBuffer()
1876 ? spv::StorageClassUniform
1877 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001878 case AddressSpace::Input:
1879 return spv::StorageClassInput;
1880 case AddressSpace::Local:
1881 return spv::StorageClassWorkgroup;
1882 case AddressSpace::UniformConstant:
1883 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001884 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001885 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001886 case AddressSpace::ModuleScopePrivate:
1887 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001888 case AddressSpace::PushConstant:
1889 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001890 }
1891}
1892
David Neto862b7d82018-06-14 18:48:37 -04001893spv::StorageClass
1894SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1895 switch (arg_kind) {
1896 case clspv::ArgKind::Buffer:
1897 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001898 case clspv::ArgKind::BufferUBO:
1899 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001900 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001901 return spv::StorageClassStorageBuffer;
1902 case clspv::ArgKind::PodUBO:
1903 return spv::StorageClassUniform;
1904 case clspv::ArgKind::PodPushConstant:
1905 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001906 case clspv::ArgKind::Local:
1907 return spv::StorageClassWorkgroup;
1908 case clspv::ArgKind::ReadOnlyImage:
1909 case clspv::ArgKind::WriteOnlyImage:
1910 case clspv::ArgKind::Sampler:
1911 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001912 default:
1913 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001914 }
1915}
1916
David Neto22f144c2017-06-12 14:26:21 -04001917spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1918 return StringSwitch<spv::BuiltIn>(Name)
1919 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1920 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1921 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1922 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1923 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1924 .Default(spv::BuiltInMax);
1925}
1926
1927void SPIRVProducerPass::GenerateExtInstImport() {
SJW69939d52020-04-16 07:29:07 -05001928 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kImports);
David Neto22f144c2017-06-12 14:26:21 -04001929 uint32_t &ExtInstImportID = getOpExtInstImportID();
1930
1931 //
1932 // Generate OpExtInstImport.
1933 //
1934 // Ops[0] ... Ops[n] = Name (Literal String)
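  //
  // In assembly form this is roughly: %id = OpExtInstImport "GLSL.std.450"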
David Neto22f144c2017-06-12 14:26:21 -04001935 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001936 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1937 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001938}
1939
alan-bakerb6b09dc2018-11-08 16:59:28 -05001940void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1941 Module &module) {
SJW69939d52020-04-16 07:29:07 -05001942 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kTypes);
David Neto22f144c2017-06-12 14:26:21 -04001943 ValueMapType &VMap = getValueMap();
1944 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001945 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001946
1947 // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
1948 // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
1949 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1950
1951 for (Type *Ty : getTypeList()) {
1952 // Update TypeMap with nextID for reference later.
1953 TypeMap[Ty] = nextID;
1954
1955 switch (Ty->getTypeID()) {
1956 default: {
1957 Ty->print(errs());
1958 llvm_unreachable("Unsupported type???");
1959 break;
1960 }
1961 case Type::MetadataTyID:
1962 case Type::LabelTyID: {
1963 // Ignore these types.
1964 break;
1965 }
1966 case Type::PointerTyID: {
1967 PointerType *PTy = cast<PointerType>(Ty);
1968 unsigned AddrSpace = PTy->getAddressSpace();
1969
1970 // For the purposes of our Vulkan SPIR-V type system, constant and global
1971 // are conflated.
1972 bool UseExistingOpTypePointer = false;
1973 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001974 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1975 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001976 // Check to see if we already created this type (for instance, if we
1977 // had a constant <type>* and a global <type>*, the type would be
1978 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001979 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1980 if (0 < TypeMap.count(GlobalTy)) {
1981 TypeMap[PTy] = TypeMap[GlobalTy];
1982 UseExistingOpTypePointer = true;
1983 break;
1984 }
David Neto22f144c2017-06-12 14:26:21 -04001985 }
1986 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001987 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1988 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001989
alan-bakerb6b09dc2018-11-08 16:59:28 -05001990 // Check to see if we already created this type (for instance, if we
1991 // had a constant <type>* and a global <type>*, the type would be
1992 // created by one of these types, and shared by both).
1993 auto ConstantTy =
1994 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001995 if (0 < TypeMap.count(ConstantTy)) {
1996 TypeMap[PTy] = TypeMap[ConstantTy];
1997 UseExistingOpTypePointer = true;
1998 }
David Neto22f144c2017-06-12 14:26:21 -04001999 }
2000 }
2001
David Neto862b7d82018-06-14 18:48:37 -04002002 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04002003
David Neto862b7d82018-06-14 18:48:37 -04002004 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04002005 //
2006 // Generate OpTypePointer.
2007 //
2008
2009 // OpTypePointer
2010 // Ops[0] = Storage Class
2011 // Ops[1] = Element Type ID
2012 SPIRVOperandList Ops;
2013
David Neto257c3892018-04-11 13:19:45 -04002014 Ops << MkNum(GetStorageClass(AddrSpace))
2015 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04002016
David Neto87846742018-04-11 17:36:22 -04002017 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002018 SPIRVInstList.push_back(Inst);
2019 }
David Neto22f144c2017-06-12 14:26:21 -04002020 break;
2021 }
2022 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002023 StructType *STy = cast<StructType>(Ty);
2024
2025 // Handle sampler type.
2026 if (STy->isOpaque()) {
2027 if (STy->getName().equals("opencl.sampler_t")) {
2028 //
2029 // Generate OpTypeSampler
2030 //
2031 // Empty Ops.
2032 SPIRVOperandList Ops;
2033
David Neto87846742018-04-11 17:36:22 -04002034 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002035 SPIRVInstList.push_back(Inst);
2036 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05002037 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2038 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002039 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2040 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002041 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002042 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002043 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2044 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002045 STy->getName().startswith("opencl.image3d_ro_t") ||
2046 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002047 //
2048 // Generate OpTypeImage
2049 //
2050 // Ops[0] = Sampled Type ID
2051 // Ops[1] = Dim ID
2052 // Ops[2] = Depth (Literal Number)
2053 // Ops[3] = Arrayed (Literal Number)
2054 // Ops[4] = MS (Literal Number)
2055 // Ops[5] = Sampled (Literal Number)
2056 // Ops[6] = Image Format ID
2057 //
2058 SPIRVOperandList Ops;
2059
alan-bakerf67468c2019-11-25 15:51:49 -05002060 uint32_t ImageTyID = nextID++;
2061 uint32_t SampledTyID = 0;
2062 if (STy->getName().contains(".float")) {
2063 SampledTyID = lookupType(Type::getFloatTy(Context));
2064 } else if (STy->getName().contains(".uint")) {
2065 SampledTyID = lookupType(Type::getInt32Ty(Context));
2066 } else if (STy->getName().contains(".int")) {
2067 // Generate a signed 32-bit integer if necessary.
2068 if (int32ID == 0) {
2069 int32ID = nextID++;
2070 SPIRVOperandList intOps;
2071 intOps << MkNum(32);
2072 intOps << MkNum(1);
2073 auto signed_int =
2074 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2075 SPIRVInstList.push_back(signed_int);
2076 }
2077 SampledTyID = int32ID;
2078
2079 // Generate a vec4 of the signed int if necessary.
2080 if (v4int32ID == 0) {
2081 v4int32ID = nextID++;
2082 SPIRVOperandList vecOps;
2083 vecOps << MkId(int32ID);
2084 vecOps << MkNum(4);
2085 auto int_vec =
2086 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2087 SPIRVInstList.push_back(int_vec);
2088 }
2089 } else {
2090 // This was likely an UndefValue.
2091 SampledTyID = lookupType(Type::getFloatTy(Context));
2092 }
David Neto257c3892018-04-11 13:19:45 -04002093 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002094
2095 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002096 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002097 STy->getName().startswith("opencl.image1d_wo_t") ||
2098 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2099 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002100 DimID = spv::Dim1D;
2101 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2102 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002103 DimID = spv::Dim3D;
2104 }
David Neto257c3892018-04-11 13:19:45 -04002105 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002106
2107 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002108 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002109
alan-baker7150a1d2020-02-25 08:31:06 -05002110 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2111 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002112
2113 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002114 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002115
alan-baker7150a1d2020-02-25 08:31:06 -05002116 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002117 //
2118 // From Spec
2119 //
2120 // 0 indicates this is only known at run time, not at compile time
2121 // 1 indicates will be used with sampler
2122 // 2 indicates will be used without a sampler (a storage image)
2123 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002124 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002125 Sampled = 2;
2126 }
David Neto257c3892018-04-11 13:19:45 -04002127 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002128
2129 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002130 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002131
alan-bakerf67468c2019-11-25 15:51:49 -05002132 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002133 SPIRVInstList.push_back(Inst);
2134 break;
2135 }
2136 }
2137
2138 //
2139 // Generate OpTypeStruct
2140 //
2141 // Ops[0] ... Ops[n] = Member IDs
2142 SPIRVOperandList Ops;
2143
2144 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002145 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002146 }
2147
David Neto22f144c2017-06-12 14:26:21 -04002148 uint32_t STyID = nextID;
2149
alan-bakerb6b09dc2018-11-08 16:59:28 -05002150 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002151 SPIRVInstList.push_back(Inst);
2152
2153 // Generate OpMemberDecorate.
Kévin Petitbbbda972020-03-03 19:16:31 +00002154 if (TypesNeedingLayout.idFor(STy)) {
2155 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2156 MemberIdx++) {
2157 // Ops[0] = Structure Type ID
2158 // Ops[1] = Member Index(Literal Number)
2159 // Ops[2] = Decoration (Offset)
2160 // Ops[3] = Byte Offset (Literal Number)
2161 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002162
Kévin Petitbbbda972020-03-03 19:16:31 +00002163 Ops << MkId(STyID) << MkNum(MemberIdx)
2164 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002165
Kévin Petitbbbda972020-03-03 19:16:31 +00002166 const auto ByteOffset =
2167 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002168
Kévin Petitbbbda972020-03-03 19:16:31 +00002169 Ops << MkNum(ByteOffset);
2170
2171 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002172 getSPIRVInstList(kAnnotations).push_back(DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002173 }
David Neto22f144c2017-06-12 14:26:21 -04002174 }
2175
2176 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002177 if (StructTypesNeedingBlock.idFor(STy)) {
2178 Ops.clear();
2179 // Use Block decorations with StorageBuffer storage class.
2180 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002181
David Neto862b7d82018-06-14 18:48:37 -04002182 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002183 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002184 }
2185 break;
2186 }
2187 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002188 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002189
2190 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002191 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002192 SPIRVInstList.push_back(Inst);
2193 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002194 if (!clspv::Option::Int8Support()) {
2195 // i8 is added to TypeMap as i32.
2196 // No matter what LLVM type is requested first, always alias the
2197 // second one's SPIR-V type to be the same as the one we generated
2198 // first.
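          // e.g. if i8 is encountered first, a 32-bit OpTypeInt is emitted for
          // it and a later i32 simply reuses that id (and vice versa); this is
          // a sketch of the aliasing logic below.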
2199 unsigned aliasToWidth = 0;
2200 if (BitWidth == 8) {
2201 aliasToWidth = 32;
2202 BitWidth = 32;
2203 } else if (BitWidth == 32) {
2204 aliasToWidth = 8;
2205 }
2206 if (aliasToWidth) {
2207 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2208 auto where = TypeMap.find(otherType);
2209 if (where == TypeMap.end()) {
2210 // Go ahead and make it, but also map the other type to it.
2211 TypeMap[otherType] = nextID;
2212 } else {
2213            // Alias this SPIR-V type to the existing type.
2214 TypeMap[Ty] = where->second;
2215 break;
2216 }
David Neto391aeb12017-08-26 15:51:58 -04002217 }
David Neto22f144c2017-06-12 14:26:21 -04002218 }
2219
David Neto257c3892018-04-11 13:19:45 -04002220 SPIRVOperandList Ops;
2221 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002222
2223 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002224 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002225 }
2226 break;
2227 }
2228 case Type::HalfTyID:
2229 case Type::FloatTyID:
2230 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002231 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002232 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002233
2234 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002235 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002236 break;
2237 }
2238 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002239 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002240 const uint64_t Length = ArrTy->getArrayNumElements();
2241 if (Length == 0) {
2242 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002243
David Neto862b7d82018-06-14 18:48:37 -04002244 // Only generate the type once.
2245 // TODO(dneto): Can it ever be generated more than once?
2246 // Doesn't LLVM type uniqueness guarantee we'll only see this
2247 // once?
2248 Type *EleTy = ArrTy->getArrayElementType();
2249 if (OpRuntimeTyMap.count(EleTy) == 0) {
2250 uint32_t OpTypeRuntimeArrayID = nextID;
2251 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002252
David Neto862b7d82018-06-14 18:48:37 -04002253 //
2254 // Generate OpTypeRuntimeArray.
2255 //
David Neto22f144c2017-06-12 14:26:21 -04002256
David Neto862b7d82018-06-14 18:48:37 -04002257 // OpTypeRuntimeArray
2258 // Ops[0] = Element Type ID
2259 SPIRVOperandList Ops;
2260 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002261
David Neto862b7d82018-06-14 18:48:37 -04002262 SPIRVInstList.push_back(
2263 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002264
David Neto862b7d82018-06-14 18:48:37 -04002265 if (Hack_generate_runtime_array_stride_early) {
2266 // Generate OpDecorate.
David Neto22f144c2017-06-12 14:26:21 -04002267
David Neto862b7d82018-06-14 18:48:37 -04002268 // Ops[0] = Target ID
2269 // Ops[1] = Decoration (ArrayStride)
2270 // Ops[2] = Stride Number(Literal Number)
2271 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002272
David Neto862b7d82018-06-14 18:48:37 -04002273 Ops << MkId(OpTypeRuntimeArrayID)
2274 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002275 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002276
David Neto862b7d82018-06-14 18:48:37 -04002277 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002278 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002279 }
2280 }
David Neto22f144c2017-06-12 14:26:21 -04002281
David Neto862b7d82018-06-14 18:48:37 -04002282 } else {
David Neto22f144c2017-06-12 14:26:21 -04002283
David Neto862b7d82018-06-14 18:48:37 -04002284 //
2285 // Generate OpConstant and OpTypeArray.
2286 //
2287
2288 //
2289 // Generate OpConstant for array length.
2290 //
2291 // Ops[0] = Result Type ID
2292 // Ops[1] .. Ops[n] = Values LiteralNumber
2293 SPIRVOperandList Ops;
2294
2295 Type *LengthTy = Type::getInt32Ty(Context);
2296 uint32_t ResTyID = lookupType(LengthTy);
2297 Ops << MkId(ResTyID);
2298
2299 assert(Length < UINT32_MAX);
2300 Ops << MkNum(static_cast<uint32_t>(Length));
2301
2302 // Add constant for length to constant list.
2303 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2304 AllocatedVMap[CstLength] = nextID;
2305 VMap[CstLength] = nextID;
2306 uint32_t LengthID = nextID;
2307
2308 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2309 SPIRVInstList.push_back(CstInst);
2310
2311 // Remember to generate ArrayStride later
2312 getTypesNeedingArrayStride().insert(Ty);
2313
2314 //
2315 // Generate OpTypeArray.
2316 //
2317 // Ops[0] = Element Type ID
2318 // Ops[1] = Array Length Constant ID
2319 Ops.clear();
2320
2321 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2322 Ops << MkId(EleTyID) << MkId(LengthID);
2323
2324 // Update TypeMap with nextID.
2325 TypeMap[Ty] = nextID;
2326
2327 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2328 SPIRVInstList.push_back(ArrayInst);
2329 }
David Neto22f144c2017-06-12 14:26:21 -04002330 break;
2331 }
2332 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002333 // <4 x i8> is changed to i32 if i8 is not generally supported.
2334 if (!clspv::Option::Int8Support() &&
2335 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002336 if (Ty->getVectorNumElements() == 4) {
2337 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2338 break;
2339 } else {
2340 Ty->print(errs());
2341 llvm_unreachable("Support above i8 vector type");
2342 }
2343 }
2344
2345 // Ops[0] = Component Type ID
2346 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002347 SPIRVOperandList Ops;
2348 Ops << MkId(lookupType(Ty->getVectorElementType()))
2349 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002350
alan-bakerb6b09dc2018-11-08 16:59:28 -05002351 SPIRVInstruction *inst =
2352 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002353 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002354 break;
2355 }
2356 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002357 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002358 SPIRVInstList.push_back(Inst);
2359 break;
2360 }
2361 case Type::FunctionTyID: {
2362 // Generate SPIRV instruction for function type.
2363 FunctionType *FTy = cast<FunctionType>(Ty);
2364
2365 // Ops[0] = Return Type ID
2366 // Ops[1] ... Ops[n] = Parameter Type IDs
2367 SPIRVOperandList Ops;
2368
2369 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002370 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002371
2372 // Find SPIRV instructions for parameter types
2373 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2374 // Find SPIRV instruction for parameter type.
2375 auto ParamTy = FTy->getParamType(k);
2376 if (ParamTy->isPointerTy()) {
2377 auto PointeeTy = ParamTy->getPointerElementType();
2378 if (PointeeTy->isStructTy() &&
2379 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2380 ParamTy = PointeeTy;
2381 }
2382 }
2383
David Netoc6f3ab22018-04-06 18:02:31 -04002384 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002385 }
2386
David Neto87846742018-04-11 17:36:22 -04002387 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002388 SPIRVInstList.push_back(Inst);
2389 break;
2390 }
2391 }
2392 }
2393
2394 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002395 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002396 //
2397 // Generate OpTypeSampledImage.
2398 //
2399 // Ops[0] = Image Type ID
2400 //
2401 SPIRVOperandList Ops;
2402
David Netoc6f3ab22018-04-06 18:02:31 -04002403 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002404
alan-bakerabd82722019-12-03 17:14:51 -05002405 // Update the image type map.
2406 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002407
David Neto87846742018-04-11 17:36:22 -04002408 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002409 SPIRVInstList.push_back(Inst);
2410 }
David Netoc6f3ab22018-04-06 18:02:31 -04002411
2412 // Generate types for pointer-to-local arguments.
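  // For each distinct SpecId this emits, roughly (illustrative SPIR-V; the
  // SpecId decoration itself is added elsewhere):
  //   %size = OpSpecConstant %uint 1
  //   %arr  = OpTypeArray %elem %size
  //   %ptr  = OpTypePointer Workgroup %arr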
Alan Baker202c8c72018-08-13 13:47:44 -04002413 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2414 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002415 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002416
2417 // Generate the spec constant.
2418 SPIRVOperandList Ops;
2419 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002420 SPIRVInstList.push_back(
2421 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002422
2423 // Generate the array type.
2424 Ops.clear();
2425 // The element type must have been created.
2426 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2427 assert(elem_ty_id);
2428 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2429
2430 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002431 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002432
2433 Ops.clear();
2434 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002435 SPIRVInstList.push_back(new SPIRVInstruction(
2436 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002437 }
David Neto22f144c2017-06-12 14:26:21 -04002438}
2439
2440void SPIRVProducerPass::GenerateSPIRVConstants() {
SJW69939d52020-04-16 07:29:07 -05002441 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kConstants);
David Neto22f144c2017-06-12 14:26:21 -04002442 ValueMapType &VMap = getValueMap();
2443 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2444 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002445 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002446
2447 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002448 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002449 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002450
2451 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002452 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002453 continue;
2454 }
2455
David Netofb9a7972017-08-25 17:08:24 -04002456 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002457 VMap[Cst] = nextID;
2458
2459 //
2460 // Generate OpConstant.
2461 //
2462
2463 // Ops[0] = Result Type ID
2464 // Ops[1] .. Ops[n] = Values LiteralNumber
2465 SPIRVOperandList Ops;
2466
David Neto257c3892018-04-11 13:19:45 -04002467 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002468
2469 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002470 spv::Op Opcode = spv::OpNop;
2471
2472 if (isa<UndefValue>(Cst)) {
2473 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002474 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002475 if (hack_undef && IsTypeNullable(Cst->getType())) {
2476 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002477 }
David Neto22f144c2017-06-12 14:26:21 -04002478 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2479 unsigned BitWidth = CI->getBitWidth();
2480 if (BitWidth == 1) {
2481 // If the bitwidth of constant is 1, generate OpConstantTrue or
2482 // OpConstantFalse.
2483 if (CI->getZExtValue()) {
2484 // Ops[0] = Result Type ID
2485 Opcode = spv::OpConstantTrue;
2486 } else {
2487 // Ops[0] = Result Type ID
2488 Opcode = spv::OpConstantFalse;
2489 }
David Neto22f144c2017-06-12 14:26:21 -04002490 } else {
2491 auto V = CI->getZExtValue();
2492 LiteralNum.push_back(V & 0xFFFFFFFF);
2493
2494 if (BitWidth > 32) {
2495 LiteralNum.push_back(V >> 32);
2496 }
2497
2498 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002499
David Neto257c3892018-04-11 13:19:45 -04002500 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002501 }
2502 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2503 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2504 Type *CFPTy = CFP->getType();
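      // The literal words are the raw IEEE-754 bit pattern of the value,
      // e.g. 1.0f is emitted as 0x3F800000.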
2505 if (CFPTy->isFloatTy()) {
2506 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002507 } else if (CFPTy->isDoubleTy()) {
2508 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2509 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002510 } else if (CFPTy->isHalfTy()) {
2511 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002512 } else {
2513 CFPTy->print(errs());
2514 llvm_unreachable("Implement this ConstantFP Type");
2515 }
2516
2517 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002518
David Neto257c3892018-04-11 13:19:45 -04002519 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002520 } else if (isa<ConstantDataSequential>(Cst) &&
2521 cast<ConstantDataSequential>(Cst)->isString()) {
2522 Cst->print(errs());
2523 llvm_unreachable("Implement this Constant");
2524
2525 } else if (const ConstantDataSequential *CDS =
2526 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002527 // Let's convert <4 x i8> constant to int constant specially.
2528 // This case occurs when all the values are specified as constant
2529 // ints.
2530 Type *CstTy = Cst->getType();
2531 if (is4xi8vec(CstTy)) {
2532 LLVMContext &Context = CstTy->getContext();
2533
2534 //
2535 // Generate OpConstant with OpTypeInt 32 0.
2536 //
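        // Pack the four i8 elements into a single i32 word, first element in
        // the most significant byte: <1, 2, 3, 4> becomes 0x01020304.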
Neil Henning39672102017-09-29 14:33:13 +01002537 uint32_t IntValue = 0;
2538 for (unsigned k = 0; k < 4; k++) {
2539 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002540 IntValue = (IntValue << 8) | (Val & 0xffu);
2541 }
2542
2543 Type *i32 = Type::getInt32Ty(Context);
2544 Constant *CstInt = ConstantInt::get(i32, IntValue);
2545 // If this constant is already registered on VMap, use it.
2546 if (VMap.count(CstInt)) {
2547 uint32_t CstID = VMap[CstInt];
2548 VMap[Cst] = CstID;
2549 continue;
2550 }
2551
David Neto257c3892018-04-11 13:19:45 -04002552 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002553
David Neto87846742018-04-11 17:36:22 -04002554 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002555 SPIRVInstList.push_back(CstInst);
2556
2557 continue;
2558 }
2559
2560 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002561 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2562 Constant *EleCst = CDS->getElementAsConstant(k);
2563 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002564 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002565 }
2566
2567 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002568 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2569 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002570 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002571 Type *CstTy = Cst->getType();
2572 if (is4xi8vec(CstTy)) {
2573 LLVMContext &Context = CstTy->getContext();
2574
2575 //
2576 // Generate OpConstant with OpTypeInt 32 0.
2577 //
Neil Henning39672102017-09-29 14:33:13 +01002578 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002579 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2580 I != E; ++I) {
2581 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002582 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002583 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2584 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002585 }
David Neto49351ac2017-08-26 17:32:20 -04002586 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002587 }
2588
David Neto49351ac2017-08-26 17:32:20 -04002589 Type *i32 = Type::getInt32Ty(Context);
2590 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002591 // If this constant is already registered on VMap, use it.
2592 if (VMap.count(CstInt)) {
2593 uint32_t CstID = VMap[CstInt];
2594 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002595 continue;
David Neto22f144c2017-06-12 14:26:21 -04002596 }
2597
David Neto257c3892018-04-11 13:19:45 -04002598 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002599
David Neto87846742018-04-11 17:36:22 -04002600 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002601 SPIRVInstList.push_back(CstInst);
2602
David Neto19a1bad2017-08-25 15:01:41 -04002603 continue;
David Neto22f144c2017-06-12 14:26:21 -04002604 }
2605
2606 // We use a constant composite in SPIR-V for our constant aggregate in
2607 // LLVM.
2608 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002609
2610 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2611 // Look up the ID of the element of this aggregate (which we will
2612 // previously have created a constant for).
2613 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2614
2615 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002616 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002617 }
2618 } else if (Cst->isNullValue()) {
2619 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002620 } else {
2621 Cst->print(errs());
2622 llvm_unreachable("Unsupported Constant???");
2623 }
2624
alan-baker5b86ed72019-02-15 08:26:50 -05002625 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2626 // Null pointer requires variable pointers.
2627 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2628 }
2629
David Neto87846742018-04-11 17:36:22 -04002630 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002631 SPIRVInstList.push_back(CstInst);
2632 }
2633}
2634
2635void SPIRVProducerPass::GenerateSamplers(Module &M) {
SJW69939d52020-04-16 07:29:07 -05002636 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04002637
alan-bakerb6b09dc2018-11-08 16:59:28 -05002638 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002639 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002640 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2641 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002642
David Neto862b7d82018-06-14 18:48:37 -04002643 // We might have samplers in the sampler map that are not used
2644 // in the translation unit. We need to allocate variables
2645 // for them and bindings too.
2646 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002647
Kévin Petitdf71de32019-04-09 14:09:50 +01002648 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002649 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002650 if (!var_fn)
2651 return;
alan-baker09cb9802019-12-10 13:16:27 -05002652
David Neto862b7d82018-06-14 18:48:37 -04002653 for (auto user : var_fn->users()) {
2654 // Populate SamplerLiteralToDescriptorSetMap and
2655 // SamplerLiteralToBindingMap.
2656 //
2657 // Look for calls like
2658 // call %opencl.sampler_t addrspace(2)*
2659 // @clspv.sampler.var.literal(
2660 // i32 descriptor,
2661 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002662 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002663 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002664 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002665 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002666 auto sampler_value = third_param;
2667 if (clspv::Option::UseSamplerMap()) {
2668 if (third_param >= sampler_map.size()) {
2669 errs() << "Out of bounds index to sampler map: " << third_param;
2670 llvm_unreachable("bad sampler init: out of bounds");
2671 }
2672 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002673 }
2674
David Neto862b7d82018-06-14 18:48:37 -04002675 const auto descriptor_set = static_cast<unsigned>(
2676 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2677 const auto binding = static_cast<unsigned>(
2678 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2679
2680 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2681 SamplerLiteralToBindingMap[sampler_value] = binding;
2682 used_bindings.insert(binding);
2683 }
2684 }
2685
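  // Second pass: create one OpVariable of sampler type (UniformConstant
  // storage class) per distinct literal sampler and decorate it with its
  // DescriptorSet and Binding.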
alan-baker09cb9802019-12-10 13:16:27 -05002686 DenseSet<size_t> seen;
2687 for (auto user : var_fn->users()) {
2688 if (!isa<CallInst>(user))
2689 continue;
2690
2691 auto call = cast<CallInst>(user);
2692 const unsigned third_param = static_cast<unsigned>(
2693 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2694
2695 // Already allocated a variable for this value.
2696 if (!seen.insert(third_param).second)
2697 continue;
2698
2699 auto sampler_value = third_param;
2700 if (clspv::Option::UseSamplerMap()) {
2701 sampler_value = sampler_map[third_param].first;
2702 }
2703
David Neto22f144c2017-06-12 14:26:21 -04002704 // Generate OpVariable.
2705 //
2706 // GIDOps[0] : Result Type ID
2707 // GIDOps[1] : Storage Class
2708 SPIRVOperandList Ops;
2709
David Neto257c3892018-04-11 13:19:45 -04002710 Ops << MkId(lookupType(SamplerTy))
2711 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002712
David Neto862b7d82018-06-14 18:48:37 -04002713 auto sampler_var_id = nextID++;
2714 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002715 SPIRVInstList.push_back(Inst);
2716
alan-baker09cb9802019-12-10 13:16:27 -05002717 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002718
David Neto862b7d82018-06-14 18:48:37 -04002719 unsigned descriptor_set;
2720 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002721 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002722 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002723 // This sampler is not actually used. Find the next one.
2724 for (binding = 0; used_bindings.count(binding); binding++)
2725 ;
2726 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2727 used_bindings.insert(binding);
2728 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002729 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2730 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002731
alan-baker09cb9802019-12-10 13:16:27 -05002732 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002733 descriptorMapEntries->emplace_back(std::move(sampler_data),
2734 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002735 }
2736
SJW69939d52020-04-16 07:29:07 -05002737 // Ops[0] = Target ID
2738 // Ops[1] = Decoration (DescriptorSet)
2739 // Ops[2] = LiteralNumber according to Decoration
2740 Ops.clear();
2741
David Neto862b7d82018-06-14 18:48:37 -04002742 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2743 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002744
David Neto87846742018-04-11 17:36:22 -04002745 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002746 getSPIRVInstList(kAnnotations).push_back(DescDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002747
2748 // Ops[0] = Target ID
2749 // Ops[1] = Decoration (Binding)
2750 // Ops[2] = LiteralNumber according to Decoration
2751 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002752 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2753 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002754
David Neto87846742018-04-11 17:36:22 -04002755 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002756 getSPIRVInstList(kAnnotations).push_back(BindDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002757 }
David Neto862b7d82018-06-14 18:48:37 -04002758}
David Neto22f144c2017-06-12 14:26:21 -04002759
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002760void SPIRVProducerPass::GenerateResourceVars(Module &) {
SJW69939d52020-04-16 07:29:07 -05002761 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto862b7d82018-06-14 18:48:37 -04002762 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002763
David Neto862b7d82018-06-14 18:48:37 -04002764 // Generate variables. Make one for each resource variable info object.
2765 for (auto *info : ModuleOrderedResourceVars) {
2766 Type *type = info->var_fn->getReturnType();
2767 // Remap the address space for opaque types.
2768 switch (info->arg_kind) {
2769 case clspv::ArgKind::Sampler:
2770 case clspv::ArgKind::ReadOnlyImage:
2771 case clspv::ArgKind::WriteOnlyImage:
2772 type = PointerType::get(type->getPointerElementType(),
2773 clspv::AddressSpace::UniformConstant);
2774 break;
2775 default:
2776 break;
2777 }
David Neto22f144c2017-06-12 14:26:21 -04002778
David Neto862b7d82018-06-14 18:48:37 -04002779 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002780
David Neto862b7d82018-06-14 18:48:37 -04002781 const auto type_id = lookupType(type);
2782 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2783 SPIRVOperandList Ops;
2784 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002785
David Neto862b7d82018-06-14 18:48:37 -04002786 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2787 SPIRVInstList.push_back(Inst);
2788
2789 // Map calls to the variable-builtin-function.
2790 for (auto &U : info->var_fn->uses()) {
2791 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2792 const auto set = unsigned(
2793 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2794 const auto binding = unsigned(
2795 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2796 if (set == info->descriptor_set && binding == info->binding) {
2797 switch (info->arg_kind) {
2798 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002799 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002800 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002801 case clspv::ArgKind::PodUBO:
2802 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002803 // The call maps to the variable directly.
2804 VMap[call] = info->var_id;
2805 break;
2806 case clspv::ArgKind::Sampler:
2807 case clspv::ArgKind::ReadOnlyImage:
2808 case clspv::ArgKind::WriteOnlyImage:
2809 // The call maps to a load we generate later.
2810 ResourceVarDeferredLoadCalls[call] = info->var_id;
2811 break;
2812 default:
2813 llvm_unreachable("Unhandled arg kind");
2814 }
2815 }
David Neto22f144c2017-06-12 14:26:21 -04002816 }
David Neto862b7d82018-06-14 18:48:37 -04002817 }
2818 }
David Neto22f144c2017-06-12 14:26:21 -04002819
David Neto862b7d82018-06-14 18:48:37 -04002820 // Generate associated decorations.
SJW69939d52020-04-16 07:29:07 -05002821 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
David Neto862b7d82018-06-14 18:48:37 -04002822
2823 SPIRVOperandList Ops;
2824 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002825 // Push constants don't need descriptor set or binding decorations.
2826 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2827 continue;
2828
David Neto862b7d82018-06-14 18:48:37 -04002829 // Decorate with DescriptorSet and Binding.
2830 Ops.clear();
2831 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2832 << MkNum(info->descriptor_set);
SJW69939d52020-04-16 07:29:07 -05002833 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002834
2835 Ops.clear();
2836 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2837 << MkNum(info->binding);
SJW69939d52020-04-16 07:29:07 -05002838 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002839
alan-bakere9308012019-03-15 10:25:13 -04002840 if (info->coherent) {
2841 // Decorate with Coherent if required for the variable.
2842 Ops.clear();
2843 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05002844 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
alan-bakere9308012019-03-15 10:25:13 -04002845 }
2846
David Neto862b7d82018-06-14 18:48:37 -04002847 // Generate NonWritable and NonReadable
2848 switch (info->arg_kind) {
2849 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002850 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002851 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2852 clspv::AddressSpace::Constant) {
2853 Ops.clear();
2854 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
SJW69939d52020-04-16 07:29:07 -05002855 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002856 }
David Neto862b7d82018-06-14 18:48:37 -04002857 break;
David Neto862b7d82018-06-14 18:48:37 -04002858 case clspv::ArgKind::WriteOnlyImage:
2859 Ops.clear();
2860 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
SJW69939d52020-04-16 07:29:07 -05002861 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002862 break;
2863 default:
2864 break;
David Neto22f144c2017-06-12 14:26:21 -04002865 }
2866 }
2867}
2868
Kévin Petitbbbda972020-03-03 19:16:31 +00002869namespace {
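// Helpers implementing the scalar, base, and extended alignment rules used to
// validate explicit buffer and push-constant layouts against the Vulkan
// "Shader Resource Interface" rules (see isValidExplicitLayout below).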
2870
2871bool isScalarType(Type *type) {
2872 return type->isIntegerTy() || type->isFloatTy();
2873}
2874
2875uint64_t structAlignment(StructType *type,
2876 std::function<uint64_t(Type *)> alignFn) {
2877 uint64_t maxAlign = 1;
2878 for (unsigned i = 0; i < type->getStructNumElements(); i++) {
2879 uint64_t align = alignFn(type->getStructElementType(i));
2880 maxAlign = std::max(align, maxAlign);
2881 }
2882 return maxAlign;
2883}
2884
2885uint64_t scalarAlignment(Type *type) {
2886 // A scalar of size N has a scalar alignment of N.
2887 if (isScalarType(type)) {
2888 return type->getScalarSizeInBits() / 8;
2889 }
2890
2891 // A vector or matrix type has a scalar alignment equal to that of its
2892 // component type.
2893 if (type->isVectorTy()) {
2894 return scalarAlignment(type->getVectorElementType());
2895 }
2896
2897 // An array type has a scalar alignment equal to that of its element type.
2898 if (type->isArrayTy()) {
2899 return scalarAlignment(type->getArrayElementType());
2900 }
2901
2902 // A structure has a scalar alignment equal to the largest scalar alignment of
2903 // any of its members.
2904 if (type->isStructTy()) {
2905 return structAlignment(cast<StructType>(type), scalarAlignment);
2906 }
2907
2908 llvm_unreachable("Unsupported type");
2909}
2910
2911uint64_t baseAlignment(Type *type) {
2912 // A scalar has a base alignment equal to its scalar alignment.
2913 if (isScalarType(type)) {
2914 return scalarAlignment(type);
2915 }
2916
2917 if (type->isVectorTy()) {
2918 unsigned numElems = type->getVectorNumElements();
2919
2920 // A two-component vector has a base alignment equal to twice its scalar
2921 // alignment.
2922 if (numElems == 2) {
2923 return 2 * scalarAlignment(type);
2924 }
2925 // A three- or four-component vector has a base alignment equal to four
2926 // times its scalar alignment.
2927 if ((numElems == 3) || (numElems == 4)) {
2928 return 4 * scalarAlignment(type);
2929 }
2930 }
2931
2932 // An array has a base alignment equal to the base alignment of its element
2933 // type.
2934 if (type->isArrayTy()) {
2935 return baseAlignment(type->getArrayElementType());
2936 }
2937
2938 // A structure has a base alignment equal to the largest base alignment of any
2939 // of its members.
2940 if (type->isStructTy()) {
2941 return structAlignment(cast<StructType>(type), baseAlignment);
2942 }
2943
2944 // TODO A row-major matrix of C columns has a base alignment equal to the base
2945 // alignment of a vector of C matrix components.
2946 // TODO A column-major matrix has a base alignment equal to the base alignment
2947 // of the matrix column type.
2948
2949 llvm_unreachable("Unsupported type");
2950}
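// baseAlignment example: a float scalar is 4-byte aligned, a 2-element float
// vector is 8-byte aligned, and a 3- or 4-element float vector is 16-byte
// aligned.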
2951
2952uint64_t extendedAlignment(Type *type) {
2953 // A scalar, vector or matrix type has an extended alignment equal to its base
2954 // alignment.
2955 // TODO matrix type
2956 if (isScalarType(type) || type->isVectorTy()) {
2957 return baseAlignment(type);
2958 }
2959
2960 // An array or structure type has an extended alignment equal to the largest
2961 // extended alignment of any of its members, rounded up to a multiple of 16
2962 if (type->isStructTy()) {
2963 auto salign = structAlignment(cast<StructType>(type), extendedAlignment);
2964 return alignTo(salign, 16);
2965 }
2966
2967 if (type->isArrayTy()) {
2968 auto salign = extendedAlignment(type->getArrayElementType());
2969 return alignTo(salign, 16);
2970 }
2971
2972 llvm_unreachable("Unsupported type");
2973}
2974
2975uint64_t standardAlignment(Type *type, spv::StorageClass sclass) {
2976 // If the scalarBlockLayout feature is enabled on the device then every member
2977 // must be aligned according to its scalar alignment
2978 if (clspv::Option::ScalarBlockLayout()) {
2979 return scalarAlignment(type);
2980 }
2981
2982 // All vectors must be aligned according to their scalar alignment
2983 if (type->isVectorTy()) {
2984 return scalarAlignment(type);
2985 }
2986
2987 // If the uniformBufferStandardLayout feature is not enabled on the device,
2988 // then any member of an OpTypeStruct with a storage class of Uniform and a
2989 // decoration of Block must be aligned according to its extended alignment.
2990 if (!clspv::Option::Std430UniformBufferLayout() &&
2991 sclass == spv::StorageClassUniform) {
2992 return extendedAlignment(type);
2993 }
2994
2995 // Every other member must be aligned according to its base alignment
2996 return baseAlignment(type);
2997}
2998
2999bool improperlyStraddles(const DataLayout &DL, Type *type, unsigned offset) {
3000 assert(type->isVectorTy());
3001
3002 auto size = DL.getTypeStoreSize(type);
3003
3004 // It is a vector with total size less than or equal to 16 bytes, and has
3005 // Offset decorations placing its first byte at F and its last byte at L,
3006 // where floor(F / 16) != floor(L / 16).
3007 if ((size <= 16) && (offset % 16 + size > 16)) {
3008 return true;
3009 }
3010
3011 // It is a vector with total size greater than 16 bytes and has its Offset
3012 // decorations placing its first byte at a non-integer multiple of 16
3013 if ((size > 16) && (offset % 16 != 0)) {
3014 return true;
3015 }
3016
3017 return false;
3018}
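// improperlyStraddles example: a 16-byte vector at offset 8 occupies bytes
// 8..23 and crosses a 16-byte boundary (improper), while the same vector at
// offset 0 or 16 does not.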
3019
3020// See 14.5 Shader Resource Interface in Vulkan spec
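// Checks the standard layout rules for member Member of STy at Offset in
// storage class SClass: the offset must be a multiple of the member's
// standard alignment, vectors must not improperly straddle 16-byte
// boundaries, and the member must not start in the realignment padding after
// a preceding struct or array member.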
3021bool isValidExplicitLayout(Module &M, StructType *STy, unsigned Member,
3022 spv::StorageClass SClass, unsigned Offset,
3023 unsigned PreviousMemberOffset) {
3024
3025 auto MemberType = STy->getElementType(Member);
3026 auto Align = standardAlignment(MemberType, SClass);
3027 auto &DL = M.getDataLayout();
3028
3029 // The Offset decoration of any member must be a multiple of its alignment
3030 if (Offset % Align != 0) {
3031 return false;
3032 }
3033
3034 // TODO Any ArrayStride or MatrixStride decoration must be a multiple of the
3035 // alignment of the array or matrix as defined above
3036
3037 if (!clspv::Option::ScalarBlockLayout()) {
3038 // Vectors must not improperly straddle, as defined above
3039 if (MemberType->isVectorTy() &&
3040 improperlyStraddles(DL, MemberType, Offset)) {
3041 return false;
3042 }
3043
3044 // The Offset decoration of a member must not place it between the end
3045 // of a structure or an array and the next multiple of the alignment of that
3046 // structure or array
3047 if (Member > 0) {
3048 auto PType = STy->getElementType(Member - 1);
3049 if (PType->isStructTy() || PType->isArrayTy()) {
3050 auto PAlign = standardAlignment(PType, SClass);
3051 if (Offset - PreviousMemberOffset < PAlign) {
3052 return false;
3053 }
3054 }
3055 }
3056 }
3057
3058 return true;
3059}
3060
3061} // namespace
3062
3063void SPIRVProducerPass::GeneratePushConstantDescriptormapEntries(Module &M) {
3064
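  // For each member of the clspv push-constant struct, record its kind, byte
  // offset, and size in the descriptor map, after checking that the offsets
  // satisfy the push-constant standard layout rules.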
3065 if (auto GV = M.getGlobalVariable(clspv::PushConstantsVariableName())) {
3066 auto const &DL = M.getDataLayout();
3067 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
3068 auto STy = cast<StructType>(GV->getValueType());
3069
3070 for (unsigned i = 0; i < STy->getNumElements(); i++) {
3071 auto pc = static_cast<clspv::PushConstant>(
3072 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
3073 auto memberType = STy->getElementType(i);
3074 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
3075 unsigned previousOffset = 0;
3076 if (i > 0) {
3077 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
3078 }
3079 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
3080 assert(isValidExplicitLayout(M, STy, i, spv::StorageClassPushConstant,
3081 offset, previousOffset));
3082 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
3083 descriptorMapEntries->emplace_back(std::move(data));
3084 }
3085 }
3086}
3087
David Neto22f144c2017-06-12 14:26:21 -04003088void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003089 Module &M = *GV.getParent();
SJW69939d52020-04-16 07:29:07 -05003090 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04003091 ValueMapType &VMap = getValueMap();
3092 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07003093 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04003094
3095 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
3096 Type *Ty = GV.getType();
3097 PointerType *PTy = cast<PointerType>(Ty);
3098
3099 uint32_t InitializerID = 0;
3100
3101 // Workgroup size is handled differently (it goes into a constant)
3102 if (spv::BuiltInWorkgroupSize == BuiltinType) {
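    // If every kernel carries the same reqd_work_group_size metadata, the
    // builtin is initialized with an OpConstantComposite of those values;
    // otherwise three OpSpecConstants (default 1) are created so the host can
    // override the size, and an OpSpecConstantComposite combines them.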
3103 std::vector<bool> HasMDVec;
3104 uint32_t PrevXDimCst = 0xFFFFFFFF;
3105 uint32_t PrevYDimCst = 0xFFFFFFFF;
3106 uint32_t PrevZDimCst = 0xFFFFFFFF;
3107 for (Function &Func : *GV.getParent()) {
3108 if (Func.isDeclaration()) {
3109 continue;
3110 }
3111
3112 // We only need to check kernels.
3113 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
3114 continue;
3115 }
3116
3117 if (const MDNode *MD =
3118 Func.getMetadata("reqd_work_group_size")) {
3119 uint32_t CurXDimCst = static_cast<uint32_t>(
3120 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3121 uint32_t CurYDimCst = static_cast<uint32_t>(
3122 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3123 uint32_t CurZDimCst = static_cast<uint32_t>(
3124 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3125
3126 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
3127 PrevZDimCst == 0xFFFFFFFF) {
3128 PrevXDimCst = CurXDimCst;
3129 PrevYDimCst = CurYDimCst;
3130 PrevZDimCst = CurZDimCst;
3131 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
3132 CurZDimCst != PrevZDimCst) {
3133 llvm_unreachable(
3134 "reqd_work_group_size must be the same across all kernels");
3135 } else {
3136 continue;
3137 }
3138
3139 //
3140 // Generate OpConstantComposite.
3141 //
3142 // Ops[0] : Result Type ID
3143 // Ops[1] : Constant size for x dimension.
3144 // Ops[2] : Constant size for y dimension.
3145 // Ops[3] : Constant size for z dimension.
3146 SPIRVOperandList Ops;
3147
3148 uint32_t XDimCstID =
3149 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
3150 uint32_t YDimCstID =
3151 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
3152 uint32_t ZDimCstID =
3153 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
3154
3155 InitializerID = nextID;
3156
David Neto257c3892018-04-11 13:19:45 -04003157 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3158 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003159
David Neto87846742018-04-11 17:36:22 -04003160 auto *Inst =
3161 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003162 SPIRVInstList.push_back(Inst);
3163
3164 HasMDVec.push_back(true);
3165 } else {
3166 HasMDVec.push_back(false);
3167 }
3168 }
3169
3170 // Check all kernels have same definitions for work_group_size.
3171 bool HasMD = false;
3172 if (!HasMDVec.empty()) {
3173 HasMD = HasMDVec[0];
3174 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3175 if (HasMD != HasMDVec[i]) {
3176 llvm_unreachable(
3177 "Kernels should have consistent work group size definition");
3178 }
3179 }
3180 }
3181
3182 // If all kernels do not have metadata for reqd_work_group_size, generate
3183 // OpSpecConstants for x/y/z dimension.
3184 if (!HasMD) {
3185 //
3186 // Generate OpSpecConstants for x/y/z dimension.
3187 //
3188 // Ops[0] : Result Type ID
3189 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3190 uint32_t XDimCstID = 0;
3191 uint32_t YDimCstID = 0;
3192 uint32_t ZDimCstID = 0;
3193
David Neto22f144c2017-06-12 14:26:21 -04003194 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04003195 uint32_t result_type_id =
alan-baker8eb435a2020-04-08 00:42:06 -04003196 lookupType(Ty->getPointerElementType()->getVectorElementType());
David Neto22f144c2017-06-12 14:26:21 -04003197
David Neto257c3892018-04-11 13:19:45 -04003198 // X Dimension
3199 Ops << MkId(result_type_id) << MkNum(1);
3200 XDimCstID = nextID++;
3201 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003202 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003203
3204 // Y Dimension
3205 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003206 Ops << MkId(result_type_id) << MkNum(1);
3207 YDimCstID = nextID++;
3208 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003209 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003210
3211 // Z Dimension
3212 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003213 Ops << MkId(result_type_id) << MkNum(1);
3214 ZDimCstID = nextID++;
3215 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003216 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003217
David Neto257c3892018-04-11 13:19:45 -04003218 BuiltinDimVec.push_back(XDimCstID);
3219 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003220 BuiltinDimVec.push_back(ZDimCstID);
3221
David Neto22f144c2017-06-12 14:26:21 -04003222 //
3223 // Generate OpSpecConstantComposite.
3224 //
3225 // Ops[0] : Result Type ID
3226 // Ops[1] : Constant size for x dimension.
3227 // Ops[2] : Constant size for y dimension.
3228 // Ops[3] : Constant size for z dimension.
3229 InitializerID = nextID;
3230
3231 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003232 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3233 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003234
David Neto87846742018-04-11 17:36:22 -04003235 auto *Inst =
3236 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003237 SPIRVInstList.push_back(Inst);
3238 }
3239 }
3240
David Neto22f144c2017-06-12 14:26:21 -04003241 VMap[&GV] = nextID;
3242
3243 //
3244 // Generate OpVariable.
3245 //
3246 // GIDOps[0] : Result Type ID
3247 // GIDOps[1] : Storage Class
3248 SPIRVOperandList Ops;
3249
David Neto85082642018-03-24 06:55:20 -07003250 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003251 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003252
David Neto85082642018-03-24 06:55:20 -07003253 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003254 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003255 clspv::Option::ModuleConstantsInStorageBuffer();
3256
Kévin Petit23d5f182019-08-13 16:21:29 +01003257 if (GV.hasInitializer()) {
3258 auto GVInit = GV.getInitializer();
3259 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3260 assert(VMap.count(GVInit) == 1);
3261 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003262 }
3263 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003264
3265 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003266 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003267 Ops << MkId(InitializerID);
3268 }
David Neto85082642018-03-24 06:55:20 -07003269 const uint32_t var_id = nextID++;
3270
David Neto87846742018-04-11 17:36:22 -04003271 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003272 SPIRVInstList.push_back(Inst);
3273
SJW69939d52020-04-16 07:29:07 -05003274 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
David Neto22f144c2017-06-12 14:26:21 -04003275 // If we have a builtin.
3276 if (spv::BuiltInMax != BuiltinType) {
David Neto22f144c2017-06-12 14:26:21 -04003277 //
3278 // Generate OpDecorate.
3279 //
3280 // DOps[0] = Target ID
3281 // DOps[1] = Decoration (Builtin)
3282 // DOps[2] = BuiltIn ID
3283 uint32_t ResultID;
3284
3285 // WorkgroupSize is different: we decorate the constant composite that has
3286 // its value, rather than the variable that we use to access the value.
3287 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3288 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003289 // Save both the value and variable IDs for later.
3290 WorkgroupSizeValueID = InitializerID;
3291 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003292 } else {
3293 ResultID = VMap[&GV];
3294 }
3295
3296 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003297 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3298 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003299
David Neto87846742018-04-11 17:36:22 -04003300 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
SJW69939d52020-04-16 07:29:07 -05003301 Annotations.push_back(DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003302 } else if (module_scope_constant_external_init) {
3303 // This module scope constant is initialized from a storage buffer with data
3304 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003305 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003306
David Neto862b7d82018-06-14 18:48:37 -04003307 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003308 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3309 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003310 std::string hexbytes;
3311 llvm::raw_string_ostream str(hexbytes);
3312 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003313 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3314 str.str()};
3315 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3316 0);
David Neto85082642018-03-24 06:55:20 -07003317
David Neto85082642018-03-24 06:55:20 -07003318 SPIRVOperandList DOps;
David Neto85082642018-03-24 06:55:20 -07003319
3320 // OpDecorate %var DescriptorSet <descriptor_set>
David Neto257c3892018-04-11 13:19:45 -04003321 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3322 << MkNum(descriptor_set);
SJW69939d52020-04-16 07:29:07 -05003323 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
3324
3325 // OpDecorate %var Binding <binding>
3326 DOps.clear();
3327 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3328 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003329 }
3330}
3331
David Netoc6f3ab22018-04-06 18:02:31 -04003332void SPIRVProducerPass::GenerateWorkgroupVars() {
SJW69939d52020-04-16 07:29:07 -05003333 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
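  // Emit one Workgroup-storage-class OpVariable per pointer-to-local kernel
  // argument, using the spec-constant-sized array types created earlier.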
Alan Baker202c8c72018-08-13 13:47:44 -04003334 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3335 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003336 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003337
3338 // Generate OpVariable.
3339 //
3340 // GIDOps[0] : Result Type ID
3341 // GIDOps[1] : Storage Class
3342 SPIRVOperandList Ops;
3343 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3344
3345 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003346 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003347 }
3348}
3349
David Neto862b7d82018-06-14 18:48:37 -04003350void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3351 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003352 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3353 return;
3354 }
Kévin Petit717f8572020-04-06 17:31:53 +01003355 // Add a kernel declaration entry for this kernel.
3356 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
3357 F.getName().str()};
3358 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
3359
David Neto862b7d82018-06-14 18:48:37 -04003360 // Gather the list of resources that are used by this function's arguments.
3361 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3362
alan-bakerf5e5f692018-11-27 08:33:24 -05003363 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3364 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003365 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003366 std::string kind =
3367 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3368 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003369 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003370 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003371 };
3372
3373 auto *fty = F.getType()->getPointerElementType();
3374 auto *func_ty = dyn_cast<FunctionType>(fty);
3375
alan-baker038e9242019-04-19 22:14:41 -04003376 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003377 // If an argument maps to a resource variable, then get descriptor set and
3378 // binding from the resource variable. Other info comes from the metadata.
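  // Each kernel_arg_map operand is a tuple of (argument name, original index,
  // remapped index, byte offset, argument size, argument kind, spec id).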
3379 const auto *arg_map = F.getMetadata("kernel_arg_map");
3380 if (arg_map) {
3381 for (const auto &arg : arg_map->operands()) {
3382 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003383 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003384 const auto name =
3385 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3386 const auto old_index =
3387 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3388 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003389 const size_t new_index = static_cast<size_t>(
3390 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003391 const auto offset =
3392 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003393 const auto arg_size =
3394 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003395 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003396 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003397 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003398 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003399
3400 uint32_t descriptor_set = 0;
3401 uint32_t binding = 0;
3402 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003403 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3404 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003405 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003406 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003407 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003408 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3409 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3410 DL));
David Neto862b7d82018-06-14 18:48:37 -04003411 } else {
3412 auto *info = resource_var_at_index[new_index];
3413 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003414 descriptor_set = info->descriptor_set;
3415 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003416 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003417 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3418 binding);
David Neto862b7d82018-06-14 18:48:37 -04003419 }
3420 } else {
3421 // There is no argument map.
3422 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003423 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003424
3425 SmallVector<Argument *, 4> arguments;
3426 for (auto &arg : F.args()) {
3427 arguments.push_back(&arg);
3428 }
3429
3430 unsigned arg_index = 0;
3431 for (auto *info : resource_var_at_index) {
3432 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003433 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003434 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04003435 if (info->arg_kind == clspv::ArgKind::Pod ||
3436 info->arg_kind == clspv::ArgKind::PodUBO ||
3437 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003438 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003439 }
3440
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003441 // Local pointer arguments are unused in this case. Offset is always
3442 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003443 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003444 F.getName().str(),
3445 arg->getName().str(),
3446 arg_index,
3447 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3448 0,
3449 0,
3450 0,
3451 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003452 descriptorMapEntries->emplace_back(std::move(kernel_data),
3453 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003454 }
3455 arg_index++;
3456 }
3457 // Generate mappings for pointer-to-local arguments.
3458 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3459 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003460 auto where = LocalArgSpecIds.find(arg);
3461 if (where != LocalArgSpecIds.end()) {
3462 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003463 // Pod argument members are unused in this case.
3464 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003465 F.getName().str(),
3466 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003467 arg_index,
3468 ArgKind::Local,
3469 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003470 static_cast<uint32_t>(
3471 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003472 0,
3473 0};
3474 // Pointer-to-local arguments do not utilize descriptor set and binding.
3475 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003476 }
3477 }
3478 }
3479}
3480
David Neto22f144c2017-06-12 14:26:21 -04003481void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
SJW69939d52020-04-16 07:29:07 -05003482 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003483 ValueMapType &VMap = getValueMap();
3484 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003485 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3486 auto &GlobalConstArgSet = getGlobalConstArgSet();
3487
3488 FunctionType *FTy = F.getFunctionType();
3489
3490 //
David Neto22f144c2017-06-12 14:26:21 -04003491 // Generate OpFunction.
3492 //
3493
3494 // FOps[0] : Result Type ID
3495 // FOps[1] : Function Control
3496 // FOps[2] : Function Type ID
3497 SPIRVOperandList FOps;
3498
3499 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003500 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003501
3502 // Check function attributes for SPIRV Function Control.
3503 uint32_t FuncControl = spv::FunctionControlMaskNone;
3504 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3505 FuncControl |= spv::FunctionControlInlineMask;
3506 }
3507 if (F.hasFnAttribute(Attribute::NoInline)) {
3508 FuncControl |= spv::FunctionControlDontInlineMask;
3509 }
3510 // TODO: Check llvm attribute for Function Control Pure.
3511 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3512 FuncControl |= spv::FunctionControlPureMask;
3513 }
3514 // TODO: Check llvm attribute for Function Control Const.
3515 if (F.hasFnAttribute(Attribute::ReadNone)) {
3516 FuncControl |= spv::FunctionControlConstMask;
3517 }
3518
David Neto257c3892018-04-11 13:19:45 -04003519 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003520
3521 uint32_t FTyID;
3522 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
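    // Kernel arguments are accessed through resource variables rather than
    // function parameters, so a kernel's OpTypeFunction takes no parameters.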
3523 SmallVector<Type *, 4> NewFuncParamTys;
3524 FunctionType *NewFTy =
3525 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3526 FTyID = lookupType(NewFTy);
3527 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003528 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003529 if (GlobalConstFuncTyMap.count(FTy)) {
3530 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3531 } else {
3532 FTyID = lookupType(FTy);
3533 }
3534 }
3535
David Neto257c3892018-04-11 13:19:45 -04003536 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003537
3538 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3539 EntryPoints.push_back(std::make_pair(&F, nextID));
3540 }
3541
3542 VMap[&F] = nextID;
3543
David Neto482550a2018-03-24 05:21:07 -07003544 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003545 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3546 }
David Neto22f144c2017-06-12 14:26:21 -04003547 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003548 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003549 SPIRVInstList.push_back(FuncInst);
3550
3551 //
3552 // Generate OpFunctionParameter for Normal function.
3553 //
3554
3555 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003556
David Neto22f144c2017-06-12 14:26:21 -04003557 // Iterate Argument for name instead of param type from function type.
3558 unsigned ArgIdx = 0;
3559 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003560 uint32_t param_id = nextID++;
3561 VMap[&Arg] = param_id;
3562
3563 if (CalledWithCoherentResource(Arg)) {
3564 // If the arg is passed a coherent resource ever, then decorate this
3565 // parameter with Coherent too.
3566 SPIRVOperandList decoration_ops;
3567 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05003568 getSPIRVInstList(kAnnotations)
3569 .push_back(new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003570 }
David Neto22f144c2017-06-12 14:26:21 -04003571
3572 // ParamOps[0] : Result Type ID
3573 SPIRVOperandList ParamOps;
3574
3575 // Find SPIRV instruction for parameter type.
3576 uint32_t ParamTyID = lookupType(Arg.getType());
3577 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3578 if (GlobalConstFuncTyMap.count(FTy)) {
3579 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3580 Type *EleTy = PTy->getPointerElementType();
3581 Type *ArgTy =
3582 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3583 ParamTyID = lookupType(ArgTy);
3584 GlobalConstArgSet.insert(&Arg);
3585 }
3586 }
3587 }
David Neto257c3892018-04-11 13:19:45 -04003588 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003589
3590 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003591 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003592 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003593 SPIRVInstList.push_back(ParamInst);
3594
3595 ArgIdx++;
3596 }
3597 }
3598}
3599
alan-bakerb6b09dc2018-11-08 16:59:28 -05003600void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003601 EntryPointVecType &EntryPoints = getEntryPointVec();
3602 ValueMapType &VMap = getValueMap();
3603 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
David Neto22f144c2017-06-12 14:26:21 -04003604 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3605
SJW69939d52020-04-16 07:29:07 -05003606 SPIRVInstructionList &SPIRVCapabilities = getSPIRVInstList(kCapabilities);
David Neto22f144c2017-06-12 14:26:21 -04003607 //
3608 // Generate OpCapability
3609 //
3610 // TODO: Which llvm information is mapped to SPIRV Capability?
3611
3612 // Ops[0] = Capability
3613 SPIRVOperandList Ops;
3614
David Neto87846742018-04-11 17:36:22 -04003615 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003616 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
SJW69939d52020-04-16 07:29:07 -05003617 SPIRVCapabilities.push_back(CapInst);
David Neto22f144c2017-06-12 14:26:21 -04003618
alan-bakerf906d2b2019-12-10 11:26:23 -05003619 bool write_without_format = false;
3620 bool sampled_1d = false;
3621 bool image_1d = false;
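  // Scan the generated types and emit the capabilities they require
  // (e.g. Int8/Int16/Int64, Float16/Float64, and the image-related
  // capabilities detected below).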
David Neto22f144c2017-06-12 14:26:21 -04003622 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003623 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3624 // Generate OpCapability for i8 type.
SJW69939d52020-04-16 07:29:07 -05003625 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003626 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003627 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003628 // Generate OpCapability for i16 type.
SJW69939d52020-04-16 07:29:07 -05003629 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003630 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003631 } else if (Ty->isIntegerTy(64)) {
3632 // Generate OpCapability for i64 type.
SJW69939d52020-04-16 07:29:07 -05003633 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003634 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003635 } else if (Ty->isHalfTy()) {
3636 // Generate OpCapability for half type.
SJW69939d52020-04-16 07:29:07 -05003637 SPIRVCapabilities.push_back(new SPIRVInstruction(
3638 spv::OpCapability, MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003639 } else if (Ty->isDoubleTy()) {
3640 // Generate OpCapability for double type.
SJW69939d52020-04-16 07:29:07 -05003641 SPIRVCapabilities.push_back(new SPIRVInstruction(
3642 spv::OpCapability, MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003643 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3644 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003645 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003646 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003647 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003648 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003649 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003650 write_without_format = true;
3651 }
3652 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003653 STy->getName().startswith("opencl.image1d_wo_t") ||
3654 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3655 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003656 if (STy->getName().contains(".sampled"))
3657 sampled_1d = true;
3658 else
3659 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003660 }
3661 }
3662 }
3663 }
3664
alan-bakerf906d2b2019-12-10 11:26:23 -05003665 if (write_without_format) {
3666 // Generate OpCapability for write only image type.
SJW69939d52020-04-16 07:29:07 -05003667 SPIRVCapabilities.push_back(new SPIRVInstruction(
3668 spv::OpCapability,
3669 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003670 }
3671 if (image_1d) {
3672 // Generate OpCapability for unsampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003673 SPIRVCapabilities.push_back(new SPIRVInstruction(
3674 spv::OpCapability, {MkNum(spv::CapabilityImage1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003675 } else if (sampled_1d) {
3676 // Generate OpCapability for sampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003677 SPIRVCapabilities.push_back(new SPIRVInstruction(
3678 spv::OpCapability, {MkNum(spv::CapabilitySampled1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003679 }
3680
David Neto5c22a252018-03-15 16:07:41 -04003681 { // OpCapability ImageQuery
3682 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003683 for (const auto &SymVal : module.getValueSymbolTable()) {
3684 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
SJW173c7e92020-03-16 08:44:47 -05003685 if (IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003686 hasImageQuery = true;
3687 break;
3688 }
David Neto5c22a252018-03-15 16:07:41 -04003689 }
3690 }
alan-bakerf67468c2019-11-25 15:51:49 -05003691
David Neto5c22a252018-03-15 16:07:41 -04003692 if (hasImageQuery) {
SJW69939d52020-04-16 07:29:07 -05003693 SPIRVCapabilities.push_back(new SPIRVInstruction(
3694 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)}));
David Neto5c22a252018-03-15 16:07:41 -04003695 }
3696 }
3697
David Neto22f144c2017-06-12 14:26:21 -04003698 if (hasVariablePointers()) {
3699 //
David Neto22f144c2017-06-12 14:26:21 -04003700 // Generate OpCapability.
3701 //
3702 // Ops[0] = Capability
3703 //
3704 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003705 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003706
SJW69939d52020-04-16 07:29:07 -05003707 SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003708 } else if (hasVariablePointersStorageBuffer()) {
3709 //
3710 // Generate OpCapability.
3711 //
3712 // Ops[0] = Capability
3713 //
3714 Ops.clear();
3715 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003716
SJW69939d52020-04-16 07:29:07 -05003717 SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003718 }
3719
SJW69939d52020-04-16 07:29:07 -05003720 SPIRVInstructionList &SPIRVExtensions = getSPIRVInstList(kExtensions);
alan-baker5b86ed72019-02-15 08:26:50 -05003721 // Always add the storage buffer extension
3722 {
David Neto22f144c2017-06-12 14:26:21 -04003723 //
3724 // Generate OpExtension.
3725 //
3726 // Ops[0] = Name (Literal String)
3727 //
alan-baker5b86ed72019-02-15 08:26:50 -05003728 auto *ExtensionInst = new SPIRVInstruction(
3729 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
SJW69939d52020-04-16 07:29:07 -05003730 SPIRVExtensions.push_back(ExtensionInst);
alan-baker5b86ed72019-02-15 08:26:50 -05003731 }
David Neto22f144c2017-06-12 14:26:21 -04003732
alan-baker5b86ed72019-02-15 08:26:50 -05003733 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3734 //
3735 // Generate OpExtension.
3736 //
3737 // Ops[0] = Name (Literal String)
3738 //
3739 auto *ExtensionInst = new SPIRVInstruction(
3740 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
SJW69939d52020-04-16 07:29:07 -05003741 SPIRVExtensions.push_back(ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003742 }
3743
3744 //
3745 // Generate OpMemoryModel
3746 //
3747 // Memory model for Vulkan will always be GLSL450.
3748
3749 // Ops[0] = Addressing Model
3750 // Ops[1] = Memory Model
3751 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003752 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003753
David Neto87846742018-04-11 17:36:22 -04003754 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
SJW69939d52020-04-16 07:29:07 -05003755 getSPIRVInstList(kMemoryModel).push_back(MemModelInst);
David Neto22f144c2017-06-12 14:26:21 -04003756
SJW69939d52020-04-16 07:29:07 -05003757 SPIRVInstructionList &SPIRVEntryPoints = getSPIRVInstList(kEntryPoints);
David Neto22f144c2017-06-12 14:26:21 -04003758 //
3759 // Generate OpEntryPoint
3760 //
3761 for (auto EntryPoint : EntryPoints) {
3762 // Ops[0] = Execution Model
3763 // Ops[1] = EntryPoint ID
3764 // Ops[2] = Name (Literal String)
3765 // ...
3766 //
3767 // TODO: Do we need to consider Interface ID for forward references???
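    // For a kernel named "foo", the result looks like (IDs are illustrative):
    //   OpEntryPoint GLCompute %foo "foo" %interface_var_0 %interface_var_1 ...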
3768 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003769 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003770 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3771 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003772
David Neto22f144c2017-06-12 14:26:21 -04003773 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003774 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003775 }
3776
David Neto87846742018-04-11 17:36:22 -04003777 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
SJW69939d52020-04-16 07:29:07 -05003778 SPIRVEntryPoints.push_back(EntryPointInst);
David Neto22f144c2017-06-12 14:26:21 -04003779 }
3780
SJW69939d52020-04-16 07:29:07 -05003781 SPIRVInstructionList &SPIRVExecutionModes = getSPIRVInstList(kExecutionModes);
David Neto22f144c2017-06-12 14:26:21 -04003782 for (auto EntryPoint : EntryPoints) {
3783 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3784 ->getMetadata("reqd_work_group_size")) {
3785
3786 if (!BuiltinDimVec.empty()) {
3787 llvm_unreachable(
3788 "Kernels should have consistent work group size definition");
3789 }
3790
3791 //
3792 // Generate OpExecutionMode
3793 //
3794
3795 // Ops[0] = Entry Point ID
3796 // Ops[1] = Execution Mode
3797 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
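      // Illustrative example: a kernel declared with
      //   __attribute__((reqd_work_group_size(8, 4, 1)))
      // produces "OpExecutionMode %kernel LocalSize 8 4 1".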
3798 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003799 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003800
3801 uint32_t XDim = static_cast<uint32_t>(
3802 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3803 uint32_t YDim = static_cast<uint32_t>(
3804 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3805 uint32_t ZDim = static_cast<uint32_t>(
3806 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3807
David Neto257c3892018-04-11 13:19:45 -04003808 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003809
David Neto87846742018-04-11 17:36:22 -04003810 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
SJW69939d52020-04-16 07:29:07 -05003811 SPIRVExecutionModes.push_back(ExecModeInst);
David Neto22f144c2017-06-12 14:26:21 -04003812 }
3813 }
3814
3815 //
3816 // Generate OpSource.
3817 //
3818 // Ops[0] = SourceLanguage ID
3819 // Ops[1] = Version (LiteralNum)
3820 //
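  // Illustrative examples of the mapping below: OpenCL C 2.0 input produces
  // "OpSource OpenCL_C 200", and an unrecognized language falls back to
  // "OpSource Unknown 0".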
3821 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003822 switch (clspv::Option::Language()) {
3823 case clspv::Option::SourceLanguage::OpenCL_C_10:
3824 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3825 break;
3826 case clspv::Option::SourceLanguage::OpenCL_C_11:
3827 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3828 break;
3829 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003830 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003831 break;
3832 case clspv::Option::SourceLanguage::OpenCL_C_20:
3833 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3834 break;
3835 case clspv::Option::SourceLanguage::OpenCL_CPP:
3836 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3837 break;
3838 default:
3839 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3840 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003841 }
David Neto22f144c2017-06-12 14:26:21 -04003842
David Neto87846742018-04-11 17:36:22 -04003843 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
SJW69939d52020-04-16 07:29:07 -05003844 getSPIRVInstList(kDebug).push_back(OpenSourceInst);
David Neto22f144c2017-06-12 14:26:21 -04003845
3846 if (!BuiltinDimVec.empty()) {
SJW69939d52020-04-16 07:29:07 -05003847 SPIRVInstructionList &SPIRVAnnotations = getSPIRVInstList(kAnnotations);
David Neto22f144c2017-06-12 14:26:21 -04003848 //
3849 // Generate OpDecorates for x/y/z dimension.
3850 //
3851 // Ops[0] = Target ID
3852 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003853 // Ops[2] = Specialization Constant ID (Literal Number)
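    // Illustrative result (IDs are placeholders):
    //   OpDecorate %wgsize_x SpecId 0
    //   OpDecorate %wgsize_y SpecId 1
    //   OpDecorate %wgsize_z SpecId 2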
David Neto22f144c2017-06-12 14:26:21 -04003854
3855 // X Dimension
3856 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003857 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
SJW69939d52020-04-16 07:29:07 -05003858 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003859
3860 // Y Dimension
3861 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003862 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
SJW69939d52020-04-16 07:29:07 -05003863 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003864
3865 // Z Dimension
3866 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003867 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
SJW69939d52020-04-16 07:29:07 -05003868 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003869 }
3870}
3871
David Netob6e2e062018-04-25 10:32:06 -04003872void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3873 // Work around a driver bug. Initializers on Private variables might not
3874 // work. So the start of the kernel should store the initializer value to the
3875 // variables. Yes, *every* entry point pays this cost if *any* entry point
3876 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3877 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003878 // TODO(dneto): Remove this at some point once fixed drivers are widely
3879 // available.
David Netob6e2e062018-04-25 10:32:06 -04003880 if (WorkgroupSizeVarID) {
3881 assert(WorkgroupSizeValueID);
3882
3883 SPIRVOperandList Ops;
3884 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3885
3886 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
SJW69939d52020-04-16 07:29:07 -05003887 getSPIRVInstList(kFunctions).push_back(Inst);
David Netob6e2e062018-04-25 10:32:06 -04003888 }
3889}
3890
David Neto22f144c2017-06-12 14:26:21 -04003891void SPIRVProducerPass::GenerateFuncBody(Function &F) {
SJW69939d52020-04-16 07:29:07 -05003892 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003893 ValueMapType &VMap = getValueMap();
3894
David Netob6e2e062018-04-25 10:32:06 -04003895 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003896
3897 for (BasicBlock &BB : F) {
3898 // Register BasicBlock to ValueMap.
3899 VMap[&BB] = nextID;
3900
3901 //
3902 // Generate OpLabel for Basic Block.
3903 //
3904 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003905 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003906 SPIRVInstList.push_back(Inst);
3907
David Neto6dcd4712017-06-23 11:06:47 -04003908 // OpVariable instructions must come first.
3909 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003910 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3911 // Allocating a pointer requires variable pointers.
3912 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003913 setVariablePointersCapabilities(
3914 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003915 }
David Neto6dcd4712017-06-23 11:06:47 -04003916 GenerateInstruction(I);
3917 }
3918 }
3919
David Neto22f144c2017-06-12 14:26:21 -04003920 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003921 if (clspv::Option::HackInitializers()) {
3922 GenerateEntryPointInitialStores();
3923 }
David Neto22f144c2017-06-12 14:26:21 -04003924 }
3925
3926 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003927 if (!isa<AllocaInst>(I)) {
3928 GenerateInstruction(I);
3929 }
David Neto22f144c2017-06-12 14:26:21 -04003930 }
3931 }
3932}
3933
3934spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3935 const std::map<CmpInst::Predicate, spv::Op> Map = {
3936 {CmpInst::ICMP_EQ, spv::OpIEqual},
3937 {CmpInst::ICMP_NE, spv::OpINotEqual},
3938 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3939 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3940 {CmpInst::ICMP_ULT, spv::OpULessThan},
3941 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3942 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3943 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3944 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3945 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3946 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3947 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3948 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3949 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3950 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3951 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3952 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3953 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3954 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3955 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3956 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3957 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3958
3959 assert(0 != Map.count(I->getPredicate()));
3960
3961 return Map.at(I->getPredicate());
3962}
3963
3964spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3965 const std::map<unsigned, spv::Op> Map{
3966 {Instruction::Trunc, spv::OpUConvert},
3967 {Instruction::ZExt, spv::OpUConvert},
3968 {Instruction::SExt, spv::OpSConvert},
3969 {Instruction::FPToUI, spv::OpConvertFToU},
3970 {Instruction::FPToSI, spv::OpConvertFToS},
3971 {Instruction::UIToFP, spv::OpConvertUToF},
3972 {Instruction::SIToFP, spv::OpConvertSToF},
3973 {Instruction::FPTrunc, spv::OpFConvert},
3974 {Instruction::FPExt, spv::OpFConvert},
3975 {Instruction::BitCast, spv::OpBitcast}};
3976
3977 assert(0 != Map.count(I.getOpcode()));
3978
3979 return Map.at(I.getOpcode());
3980}
3981
3982spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003983 if (I.getType()->isIntOrIntVectorTy(1)) {
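  // i1 values are booleans in SPIR-V, so bitwise operators on them must be
  // lowered to the logical forms, e.g. "xor i1 %a, %b" -> OpLogicalNotEqual.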
David Neto22f144c2017-06-12 14:26:21 -04003984 switch (I.getOpcode()) {
3985 default:
3986 break;
3987 case Instruction::Or:
3988 return spv::OpLogicalOr;
3989 case Instruction::And:
3990 return spv::OpLogicalAnd;
3991 case Instruction::Xor:
3992 return spv::OpLogicalNotEqual;
3993 }
3994 }
3995
alan-bakerb6b09dc2018-11-08 16:59:28 -05003996 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003997 {Instruction::Add, spv::OpIAdd},
3998 {Instruction::FAdd, spv::OpFAdd},
3999 {Instruction::Sub, spv::OpISub},
4000 {Instruction::FSub, spv::OpFSub},
4001 {Instruction::Mul, spv::OpIMul},
4002 {Instruction::FMul, spv::OpFMul},
4003 {Instruction::UDiv, spv::OpUDiv},
4004 {Instruction::SDiv, spv::OpSDiv},
4005 {Instruction::FDiv, spv::OpFDiv},
4006 {Instruction::URem, spv::OpUMod},
4007 {Instruction::SRem, spv::OpSRem},
4008 {Instruction::FRem, spv::OpFRem},
4009 {Instruction::Or, spv::OpBitwiseOr},
4010 {Instruction::Xor, spv::OpBitwiseXor},
4011 {Instruction::And, spv::OpBitwiseAnd},
4012 {Instruction::Shl, spv::OpShiftLeftLogical},
4013 {Instruction::LShr, spv::OpShiftRightLogical},
4014 {Instruction::AShr, spv::OpShiftRightArithmetic}};
4015
4016 assert(0 != Map.count(I.getOpcode()));
4017
4018 return Map.at(I.getOpcode());
4019}
4020
4021void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
SJW69939d52020-04-16 07:29:07 -05004022 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04004023 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04004024 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4025 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
4026
4027 // Register Instruction to ValueMap.
4028 if (0 == VMap[&I]) {
4029 VMap[&I] = nextID;
4030 }
4031
4032 switch (I.getOpcode()) {
4033 default: {
4034 if (Instruction::isCast(I.getOpcode())) {
4035 //
4036 // Generate SPIRV instructions for cast operators.
4037 //
4038
David Netod2de94a2017-08-28 17:27:47 -04004039 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004040 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04004041 auto toI8 = Ty == Type::getInt8Ty(Context);
4042 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04004043 // Handle zext, sext and uitofp with i1 type specially.
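      // Illustrative example: "%r = zext i1 %b to i32" becomes
      //   %r = OpSelect %uint %b %uint_1 %uint_0
      // sext selects -1 instead of 1, and uitofp selects 1.0f / 0.0f.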
4044 if ((I.getOpcode() == Instruction::ZExt ||
4045 I.getOpcode() == Instruction::SExt ||
4046 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05004047 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004048 //
4049 // Generate OpSelect.
4050 //
4051
4052 // Ops[0] = Result Type ID
4053 // Ops[1] = Condition ID
4054 // Ops[2] = True Constant ID
4055 // Ops[3] = False Constant ID
4056 SPIRVOperandList Ops;
4057
David Neto257c3892018-04-11 13:19:45 -04004058 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004059
David Neto22f144c2017-06-12 14:26:21 -04004060 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004061 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04004062
4063 uint32_t TrueID = 0;
4064 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004065 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04004066 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004067 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04004068 } else {
4069 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
4070 }
David Neto257c3892018-04-11 13:19:45 -04004071 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04004072
4073 uint32_t FalseID = 0;
4074 if (I.getOpcode() == Instruction::ZExt) {
4075 FalseID = VMap[Constant::getNullValue(I.getType())];
4076 } else if (I.getOpcode() == Instruction::SExt) {
4077 FalseID = VMap[Constant::getNullValue(I.getType())];
4078 } else {
4079 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
4080 }
David Neto257c3892018-04-11 13:19:45 -04004081 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04004082
David Neto87846742018-04-11 17:36:22 -04004083 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004084 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05004085 } else if (!clspv::Option::Int8Support() &&
4086 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004087 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4088 // 8 bits.
4089 // Before:
4090 // %result = trunc i32 %a to i8
4091 // After
4092 // %result = OpBitwiseAnd %uint %a %uint_255
4093
4094 SPIRVOperandList Ops;
4095
David Neto257c3892018-04-11 13:19:45 -04004096 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04004097
4098 Type *UintTy = Type::getInt32Ty(Context);
4099 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04004100 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04004101
David Neto87846742018-04-11 17:36:22 -04004102 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04004103 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004104 } else {
4105 // Ops[0] = Result Type ID
4106 // Ops[1] = Source Value ID
4107 SPIRVOperandList Ops;
4108
David Neto257c3892018-04-11 13:19:45 -04004109 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004110
David Neto87846742018-04-11 17:36:22 -04004111 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004112 SPIRVInstList.push_back(Inst);
4113 }
4114 } else if (isa<BinaryOperator>(I)) {
4115 //
4116 // Generate SPIRV instructions for binary operators.
4117 //
4118
4119 // Handle xor with i1 type specially.
4120 if (I.getOpcode() == Instruction::Xor &&
4121 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004122 ((isa<ConstantInt>(I.getOperand(0)) &&
4123 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4124 (isa<ConstantInt>(I.getOperand(1)) &&
4125 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004126 //
4127 // Generate OpLogicalNot.
4128 //
4129 // Ops[0] = Result Type ID
4130 // Ops[1] = Operand
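        // Illustrative example: "%r = xor i1 %b, true" lowers to
        //   %r = OpLogicalNot %bool %b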
4131 SPIRVOperandList Ops;
4132
David Neto257c3892018-04-11 13:19:45 -04004133 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004134
4135 Value *CondV = I.getOperand(0);
4136 if (isa<Constant>(I.getOperand(0))) {
4137 CondV = I.getOperand(1);
4138 }
David Neto257c3892018-04-11 13:19:45 -04004139 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04004140
David Neto87846742018-04-11 17:36:22 -04004141 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004142 SPIRVInstList.push_back(Inst);
4143 } else {
4144 // Ops[0] = Result Type ID
4145 // Ops[1] = Operand 0
4146 // Ops[2] = Operand 1
4147 SPIRVOperandList Ops;
4148
David Neto257c3892018-04-11 13:19:45 -04004149 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4150 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004151
David Neto87846742018-04-11 17:36:22 -04004152 auto *Inst =
4153 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004154 SPIRVInstList.push_back(Inst);
4155 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004156 } else if (I.getOpcode() == Instruction::FNeg) {
4157 // The only unary operator.
4158 //
4159 // Ops[0] = Result Type ID
4160 // Ops[1] = Operand 0
4161 SPIRVOperandList ops;
4162
4163 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
4164 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
4165 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004166 } else {
4167 I.print(errs());
4168 llvm_unreachable("Unsupported instruction???");
4169 }
4170 break;
4171 }
4172 case Instruction::GetElementPtr: {
4173 auto &GlobalConstArgSet = getGlobalConstArgSet();
4174
4175 //
4176 // Generate OpAccessChain.
4177 //
4178 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4183
4184 // Ops[0] = Result Type ID
4185 // Ops[1] = Base ID
4186 // Ops[2] ... Ops[n] = Indexes ID
4187 SPIRVOperandList Ops;
4188
alan-bakerb6b09dc2018-11-08 16:59:28 -05004189 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004190 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4191 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4192 // Use pointer type with private address space for global constant.
4193 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004194 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004195 }
David Neto257c3892018-04-11 13:19:45 -04004196
4197 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004198
David Neto862b7d82018-06-14 18:48:37 -04004199 // Generate the base pointer.
4200 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004201
David Neto862b7d82018-06-14 18:48:37 -04004202 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004203
4204 //
4205 // The following rules are used to lower a GEP:
4206 //
David Neto862b7d82018-06-14 18:48:37 -04004207 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
4208 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004209 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4210 // first index.
4211 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4212 // use gep's first index.
4213 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
4214 // gep's first index.
4215 //
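    // Illustrative sketch (hypothetical IR, simplified result names):
    //   %p = getelementptr %S, %S* %base, i32 0, i32 2
    //        -> OpAccessChain %ptr %base %uint_2      (rule 1: leading 0 dropped)
    //   %q = getelementptr i32, i32* %base, i32 %n
    //        -> OpPtrAccessChain %ptr %base %n        (rule 3: first index kept)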
4216 spv::Op Opcode = spv::OpAccessChain;
4217 unsigned offset = 0;
4218 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004219 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004220 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004221 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004222 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004223 }
David Neto862b7d82018-06-14 18:48:37 -04004224 } else {
David Neto22f144c2017-06-12 14:26:21 -04004225 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004226 }
4227
4228 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004229 // Do we need to generate ArrayStride? Check against the GEP result type
4230 // rather than the pointer type of the base because when indexing into
4231 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4232 // for something else in the SPIR-V.
4233 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004234 auto address_space = ResultType->getAddressSpace();
4235 setVariablePointersCapabilities(address_space);
4236 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004237 case spv::StorageClassStorageBuffer:
4238 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004239 // Save the need to generate an ArrayStride decoration. But defer
4240 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004241 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004242 break;
4243 default:
4244 break;
David Neto1a1a0582017-07-07 12:01:44 -04004245 }
David Neto22f144c2017-06-12 14:26:21 -04004246 }
4247
4248 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004249 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004250 }
4251
David Neto87846742018-04-11 17:36:22 -04004252 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004253 SPIRVInstList.push_back(Inst);
4254 break;
4255 }
4256 case Instruction::ExtractValue: {
4257 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4258 // Ops[0] = Result Type ID
4259 // Ops[1] = Composite ID
4260 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4261 SPIRVOperandList Ops;
4262
David Neto257c3892018-04-11 13:19:45 -04004263 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004264
4265 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004266 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004267
4268 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004269 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004270 }
4271
David Neto87846742018-04-11 17:36:22 -04004272 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004273 SPIRVInstList.push_back(Inst);
4274 break;
4275 }
4276 case Instruction::InsertValue: {
4277 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4278 // Ops[0] = Result Type ID
4279 // Ops[1] = Object ID
4280 // Ops[2] = Composite ID
4281 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4282 SPIRVOperandList Ops;
4283
4284 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004285 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004286
4287 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004288 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004289
4290 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004291 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004292
4293 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004294 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004295 }
4296
David Neto87846742018-04-11 17:36:22 -04004297 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004298 SPIRVInstList.push_back(Inst);
4299 break;
4300 }
4301 case Instruction::Select: {
4302 //
4303 // Generate OpSelect.
4304 //
4305
4306 // Ops[0] = Result Type ID
4307 // Ops[1] = Condition ID
4308 // Ops[2] = True Constant ID
4309 // Ops[3] = False Constant ID
4310 SPIRVOperandList Ops;
4311
4312 // Find SPIRV instruction for parameter type.
4313 auto Ty = I.getType();
4314 if (Ty->isPointerTy()) {
4315 auto PointeeTy = Ty->getPointerElementType();
4316 if (PointeeTy->isStructTy() &&
4317 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4318 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004319 } else {
4320 // Selecting between pointers requires variable pointers.
4321 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4322 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4323 setVariablePointers(true);
4324 }
David Neto22f144c2017-06-12 14:26:21 -04004325 }
4326 }
4327
David Neto257c3892018-04-11 13:19:45 -04004328 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4329 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004330
David Neto87846742018-04-11 17:36:22 -04004331 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004332 SPIRVInstList.push_back(Inst);
4333 break;
4334 }
4335 case Instruction::ExtractElement: {
4336 // Handle <4 x i8> type manually.
4337 Type *CompositeTy = I.getOperand(0)->getType();
4338 if (is4xi8vec(CompositeTy)) {
4339 //
4340 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4341 // <4 x i8>.
4342 //
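      // A <4 x i8> vector is packed into one 32-bit word, so, as an
      // illustrative sketch, "extractelement <4 x i8> %v, i32 2" becomes:
      //   %shifted = OpShiftRightLogical %uint %v %uint_16   ; index * 8 bits
      //   %byte    = OpBitwiseAnd %uint %shifted %uint_255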
4343
4344 //
4345 // Generate OpShiftRightLogical
4346 //
4347 // Ops[0] = Result Type ID
4348 // Ops[1] = Operand 0
4349 // Ops[2] = Operand 1
4350 //
4351 SPIRVOperandList Ops;
4352
David Neto257c3892018-04-11 13:19:45 -04004353 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004354
4355 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004356 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004357
4358 uint32_t Op1ID = 0;
4359 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4360 // Handle constant index.
4361 uint64_t Idx = CI->getZExtValue();
4362 Value *ShiftAmount =
4363 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4364 Op1ID = VMap[ShiftAmount];
4365 } else {
4366 // Handle variable index.
4367 SPIRVOperandList TmpOps;
4368
David Neto257c3892018-04-11 13:19:45 -04004369 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4370 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004371
4372 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004373 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004374
4375 Op1ID = nextID;
4376
David Neto87846742018-04-11 17:36:22 -04004377 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004378 SPIRVInstList.push_back(TmpInst);
4379 }
David Neto257c3892018-04-11 13:19:45 -04004380 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004381
4382 uint32_t ShiftID = nextID;
4383
David Neto87846742018-04-11 17:36:22 -04004384 auto *Inst =
4385 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004386 SPIRVInstList.push_back(Inst);
4387
4388 //
4389 // Generate OpBitwiseAnd
4390 //
4391 // Ops[0] = Result Type ID
4392 // Ops[1] = Operand 0
4393 // Ops[2] = Operand 1
4394 //
4395 Ops.clear();
4396
David Neto257c3892018-04-11 13:19:45 -04004397 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004398
4399 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004400 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004401
David Neto9b2d6252017-09-06 15:47:37 -04004402 // Reset mapping for this value to the result of the bitwise and.
4403 VMap[&I] = nextID;
4404
David Neto87846742018-04-11 17:36:22 -04004405 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004406 SPIRVInstList.push_back(Inst);
4407 break;
4408 }
4409
4410 // Ops[0] = Result Type ID
4411 // Ops[1] = Composite ID
4412 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4413 SPIRVOperandList Ops;
4414
David Neto257c3892018-04-11 13:19:45 -04004415 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004416
4417 spv::Op Opcode = spv::OpCompositeExtract;
4418 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004419 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004420 } else {
David Neto257c3892018-04-11 13:19:45 -04004421 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004422 Opcode = spv::OpVectorExtractDynamic;
4423 }
4424
David Neto87846742018-04-11 17:36:22 -04004425 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004426 SPIRVInstList.push_back(Inst);
4427 break;
4428 }
4429 case Instruction::InsertElement: {
4430 // Handle <4 x i8> type manually.
4431 Type *CompositeTy = I.getOperand(0)->getType();
4432 if (is4xi8vec(CompositeTy)) {
4433 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4434 uint32_t CstFFID = VMap[CstFF];
4435
4436 uint32_t ShiftAmountID = 0;
4437 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4438 // Handle constant index.
4439 uint64_t Idx = CI->getZExtValue();
4440 Value *ShiftAmount =
4441 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4442 ShiftAmountID = VMap[ShiftAmount];
4443 } else {
4444 // Handle variable index.
4445 SPIRVOperandList TmpOps;
4446
David Neto257c3892018-04-11 13:19:45 -04004447 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4448 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004449
4450 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004451 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004452
4453 ShiftAmountID = nextID;
4454
David Neto87846742018-04-11 17:36:22 -04004455 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004456 SPIRVInstList.push_back(TmpInst);
4457 }
4458
4459 //
4460 // Generate mask operations.
4461 //
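      // Illustrative sketch for index Idx (pseudocode, not from the source):
      //   mask     = 0xFF << (Idx * 8)          ; OpShiftLeftLogical
      //   inv_mask = ~mask                      ; OpNot
      //   cleared  = %vec & inv_mask            ; OpBitwiseAnd
      //   shifted  = %elem << (Idx * 8)         ; OpShiftLeftLogical
      //   result   = cleared | shifted          ; OpBitwiseOr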
4462
4463 // ShiftLeft mask according to index of insertelement.
4464 SPIRVOperandList Ops;
4465
David Neto257c3892018-04-11 13:19:45 -04004466 const uint32_t ResTyID = lookupType(CompositeTy);
4467 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004468
4469 uint32_t MaskID = nextID;
4470
David Neto87846742018-04-11 17:36:22 -04004471 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004472 SPIRVInstList.push_back(Inst);
4473
4474 // Inverse mask.
4475 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004476 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004477
4478 uint32_t InvMaskID = nextID;
4479
David Neto87846742018-04-11 17:36:22 -04004480 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004481 SPIRVInstList.push_back(Inst);
4482
4483 // Apply mask.
4484 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004485 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004486
4487 uint32_t OrgValID = nextID;
4488
David Neto87846742018-04-11 17:36:22 -04004489 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004490 SPIRVInstList.push_back(Inst);
4491
4492 // Create correct value according to index of insertelement.
4493 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004494 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4495 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004496
4497 uint32_t InsertValID = nextID;
4498
David Neto87846742018-04-11 17:36:22 -04004499 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004500 SPIRVInstList.push_back(Inst);
4501
4502 // Insert value to original value.
4503 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004504 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004505
David Netoa394f392017-08-26 20:45:29 -04004506 VMap[&I] = nextID;
4507
David Neto87846742018-04-11 17:36:22 -04004508 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004509 SPIRVInstList.push_back(Inst);
4510
4511 break;
4512 }
4513
David Neto22f144c2017-06-12 14:26:21 -04004514 SPIRVOperandList Ops;
4515
James Priced26efea2018-06-09 23:28:32 +01004516 // Ops[0] = Result Type ID
4517 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004518
4519 spv::Op Opcode = spv::OpCompositeInsert;
4520 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004521 const auto value = CI->getZExtValue();
4522 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004523 // Ops[1] = Object ID
4524 // Ops[2] = Composite ID
4525 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004526 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004527 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004528 } else {
James Priced26efea2018-06-09 23:28:32 +01004529 // Ops[1] = Composite ID
4530 // Ops[2] = Object ID
4531 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004532 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004533 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004534 Opcode = spv::OpVectorInsertDynamic;
4535 }
4536
David Neto87846742018-04-11 17:36:22 -04004537 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004538 SPIRVInstList.push_back(Inst);
4539 break;
4540 }
4541 case Instruction::ShuffleVector: {
4542 // Ops[0] = Result Type ID
4543 // Ops[1] = Vector 1 ID
4544 // Ops[2] = Vector 2 ID
4545 // Ops[3] ... Ops[n] = Components (Literal Number)
4546 SPIRVOperandList Ops;
4547
David Neto257c3892018-04-11 13:19:45 -04004548 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4549 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004550
alan-bakerc9666712020-04-01 16:31:21 -04004551 auto shuffle = cast<ShuffleVectorInst>(&I);
4552 SmallVector<int, 4> mask;
4553 shuffle->getShuffleMask(mask);
4554 for (auto i : mask) {
4555 if (i == UndefMaskElem) {
4556 if (clspv::Option::HackUndef())
4557 // Use 0 instead of undef.
David Neto257c3892018-04-11 13:19:45 -04004558 Ops << MkNum(0);
alan-bakerc9666712020-04-01 16:31:21 -04004559 else
4560 // Undef for shuffle in SPIR-V.
4561 Ops << MkNum(0xffffffff);
David Neto22f144c2017-06-12 14:26:21 -04004562 } else {
alan-bakerc9666712020-04-01 16:31:21 -04004563 Ops << MkNum(i);
David Neto22f144c2017-06-12 14:26:21 -04004564 }
4565 }
4566
David Neto87846742018-04-11 17:36:22 -04004567 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004568 SPIRVInstList.push_back(Inst);
4569 break;
4570 }
4571 case Instruction::ICmp:
4572 case Instruction::FCmp: {
4573 CmpInst *CmpI = cast<CmpInst>(&I);
4574
David Netod4ca2e62017-07-06 18:47:35 -04004575 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004576 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004577 if (isa<PointerType>(ArgTy)) {
4578 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004579 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004580 errs()
4581 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4582 << "in function " << name << "\n";
4583 llvm_unreachable("Pointer equality check is invalid");
4584 break;
4585 }
4586
David Neto257c3892018-04-11 13:19:45 -04004587 // Ops[0] = Result Type ID
4588 // Ops[1] = Operand 1 ID
4589 // Ops[2] = Operand 2 ID
4590 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004591
David Neto257c3892018-04-11 13:19:45 -04004592 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4593 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004594
4595 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004596 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004597 SPIRVInstList.push_back(Inst);
4598 break;
4599 }
4600 case Instruction::Br: {
4601 // The branch instruction is deferred because it needs the target label's ID.
4602 // Record the slot's location in SPIRVInstructionList.
4603 DeferredInsts.push_back(
4604 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4605 break;
4606 }
4607 case Instruction::Switch: {
4608 I.print(errs());
4609 llvm_unreachable("Unsupported instruction???");
4610 break;
4611 }
4612 case Instruction::IndirectBr: {
4613 I.print(errs());
4614 llvm_unreachable("Unsupported instruction???");
4615 break;
4616 }
4617 case Instruction::PHI: {
4618 // The PHI instruction is deferred because it needs the incoming labels' IDs.
4619 // Record the slot's location in SPIRVInstructionList.
4620 DeferredInsts.push_back(
4621 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4622 break;
4623 }
4624 case Instruction::Alloca: {
4625 //
4626 // Generate OpVariable.
4627 //
4628 // Ops[0] : Result Type ID
4629 // Ops[1] : Storage Class
4630 SPIRVOperandList Ops;
4631
David Neto257c3892018-04-11 13:19:45 -04004632 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004633
David Neto87846742018-04-11 17:36:22 -04004634 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004635 SPIRVInstList.push_back(Inst);
4636 break;
4637 }
4638 case Instruction::Load: {
4639 LoadInst *LD = cast<LoadInst>(&I);
4640 //
4641 // Generate OpLoad.
4642 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004643
alan-baker5b86ed72019-02-15 08:26:50 -05004644 if (LD->getType()->isPointerTy()) {
4645 // Loading a pointer requires variable pointers.
4646 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4647 }
David Neto22f144c2017-06-12 14:26:21 -04004648
David Neto0a2f98d2017-09-15 19:38:40 -04004649 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004650 uint32_t PointerID = VMap[LD->getPointerOperand()];
4651
4652 // This is a hack to work around what looks like a driver bug.
4653 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004654 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4655 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004656 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004657 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004658 // Generate a bitwise-and of the original value with itself.
4659 // We should have been able to get away with just an OpCopyObject,
4660 // but we need something more complex to get past certain driver bugs.
4661 // This is ridiculous, but necessary.
4662 // TODO(dneto): Revisit this once drivers fix their bugs.
4663
4664 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004665 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4666 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004667
David Neto87846742018-04-11 17:36:22 -04004668 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004669 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004670 break;
4671 }
4672
4673 // This is the normal path. Generate a load.
4674
David Neto22f144c2017-06-12 14:26:21 -04004675 // Ops[0] = Result Type ID
4676 // Ops[1] = Pointer ID
4677 // Ops[2] ... Ops[n] = Optional Memory Access
4678 //
4679 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004680
David Neto22f144c2017-06-12 14:26:21 -04004681 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004682 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004683
David Neto87846742018-04-11 17:36:22 -04004684 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004685 SPIRVInstList.push_back(Inst);
4686 break;
4687 }
4688 case Instruction::Store: {
4689 StoreInst *ST = cast<StoreInst>(&I);
4690 //
4691 // Generate OpStore.
4692 //
4693
alan-baker5b86ed72019-02-15 08:26:50 -05004694 if (ST->getValueOperand()->getType()->isPointerTy()) {
4695 // Storing a pointer requires variable pointers.
4696 setVariablePointersCapabilities(
4697 ST->getValueOperand()->getType()->getPointerAddressSpace());
4698 }
4699
David Neto22f144c2017-06-12 14:26:21 -04004700 // Ops[0] = Pointer ID
4701 // Ops[1] = Object ID
4702 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4703 //
4704 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004705 SPIRVOperandList Ops;
4706 Ops << MkId(VMap[ST->getPointerOperand()])
4707 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004708
David Neto87846742018-04-11 17:36:22 -04004709 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004710 SPIRVInstList.push_back(Inst);
4711 break;
4712 }
4713 case Instruction::AtomicCmpXchg: {
4714 I.print(errs());
4715 llvm_unreachable("Unsupported instruction???");
4716 break;
4717 }
4718 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004719 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4720
4721 spv::Op opcode;
4722
4723 switch (AtomicRMW->getOperation()) {
4724 default:
4725 I.print(errs());
4726 llvm_unreachable("Unsupported instruction???");
4727 case llvm::AtomicRMWInst::Add:
4728 opcode = spv::OpAtomicIAdd;
4729 break;
4730 case llvm::AtomicRMWInst::Sub:
4731 opcode = spv::OpAtomicISub;
4732 break;
4733 case llvm::AtomicRMWInst::Xchg:
4734 opcode = spv::OpAtomicExchange;
4735 break;
4736 case llvm::AtomicRMWInst::Min:
4737 opcode = spv::OpAtomicSMin;
4738 break;
4739 case llvm::AtomicRMWInst::Max:
4740 opcode = spv::OpAtomicSMax;
4741 break;
4742 case llvm::AtomicRMWInst::UMin:
4743 opcode = spv::OpAtomicUMin;
4744 break;
4745 case llvm::AtomicRMWInst::UMax:
4746 opcode = spv::OpAtomicUMax;
4747 break;
4748 case llvm::AtomicRMWInst::And:
4749 opcode = spv::OpAtomicAnd;
4750 break;
4751 case llvm::AtomicRMWInst::Or:
4752 opcode = spv::OpAtomicOr;
4753 break;
4754 case llvm::AtomicRMWInst::Xor:
4755 opcode = spv::OpAtomicXor;
4756 break;
4757 }
4758
4759 //
4760 // Generate OpAtomic*.
4761 //
4762 SPIRVOperandList Ops;
4763
David Neto257c3892018-04-11 13:19:45 -04004764 Ops << MkId(lookupType(I.getType()))
4765 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004766
4767 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004768 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004769 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004770
4771 const auto ConstantMemorySemantics = ConstantInt::get(
4772 IntTy, spv::MemorySemanticsUniformMemoryMask |
4773 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004774 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004775
David Neto257c3892018-04-11 13:19:45 -04004776 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004777
4778 VMap[&I] = nextID;
4779
David Neto87846742018-04-11 17:36:22 -04004780 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004781 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004782 break;
4783 }
4784 case Instruction::Fence: {
4785 I.print(errs());
4786 llvm_unreachable("Unsupported instruction???");
4787 break;
4788 }
4789 case Instruction::Call: {
4790 CallInst *Call = dyn_cast<CallInst>(&I);
4791 Function *Callee = Call->getCalledFunction();
4792
Alan Baker202c8c72018-08-13 13:47:44 -04004793 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004794 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4795 // Generate an OpLoad
4796 SPIRVOperandList Ops;
4797 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004798
David Neto862b7d82018-06-14 18:48:37 -04004799 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4800 << MkId(ResourceVarDeferredLoadCalls[Call]);
4801
4802 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4803 SPIRVInstList.push_back(Inst);
4804 VMap[Call] = load_id;
4805 break;
4806
4807 } else {
4808 // This maps to an OpVariable we've already generated.
4809 // No code is generated for the call.
4810 }
4811 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004812 } else if (Callee->getName().startswith(
4813 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004814 // Don't codegen an instruction here, but instead map this call directly
4815 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004816 int spec_id = static_cast<int>(
4817 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004818 const auto &info = LocalSpecIdInfoMap[spec_id];
4819 VMap[Call] = info.variable_id;
4820 break;
David Neto862b7d82018-06-14 18:48:37 -04004821 }
4822
4823 // Sampler initializers become a load of the corresponding sampler.
4824
Kévin Petitdf71de32019-04-09 14:09:50 +01004825 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004826 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004827 const auto third_param = static_cast<unsigned>(
4828 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4829 auto sampler_value = third_param;
4830 if (clspv::Option::UseSamplerMap()) {
4831 sampler_value = getSamplerMap()[third_param].first;
4832 }
David Neto862b7d82018-06-14 18:48:37 -04004833
4834 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004835 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004836 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004837
David Neto257c3892018-04-11 13:19:45 -04004838 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004839 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004840
David Neto862b7d82018-06-14 18:48:37 -04004841 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004842 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004843 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004844 break;
4845 }
4846
Kévin Petit349c9502019-03-28 17:24:14 +00004847 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004848 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4849 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4850 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004851
Kévin Petit617a76d2019-04-04 13:54:16 +01004852 // If the switch above didn't have an entry maybe the intrinsic
4853 // is using the name mangling logic.
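      // Sketch of the two forms handled here: "spirv.atomic_xor" takes its
      // opcode from the StringSwitch above, while a call whose name starts
      // with clspv::SPIRVOpIntrinsicFunction() carries the SPIR-V opcode as
      // its first operand, which is then skipped when building Ops below.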
4854 bool usesMangler = false;
4855 if (opcode == spv::OpNop) {
4856 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4857 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4858 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4859 usesMangler = true;
4860 }
4861 }
4862
Kévin Petit349c9502019-03-28 17:24:14 +00004863 if (opcode != spv::OpNop) {
4864
David Neto22f144c2017-06-12 14:26:21 -04004865 SPIRVOperandList Ops;
4866
Kévin Petit349c9502019-03-28 17:24:14 +00004867 if (!I.getType()->isVoidTy()) {
4868 Ops << MkId(lookupType(I.getType()));
4869 }
David Neto22f144c2017-06-12 14:26:21 -04004870
Kévin Petit617a76d2019-04-04 13:54:16 +01004871 unsigned firstOperand = usesMangler ? 1 : 0;
4872 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004873 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004874 }
4875
Kévin Petit349c9502019-03-28 17:24:14 +00004876 if (!I.getType()->isVoidTy()) {
4877 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004878 }
4879
Kévin Petit349c9502019-03-28 17:24:14 +00004880 SPIRVInstruction *Inst;
4881 if (!I.getType()->isVoidTy()) {
4882 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4883 } else {
4884 Inst = new SPIRVInstruction(opcode, Ops);
4885 }
Kévin Petit8a560882019-03-21 15:24:34 +00004886 SPIRVInstList.push_back(Inst);
4887 break;
4888 }
4889
David Neto22f144c2017-06-12 14:26:21 -04004890 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4891 if (Callee->getName().startswith("spirv.copy_memory")) {
4892 //
4893 // Generate OpCopyMemory.
4894 //
4895
4896 // Ops[0] = Dst ID
4897 // Ops[1] = Src ID
4898 // Ops[2] = Memory Access
4899 // Ops[3] = Alignment
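      // Illustrative result for a non-volatile copy with 4-byte alignment:
      //   OpCopyMemory %dst %src Aligned 4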
4900
4901 auto IsVolatile =
4902 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4903
4904 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4905 : spv::MemoryAccessMaskNone;
4906
4907 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4908
4909 auto Alignment =
4910 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4911
David Neto257c3892018-04-11 13:19:45 -04004912 SPIRVOperandList Ops;
4913 Ops << MkId(VMap[Call->getArgOperand(0)])
4914 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4915 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004916
David Neto87846742018-04-11 17:36:22 -04004917 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004918
4919 SPIRVInstList.push_back(Inst);
4920
4921 break;
4922 }
4923
SJW2c317da2020-03-23 07:39:13 -05004924 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4925 // Additionally, OpTypeSampledImage is generated.
SJW173c7e92020-03-16 08:44:47 -05004926 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004927 //
4928 // Generate OpSampledImage.
4929 //
4930 // Ops[0] = Result Type ID
4931 // Ops[1] = Image ID
4932 // Ops[2] = Sampler ID
4933 //
4934 SPIRVOperandList Ops;
4935
4936 Value *Image = Call->getArgOperand(0);
4937 Value *Sampler = Call->getArgOperand(1);
4938 Value *Coordinate = Call->getArgOperand(2);
4939
4940 TypeMapType &OpImageTypeMap = getImageTypeMap();
4941 Type *ImageTy = Image->getType()->getPointerElementType();
4942 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004943 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004944 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004945
4946 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004947
4948 uint32_t SampledImageID = nextID;
4949
David Neto87846742018-04-11 17:36:22 -04004950 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004951 SPIRVInstList.push_back(Inst);
4952
4953 //
4954 // Generate OpImageSampleExplicitLod.
4955 //
4956 // Ops[0] = Result Type ID
4957 // Ops[1] = Sampled Image ID
4958 // Ops[2] = Coordinate ID
4959 // Ops[3] = Image Operands Type ID
4960 // Ops[4] ... Ops[n] = Operands ID
4961 //
4962 Ops.clear();
4963
alan-bakerf67468c2019-11-25 15:51:49 -05004964 const bool is_int_image = IsIntImageType(Image->getType());
4965 uint32_t result_type = 0;
4966 if (is_int_image) {
4967 result_type = v4int32ID;
4968 } else {
4969 result_type = lookupType(Call->getType());
4970 }
4971
4972 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4973 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004974
4975 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004976 Ops << MkId(VMap[CstFP0]);
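// Compute shaders cannot use implicit-LOD sampling in SPIR-V, so an
// explicit Lod of 0.0 is always supplied.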
David Neto22f144c2017-06-12 14:26:21 -04004977
alan-bakerf67468c2019-11-25 15:51:49 -05004978 uint32_t final_id = nextID++;
4979 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004980
alan-bakerf67468c2019-11-25 15:51:49 -05004981 uint32_t image_id = final_id;
4982 if (is_int_image) {
4983 // Int image requires a bitcast from v4int to v4uint.
4984 image_id = nextID++;
4985 }
4986
4987 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004988 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004989
4990 if (is_int_image) {
4991 // Generate the bitcast.
4992 Ops.clear();
4993 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4994 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4995 SPIRVInstList.push_back(Inst);
4996 }
David Neto22f144c2017-06-12 14:26:21 -04004997 break;
4998 }
4999
alan-baker75090e42020-02-20 11:21:04 -05005000 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05005001 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05005002 Value *Image = Call->getArgOperand(0);
5003 Value *Coordinate = Call->getArgOperand(1);
5004
5005 //
5006 // Generate OpImageFetch
5007 //
5008 // Ops[0] = Result Type ID
5009 // Ops[1] = Image ID
5010 // Ops[2] = Coordinate ID
5011 // Ops[3] = Lod
5012 // Ops[4] = 0
5013 //
5014 SPIRVOperandList Ops;
5015
5016 const bool is_int_image = IsIntImageType(Image->getType());
5017 uint32_t result_type = 0;
5018 if (is_int_image) {
5019 result_type = v4int32ID;
5020 } else {
5021 result_type = lookupType(Call->getType());
5022 }
5023
5024 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
5025 << MkNum(spv::ImageOperandsLodMask);
5026
5027 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5028 Ops << MkId(VMap[CstInt0]);
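// OpImageFetch addresses the image directly with integer coordinates
// (no sampler); the explicit Lod of 0 selects the base mip level.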
5029
5030 uint32_t final_id = nextID++;
5031 VMap[&I] = final_id;
5032
5033 uint32_t image_id = final_id;
5034 if (is_int_image) {
5035 // Int image requires a bitcast from v4int to v4uint.
5036 image_id = nextID++;
5037 }
5038
5039 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5040 SPIRVInstList.push_back(Inst);
5041
5042 if (is_int_image) {
5043 // Generate the bitcast.
5044 Ops.clear();
5045 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5046 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5047 SPIRVInstList.push_back(Inst);
5048 }
5049 break;
5050 }
5051
alan-bakerf67468c2019-11-25 15:51:49 -05005052 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05005053 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005054 //
5055 // Generate OpImageWrite.
5056 //
5057 // Ops[0] = Image ID
5058 // Ops[1] = Coordinate ID
5059 // Ops[2] = Texel ID
5060 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5061 // Ops[4] ... Ops[n] = (Optional) Operands ID
5062 //
5063 SPIRVOperandList Ops;
5064
5065 Value *Image = Call->getArgOperand(0);
5066 Value *Coordinate = Call->getArgOperand(1);
5067 Value *Texel = Call->getArgOperand(2);
5068
5069 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005070 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005071 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005072
5073 const bool is_int_image = IsIntImageType(Image->getType());
5074 if (is_int_image) {
5075 // Generate a bitcast to v4int and use it as the texel value.
5076 uint32_t castID = nextID++;
5077 Ops << MkId(v4int32ID) << MkId(TexelID);
5078 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5079 SPIRVInstList.push_back(cast);
5080 Ops.clear();
5081 TexelID = castID;
5082 }
David Neto257c3892018-04-11 13:19:45 -04005083 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005084
David Neto87846742018-04-11 17:36:22 -04005085 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005086 SPIRVInstList.push_back(Inst);
5087 break;
5088 }
5089
alan-bakerce179f12019-12-06 19:02:22 -05005090 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05005091 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005092 //
alan-bakerce179f12019-12-06 19:02:22 -05005093 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005094 //
5095 // Ops[0] = Image ID
5096 //
alan-bakerce179f12019-12-06 19:02:22 -05005097 // Result type has components equal to the dimensionality of the image,
5098 // plus 1 if the image is arrayed.
5099 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005100 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005101 SPIRVOperandList Ops;
5102
5103 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005104 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5105 uint32_t SizesTypeID = 0;
5106
David Neto5c22a252018-03-15 16:07:41 -04005107 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005108 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005109 const uint32_t components =
5110 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005111 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005112 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5113 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005114 SizesTypeID =
5115 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005116 }
David Neto5c22a252018-03-15 16:07:41 -04005117 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005118 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005119 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05005120 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05005121 query_opcode = spv::OpImageQuerySizeLod;
5122 // Need explicit 0 for Lod operand.
5123 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5124 Ops << MkId(VMap[CstInt0]);
5125 }
David Neto5c22a252018-03-15 16:07:41 -04005126
5127 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005128 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005129 SPIRVInstList.push_back(QueryInst);
5130
alan-bakerce179f12019-12-06 19:02:22 -05005131 // May require an extra instruction to create the appropriate result of
5132 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05005133 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05005134 if (dim == 3) {
5135 // get_image_dim returns an int4 for 3D images.
5136 //
5137 // Reset value map entry since we generated an intermediate
5138 // instruction.
5139 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005140
alan-bakerce179f12019-12-06 19:02:22 -05005141 // Implement:
5142 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5143 Ops.clear();
5144 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5145 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005146
alan-bakerce179f12019-12-06 19:02:22 -05005147 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5148 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005149
alan-bakerce179f12019-12-06 19:02:22 -05005150 auto *Inst =
5151 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5152 SPIRVInstList.push_back(Inst);
5153 } else if (dim != components) {
5154 // get_image_dim returns an int2 regardless of the arrayedness of the
5155 // image. If the image is arrayed, an element must be dropped from the
5156 // query result.
5157 //
5158 // Reset value map entry since we generated an intermediate
5159 // instruction.
5160 VMap[&I] = nextID;
5161
5162 // Implement:
5163 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5164 Ops.clear();
5165 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5166 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5167
5168 auto *Inst =
5169 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5170 SPIRVInstList.push_back(Inst);
5171 }
5172 } else if (components > 1) {
5173 // Reset value map entry since we generated an intermediate instruction.
5174 VMap[&I] = nextID;
5175
5176 // Implement:
5177 // %result = OpCompositeExtract %uint %sizes <component number>
5178 Ops.clear();
5179 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5180
5181 uint32_t component = 0;
5182 if (IsGetImageHeight(Callee))
5183 component = 1;
5184 else if (IsGetImageDepth(Callee))
5185 component = 2;
5186 Ops << MkNum(component);
5187
5188 auto *Inst =
5189 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5190 SPIRVInstList.push_back(Inst);
5191 }
David Neto5c22a252018-03-15 16:07:41 -04005192 break;
5193 }
5194
David Neto22f144c2017-06-12 14:26:21 -04005195 // The call instruction is deferred because it needs the function's ID.
5196 // Record the slot's location on SPIRVInstructionList.
5197 DeferredInsts.push_back(
5198 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5199
David Neto3fbb4072017-10-16 11:28:14 -04005200 // Check whether the implementation of this call uses an extended
5201 // instruction plus one more value-producing instruction. If so, then
5202 // reserve the id for the extra value-producing slot.
5203 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5204 if (EInst != kGlslExtInstBad) {
5205 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005206 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005207 VMap[&I] = nextID;
5208 nextID++;
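// For example, clz() maps to FindUMsb followed by an OpISub computing
// 31 - msb; the id reserved here names that final OpISub result.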
5209 }
5210 break;
5211 }
5212 case Instruction::Ret: {
5213 unsigned NumOps = I.getNumOperands();
5214 if (NumOps == 0) {
5215 //
5216 // Generate OpReturn.
5217 //
David Netoef5ba2b2019-12-20 08:35:54 -05005218 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005219 } else {
5220 //
5221 // Generate OpReturnValue.
5222 //
5223
5224 // Ops[0] = Return Value ID
5225 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005226
5227 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005228
David Neto87846742018-04-11 17:36:22 -04005229 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005230 SPIRVInstList.push_back(Inst);
5231 break;
5232 }
5233 break;
5234 }
5235 }
5236}
5237
5238void SPIRVProducerPass::GenerateFuncEpilogue() {
SJW69939d52020-04-16 07:29:07 -05005239 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005240
5241 //
5242 // Generate OpFunctionEnd
5243 //
5244
David Netoef5ba2b2019-12-20 08:35:54 -05005245 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005246 SPIRVInstList.push_back(Inst);
5247}
5248
5249bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005250 // Don't specialize <4 x i8> if i8 is generally supported.
5251 if (clspv::Option::Int8Support())
5252 return false;
5253
David Neto22f144c2017-06-12 14:26:21 -04005254 LLVMContext &Context = Ty->getContext();
5255 if (Ty->isVectorTy()) {
5256 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
5257 Ty->getVectorNumElements() == 4) {
5258 return true;
5259 }
5260 }
5261
5262 return false;
5263}
5264
5265void SPIRVProducerPass::HandleDeferredInstruction() {
SJW69939d52020-04-16 07:29:07 -05005266 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005267 ValueMapType &VMap = getValueMap();
5268 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5269
5270 for (auto DeferredInst = DeferredInsts.rbegin();
5271 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5272 Value *Inst = std::get<0>(*DeferredInst);
5273 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
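// OpPhi instructions must be the first instructions in a block, so skip
// past any phis before inserting the deferred instruction.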
5274 if (InsertPoint != SPIRVInstList.end()) {
5275 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5276 ++InsertPoint;
5277 }
5278 }
5279
5280 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005281 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005282 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005283 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005284 //
5285 // Generate OpLoopMerge.
5286 //
5287 // Ops[0] = Merge Block ID
5288 // Ops[1] = Continue Target ID
5289 // Ops[2] = Selection Control
5290 SPIRVOperandList Ops;
5291
alan-baker06cad652019-12-03 17:56:47 -05005292 auto MergeBB = MergeBlocks[BrBB];
5293 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005294 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005295 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005296 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005297 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005298
David Neto87846742018-04-11 17:36:22 -04005299 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005300 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005301 } else if (MergeBlocks.count(BrBB)) {
5302 //
5303 // Generate OpSelectionMerge.
5304 //
5305 // Ops[0] = Merge Block ID
5306 // Ops[1] = Selection Control
5307 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005308
alan-baker06cad652019-12-03 17:56:47 -05005309 auto MergeBB = MergeBlocks[BrBB];
5310 uint32_t MergeBBID = VMap[MergeBB];
5311 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005312
alan-baker06cad652019-12-03 17:56:47 -05005313 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5314 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005315 }
5316
5317 if (Br->isConditional()) {
5318 //
5319 // Generate OpBranchConditional.
5320 //
5321 // Ops[0] = Condition ID
5322 // Ops[1] = True Label ID
5323 // Ops[2] = False Label ID
5324 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5325 SPIRVOperandList Ops;
5326
5327 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005328 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005329 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005330
5331 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005332
David Neto87846742018-04-11 17:36:22 -04005333 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005334 SPIRVInstList.insert(InsertPoint, BrInst);
5335 } else {
5336 //
5337 // Generate OpBranch.
5338 //
5339 // Ops[0] = Target Label ID
5340 SPIRVOperandList Ops;
5341
5342 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005343 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005344
David Neto87846742018-04-11 17:36:22 -04005345 SPIRVInstList.insert(InsertPoint,
5346 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005347 }
5348 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04005349 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
5350 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05005351 // OpPhi on pointers requires variable pointers.
5352 setVariablePointersCapabilities(
5353 PHI->getType()->getPointerAddressSpace());
5354 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5355 setVariablePointers(true);
5356 }
5357 }
5358
David Neto22f144c2017-06-12 14:26:21 -04005359 //
5360 // Generate OpPhi.
5361 //
5362 // Ops[0] = Result Type ID
5363 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5364 SPIRVOperandList Ops;
5365
David Neto257c3892018-04-11 13:19:45 -04005366 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005367
David Neto22f144c2017-06-12 14:26:21 -04005368 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5369 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005370 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005371 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005372 }
5373
5374 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005375 InsertPoint,
5376 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005377 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5378 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05005379 LLVMContext &Context = Callee->getContext();
5380 auto IntTy = Type::getInt32Ty(Context);
5381 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04005382 auto callee_name = Callee->getName();
5383 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005384
5385 if (EInst) {
5386 uint32_t &ExtInstImportID = getOpExtInstImportID();
5387
5388 //
5389 // Generate OpExtInst.
5390 //
5391
5392 // Ops[0] = Result Type ID
5393 // Ops[1] = Set ID (OpExtInstImport ID)
5394 // Ops[2] = Instruction Number (Literal Number)
5395 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5396 SPIRVOperandList Ops;
5397
David Neto862b7d82018-06-14 18:48:37 -04005398 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5399 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005400
David Neto22f144c2017-06-12 14:26:21 -04005401 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5402 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005403 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005404 }
5405
David Neto87846742018-04-11 17:36:22 -04005406 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5407 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005408 SPIRVInstList.insert(InsertPoint, ExtInst);
5409
David Neto3fbb4072017-10-16 11:28:14 -04005410 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5411 if (IndirectExtInst != kGlslExtInstBad) {
5412 // Generate one more instruction that uses the result of the extended
5413 // instruction. Its result id is one more than the id of the
5414 // extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04005415 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5416 &VMap, &SPIRVInstList, &InsertPoint](
5417 spv::Op opcode, Constant *constant) {
5418 //
5419 // Generate instruction like:
5420 // result = opcode constant <extinst-result>
5421 //
5422 // Ops[0] = Result Type ID
5423 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5424 // Ops[2] = Operand 1 ;; the result of the extended instruction
5425 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005426
David Neto3fbb4072017-10-16 11:28:14 -04005427 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005428 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005429
5430 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5431 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005432 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5433 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005434 }
David Neto257c3892018-04-11 13:19:45 -04005435 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005436
5437 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005438 InsertPoint, new SPIRVInstruction(
5439 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005440 };
5441
5442 switch (IndirectExtInst) {
5443 case glsl::ExtInstFindUMsb: // Implementing clz
SJW2c317da2020-03-23 07:39:13 -05005444 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04005445 break;
5446 case glsl::ExtInstAcos: // Implementing acospi
5447 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005448 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005449 case glsl::ExtInstAtan2: // Implementing atan2pi
5450 generate_extra_inst(
5451 spv::OpFMul,
5452 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5453 break;
5454
5455 default:
5456 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005457 }
David Neto22f144c2017-06-12 14:26:21 -04005458 }
David Neto3fbb4072017-10-16 11:28:14 -04005459
SJW2c317da2020-03-23 07:39:13 -05005460 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04005461 //
5462 // Generate OpBitCount
5463 //
5464 // Ops[0] = Result Type ID
5465 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005466 SPIRVOperandList Ops;
5467 Ops << MkId(lookupType(Call->getType()))
5468 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005469
5470 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005471 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005472 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005473
David Neto862b7d82018-06-14 18:48:37 -04005474 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005475
5476 // Generate an OpCompositeConstruct
5477 SPIRVOperandList Ops;
5478
5479 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005480 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005481
5482 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005483 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005484 }
5485
5486 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005487 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5488 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005489
Alan Baker202c8c72018-08-13 13:47:44 -04005490 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5491
5492 // We have already mapped the call's result value to an ID.
5493 // Don't generate any code now.
5494
5495 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005496
5497 // We have already mapped the call's result value to an ID.
5498 // Don't generate any code now.
5499
David Neto22f144c2017-06-12 14:26:21 -04005500 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005501 if (Call->getType()->isPointerTy()) {
5502 // Functions returning pointers require variable pointers.
5503 setVariablePointersCapabilities(
5504 Call->getType()->getPointerAddressSpace());
5505 }
5506
David Neto22f144c2017-06-12 14:26:21 -04005507 //
5508 // Generate OpFunctionCall.
5509 //
5510
5511 // Ops[0] = Result Type ID
5512 // Ops[1] = Callee Function ID
5513 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5514 SPIRVOperandList Ops;
5515
David Neto862b7d82018-06-14 18:48:37 -04005516 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005517
5518 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005519 if (CalleeID == 0) {
5520 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005521 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005522 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5523 // causes an infinite loop. Instead, go ahead and generate
5524 // the bad function call. A validator will catch the 0-Id.
5525 // llvm_unreachable("Can't translate function call");
5526 }
David Neto22f144c2017-06-12 14:26:21 -04005527
David Neto257c3892018-04-11 13:19:45 -04005528 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005529
David Neto22f144c2017-06-12 14:26:21 -04005530 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5531 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005532 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005533 auto *operand_type = operand->getType();
5534 // Images and samplers can be passed as function parameters without
5535 // variable pointers.
5536 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5537 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005538 auto sc =
5539 GetStorageClass(operand->getType()->getPointerAddressSpace());
5540 if (sc == spv::StorageClassStorageBuffer) {
5541 // Passing SSBO by reference requires variable pointers storage
5542 // buffer.
5543 setVariablePointersStorageBuffer(true);
5544 } else if (sc == spv::StorageClassWorkgroup) {
5545 // Workgroup references require variable pointers if they are not
5546 // memory object declarations.
5547 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5548 // Workgroup accessor represents a variable reference.
5549 if (!operand_call->getCalledFunction()->getName().startswith(
5550 clspv::WorkgroupAccessorFunction()))
5551 setVariablePointers(true);
5552 } else {
5553 // Arguments are function parameters.
5554 if (!isa<Argument>(operand))
5555 setVariablePointers(true);
5556 }
5557 }
5558 }
5559 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005560 }
5561
David Neto87846742018-04-11 17:36:22 -04005562 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5563 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005564 SPIRVInstList.insert(InsertPoint, CallInst);
5565 }
5566 }
5567 }
5568}
5569
David Neto1a1a0582017-07-07 12:01:44 -04005570void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005571 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005572 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005573 }
David Neto1a1a0582017-07-07 12:01:44 -04005574
SJW69939d52020-04-16 07:29:07 -05005575 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kAnnotations);
David Neto1a1a0582017-07-07 12:01:44 -04005576
David Netoc6f3ab22018-04-06 18:02:31 -04005577 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5578 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005579 for (auto *type : getTypesNeedingArrayStride()) {
5580 Type *elemTy = nullptr;
5581 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5582 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005583 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04005584 elemTy = arrayTy->getElementType();
5585 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
5586 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07005587 } else {
5588 errs() << "Unhandled strided type " << *type << "\n";
5589 llvm_unreachable("Unhandled strided type");
5590 }
David Neto1a1a0582017-07-07 12:01:44 -04005591
5592 // Ops[0] = Target ID
5593 // Ops[1] = Decoration (ArrayStride)
5594 // Ops[2] = Stride number (Literal Number)
5595 SPIRVOperandList Ops;
5596
David Neto85082642018-03-24 06:55:20 -07005597 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005598 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005599
5600 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5601 << MkNum(stride);
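// e.g. for a float element this emits: OpDecorate %type ArrayStride 4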
David Neto1a1a0582017-07-07 12:01:44 -04005602
David Neto87846742018-04-11 17:36:22 -04005603 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05005604 SPIRVInstList.push_back(DecoInst);
David Neto1a1a0582017-07-07 12:01:44 -04005605 }
David Netoc6f3ab22018-04-06 18:02:31 -04005606
5607 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005608 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5609 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005610 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
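// e.g. OpDecorate %array_size SpecId 3, letting the host override the
// workgroup array length with a specialization constant.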
David Netoc6f3ab22018-04-06 18:02:31 -04005611 SPIRVOperandList Ops;
5612 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5613 << MkNum(arg_info.spec_id);
SJW69939d52020-04-16 07:29:07 -05005614 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005615 }
David Neto1a1a0582017-07-07 12:01:44 -04005616}
5617
David Neto22f144c2017-06-12 14:26:21 -04005618glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005619
5620 const auto &fi = Builtins::Lookup(Name);
5621 switch (fi) {
5622 case Builtins::kClamp: {
5623 auto param_type = fi.getParameter(0);
5624 if (param_type.type_id == Type::FloatTyID) {
5625 return glsl::ExtInst::ExtInstFClamp;
5626 }
5627 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5628 : glsl::ExtInst::ExtInstUClamp;
5629 }
5630 case Builtins::kMax: {
5631 auto param_type = fi.getParameter(0);
5632 if (param_type.type_id == Type::FloatTyID) {
5633 return glsl::ExtInst::ExtInstFMax;
5634 }
5635 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5636 : glsl::ExtInst::ExtInstUMax;
5637 }
5638 case Builtins::kMin: {
5639 auto param_type = fi.getParameter(0);
5640 if (param_type.type_id == Type::FloatTyID) {
5641 return glsl::ExtInst::ExtInstFMin;
5642 }
5643 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5644 : glsl::ExtInst::ExtInstUMin;
5645 }
5646 case Builtins::kAbs:
5647 return glsl::ExtInst::ExtInstSAbs;
5648 case Builtins::kFmax:
5649 return glsl::ExtInst::ExtInstFMax;
5650 case Builtins::kFmin:
5651 return glsl::ExtInst::ExtInstFMin;
5652 case Builtins::kDegrees:
5653 return glsl::ExtInst::ExtInstDegrees;
5654 case Builtins::kRadians:
5655 return glsl::ExtInst::ExtInstRadians;
5656 case Builtins::kMix:
5657 return glsl::ExtInst::ExtInstFMix;
5658 case Builtins::kAcos:
5659 case Builtins::kAcospi:
5660 return glsl::ExtInst::ExtInstAcos;
5661 case Builtins::kAcosh:
5662 return glsl::ExtInst::ExtInstAcosh;
5663 case Builtins::kAsin:
5664 case Builtins::kAsinpi:
5665 return glsl::ExtInst::ExtInstAsin;
5666 case Builtins::kAsinh:
5667 return glsl::ExtInst::ExtInstAsinh;
5668 case Builtins::kAtan:
5669 case Builtins::kAtanpi:
5670 return glsl::ExtInst::ExtInstAtan;
5671 case Builtins::kAtanh:
5672 return glsl::ExtInst::ExtInstAtanh;
5673 case Builtins::kAtan2:
5674 case Builtins::kAtan2pi:
5675 return glsl::ExtInst::ExtInstAtan2;
5676 case Builtins::kCeil:
5677 return glsl::ExtInst::ExtInstCeil;
5678 case Builtins::kSin:
5679 case Builtins::kHalfSin:
5680 case Builtins::kNativeSin:
5681 return glsl::ExtInst::ExtInstSin;
5682 case Builtins::kSinh:
5683 return glsl::ExtInst::ExtInstSinh;
5684 case Builtins::kCos:
5685 case Builtins::kHalfCos:
5686 case Builtins::kNativeCos:
5687 return glsl::ExtInst::ExtInstCos;
5688 case Builtins::kCosh:
5689 return glsl::ExtInst::ExtInstCosh;
5690 case Builtins::kTan:
5691 case Builtins::kHalfTan:
5692 case Builtins::kNativeTan:
5693 return glsl::ExtInst::ExtInstTan;
5694 case Builtins::kTanh:
5695 return glsl::ExtInst::ExtInstTanh;
5696 case Builtins::kExp:
5697 case Builtins::kHalfExp:
5698 case Builtins::kNativeExp:
5699 return glsl::ExtInst::ExtInstExp;
5700 case Builtins::kExp2:
5701 case Builtins::kHalfExp2:
5702 case Builtins::kNativeExp2:
5703 return glsl::ExtInst::ExtInstExp2;
5704 case Builtins::kLog:
5705 case Builtins::kHalfLog:
5706 case Builtins::kNativeLog:
5707 return glsl::ExtInst::ExtInstLog;
5708 case Builtins::kLog2:
5709 case Builtins::kHalfLog2:
5710 case Builtins::kNativeLog2:
5711 return glsl::ExtInst::ExtInstLog2;
5712 case Builtins::kFabs:
5713 return glsl::ExtInst::ExtInstFAbs;
5714 case Builtins::kFma:
5715 return glsl::ExtInst::ExtInstFma;
5716 case Builtins::kFloor:
5717 return glsl::ExtInst::ExtInstFloor;
5718 case Builtins::kLdexp:
5719 return glsl::ExtInst::ExtInstLdexp;
5720 case Builtins::kPow:
5721 case Builtins::kPowr:
5722 case Builtins::kHalfPowr:
5723 case Builtins::kNativePowr:
5724 return glsl::ExtInst::ExtInstPow;
5725 case Builtins::kRound:
5726 return glsl::ExtInst::ExtInstRound;
5727 case Builtins::kSqrt:
5728 case Builtins::kHalfSqrt:
5729 case Builtins::kNativeSqrt:
5730 return glsl::ExtInst::ExtInstSqrt;
5731 case Builtins::kRsqrt:
5732 case Builtins::kHalfRsqrt:
5733 case Builtins::kNativeRsqrt:
5734 return glsl::ExtInst::ExtInstInverseSqrt;
5735 case Builtins::kTrunc:
5736 return glsl::ExtInst::ExtInstTrunc;
5737 case Builtins::kFrexp:
5738 return glsl::ExtInst::ExtInstFrexp;
5739 case Builtins::kFract:
5740 return glsl::ExtInst::ExtInstFract;
5741 case Builtins::kSign:
5742 return glsl::ExtInst::ExtInstFSign;
5743 case Builtins::kLength:
5744 case Builtins::kFastLength:
5745 return glsl::ExtInst::ExtInstLength;
5746 case Builtins::kDistance:
5747 case Builtins::kFastDistance:
5748 return glsl::ExtInst::ExtInstDistance;
5749 case Builtins::kStep:
5750 return glsl::ExtInst::ExtInstStep;
5751 case Builtins::kSmoothstep:
5752 return glsl::ExtInst::ExtInstSmoothStep;
5753 case Builtins::kCross:
5754 return glsl::ExtInst::ExtInstCross;
5755 case Builtins::kNormalize:
5756 case Builtins::kFastNormalize:
5757 return glsl::ExtInst::ExtInstNormalize;
5758 default:
5759 break;
5760 }
5761
David Neto22f144c2017-06-12 14:26:21 -04005762 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005763 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5764 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5765 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005766 .Default(kGlslExtInstBad);
5767}
5768
5769glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005770 switch (Builtins::Lookup(Name)) {
5771 case Builtins::kClz:
5772 return glsl::ExtInst::ExtInstFindUMsb;
5773 case Builtins::kAcospi:
5774 return glsl::ExtInst::ExtInstAcos;
5775 case Builtins::kAsinpi:
5776 return glsl::ExtInst::ExtInstAsin;
5777 case Builtins::kAtanpi:
5778 return glsl::ExtInst::ExtInstAtan;
5779 case Builtins::kAtan2pi:
5780 return glsl::ExtInst::ExtInstAtan2;
5781 default:
5782 break;
5783 }
5784 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005785}
5786
alan-bakerb6b09dc2018-11-08 16:59:28 -05005787glsl::ExtInst
5788SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005789 auto direct = getExtInstEnum(Name);
5790 if (direct != kGlslExtInstBad)
5791 return direct;
5792 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005793}
5794
David Neto22f144c2017-06-12 14:26:21 -04005795void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005796 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005797}
5798
5799void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5800 WriteOneWord(Inst->getResultID());
5801}
5802
5803void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5804 // High 16 bits: Word Count
5805 // Low 16 bits: Opcode
5806 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005807 const uint32_t count = Inst->getWordCount();
5808 if (count > 65535) {
5809 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5810 llvm_unreachable("Word count too high");
5811 }
David Neto22f144c2017-06-12 14:26:21 -04005812 Word |= Inst->getWordCount() << 16;
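// e.g. OpTypeVoid (opcode 19) with a word count of 2 encodes as 0x00020013.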
5813 WriteOneWord(Word);
5814}
5815
David Netoef5ba2b2019-12-20 08:35:54 -05005816void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005817 SPIRVOperandType OpTy = Op->getType();
5818 switch (OpTy) {
5819 default: {
5820 llvm_unreachable("Unsupported SPIRV Operand Type???");
5821 break;
5822 }
5823 case SPIRVOperandType::NUMBERID: {
5824 WriteOneWord(Op->getNumID());
5825 break;
5826 }
5827 case SPIRVOperandType::LITERAL_STRING: {
5828 std::string Str = Op->getLiteralStr();
5829 const char *Data = Str.c_str();
5830 size_t WordSize = Str.size() / 4;
5831 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5832 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5833 }
5834
5835 uint32_t Remainder = Str.size() % 4;
5836 uint32_t LastWord = 0;
5837 if (Remainder) {
5838 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5839 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5840 }
5841 }
5842
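// The final word carries any remaining characters plus zero padding; when
// the length is a multiple of four this writes an all-zero word, which
// supplies the NUL terminator required by the SPIR-V string encoding.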
5843 WriteOneWord(LastWord);
5844 break;
5845 }
5846 case SPIRVOperandType::LITERAL_INTEGER:
5847 case SPIRVOperandType::LITERAL_FLOAT: {
5848 auto LiteralNum = Op->getLiteralNum();
5849 // TODO: Handle LiteralNum carefully.
5850 for (auto Word : LiteralNum) {
5851 WriteOneWord(Word);
5852 }
5853 break;
5854 }
5855 }
5856}
5857
5858void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005859 for (int i = 0; i < kSectionCount; ++i) {
5860 WriteSPIRVBinary(SPIRVSections[i]);
5861 }
5862}
5863
5864void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
David Neto22f144c2017-06-12 14:26:21 -04005865
5866 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005867 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005868 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5869
5870 switch (Opcode) {
5871 default: {
David Neto5c22a252018-03-15 16:07:41 -04005872 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005873 llvm_unreachable("Unsupported SPIRV instruction");
5874 break;
5875 }
5876 case spv::OpCapability:
5877 case spv::OpExtension:
5878 case spv::OpMemoryModel:
5879 case spv::OpEntryPoint:
5880 case spv::OpExecutionMode:
5881 case spv::OpSource:
5882 case spv::OpDecorate:
5883 case spv::OpMemberDecorate:
5884 case spv::OpBranch:
5885 case spv::OpBranchConditional:
5886 case spv::OpSelectionMerge:
5887 case spv::OpLoopMerge:
5888 case spv::OpStore:
5889 case spv::OpImageWrite:
5890 case spv::OpReturnValue:
5891 case spv::OpControlBarrier:
5892 case spv::OpMemoryBarrier:
5893 case spv::OpReturn:
5894 case spv::OpFunctionEnd:
5895 case spv::OpCopyMemory: {
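// These instructions have no Result <id>: write the word count and opcode,
// then every operand in order.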
5896 WriteWordCountAndOpcode(Inst);
5897 for (uint32_t i = 0; i < Ops.size(); i++) {
5898 WriteOperand(Ops[i]);
5899 }
5900 break;
5901 }
5902 case spv::OpTypeBool:
5903 case spv::OpTypeVoid:
5904 case spv::OpTypeSampler:
5905 case spv::OpLabel:
5906 case spv::OpExtInstImport:
5907 case spv::OpTypePointer:
5908 case spv::OpTypeRuntimeArray:
5909 case spv::OpTypeStruct:
5910 case spv::OpTypeImage:
5911 case spv::OpTypeSampledImage:
5912 case spv::OpTypeInt:
5913 case spv::OpTypeFloat:
5914 case spv::OpTypeArray:
5915 case spv::OpTypeVector:
5916 case spv::OpTypeFunction: {
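// These instructions place their Result <id> immediately after the opcode
// word, followed by the remaining operands.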
5917 WriteWordCountAndOpcode(Inst);
5918 WriteResultID(Inst);
5919 for (uint32_t i = 0; i < Ops.size(); i++) {
5920 WriteOperand(Ops[i]);
5921 }
5922 break;
5923 }
5924 case spv::OpFunction:
5925 case spv::OpFunctionParameter:
5926 case spv::OpAccessChain:
5927 case spv::OpPtrAccessChain:
5928 case spv::OpInBoundsAccessChain:
5929 case spv::OpUConvert:
5930 case spv::OpSConvert:
5931 case spv::OpConvertFToU:
5932 case spv::OpConvertFToS:
5933 case spv::OpConvertUToF:
5934 case spv::OpConvertSToF:
5935 case spv::OpFConvert:
5936 case spv::OpConvertPtrToU:
5937 case spv::OpConvertUToPtr:
5938 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005939 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005940 case spv::OpIAdd:
5941 case spv::OpFAdd:
5942 case spv::OpISub:
5943 case spv::OpFSub:
5944 case spv::OpIMul:
5945 case spv::OpFMul:
5946 case spv::OpUDiv:
5947 case spv::OpSDiv:
5948 case spv::OpFDiv:
5949 case spv::OpUMod:
5950 case spv::OpSRem:
5951 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005952 case spv::OpUMulExtended:
5953 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005954 case spv::OpBitwiseOr:
5955 case spv::OpBitwiseXor:
5956 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005957 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005958 case spv::OpShiftLeftLogical:
5959 case spv::OpShiftRightLogical:
5960 case spv::OpShiftRightArithmetic:
5961 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005962 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005963 case spv::OpCompositeExtract:
5964 case spv::OpVectorExtractDynamic:
5965 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005966 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005967 case spv::OpVectorInsertDynamic:
5968 case spv::OpVectorShuffle:
5969 case spv::OpIEqual:
5970 case spv::OpINotEqual:
5971 case spv::OpUGreaterThan:
5972 case spv::OpUGreaterThanEqual:
5973 case spv::OpULessThan:
5974 case spv::OpULessThanEqual:
5975 case spv::OpSGreaterThan:
5976 case spv::OpSGreaterThanEqual:
5977 case spv::OpSLessThan:
5978 case spv::OpSLessThanEqual:
5979 case spv::OpFOrdEqual:
5980 case spv::OpFOrdGreaterThan:
5981 case spv::OpFOrdGreaterThanEqual:
5982 case spv::OpFOrdLessThan:
5983 case spv::OpFOrdLessThanEqual:
5984 case spv::OpFOrdNotEqual:
5985 case spv::OpFUnordEqual:
5986 case spv::OpFUnordGreaterThan:
5987 case spv::OpFUnordGreaterThanEqual:
5988 case spv::OpFUnordLessThan:
5989 case spv::OpFUnordLessThanEqual:
5990 case spv::OpFUnordNotEqual:
5991 case spv::OpExtInst:
5992 case spv::OpIsInf:
5993 case spv::OpIsNan:
5994 case spv::OpAny:
5995 case spv::OpAll:
5996 case spv::OpUndef:
5997 case spv::OpConstantNull:
5998 case spv::OpLogicalOr:
5999 case spv::OpLogicalAnd:
6000 case spv::OpLogicalNot:
6001 case spv::OpLogicalNotEqual:
6002 case spv::OpConstantComposite:
6003 case spv::OpSpecConstantComposite:
6004 case spv::OpConstantTrue:
6005 case spv::OpConstantFalse:
6006 case spv::OpConstant:
6007 case spv::OpSpecConstant:
6008 case spv::OpVariable:
6009 case spv::OpFunctionCall:
6010 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05006011 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04006012 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04006013 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05006014 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04006015 case spv::OpSelect:
6016 case spv::OpPhi:
6017 case spv::OpLoad:
6018 case spv::OpAtomicIAdd:
6019 case spv::OpAtomicISub:
6020 case spv::OpAtomicExchange:
6021 case spv::OpAtomicIIncrement:
6022 case spv::OpAtomicIDecrement:
6023 case spv::OpAtomicCompareExchange:
6024 case spv::OpAtomicUMin:
6025 case spv::OpAtomicSMin:
6026 case spv::OpAtomicUMax:
6027 case spv::OpAtomicSMax:
6028 case spv::OpAtomicAnd:
6029 case spv::OpAtomicOr:
6030 case spv::OpAtomicXor:
6031 case spv::OpDot: {
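// Value-producing instructions: Ops[0] is the result type, then the
// Result <id>, then the remaining operands.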
6032 WriteWordCountAndOpcode(Inst);
6033 WriteOperand(Ops[0]);
6034 WriteResultID(Inst);
6035 for (uint32_t i = 1; i < Ops.size(); i++) {
6036 WriteOperand(Ops[i]);
6037 }
6038 break;
6039 }
6040 }
6041 }
6042}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006043
alan-bakerb6b09dc2018-11-08 16:59:28 -05006044bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04006045 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006046 case Type::HalfTyID:
6047 case Type::FloatTyID:
6048 case Type::DoubleTyID:
6049 case Type::IntegerTyID:
6050 case Type::VectorTyID:
6051 return true;
6052 case Type::PointerTyID: {
6053 const PointerType *pointer_type = cast<PointerType>(type);
6054 if (pointer_type->getPointerAddressSpace() !=
6055 AddressSpace::UniformConstant) {
6056 auto pointee_type = pointer_type->getPointerElementType();
6057 if (pointee_type->isStructTy() &&
6058 cast<StructType>(pointee_type)->isOpaque()) {
6059 // Images and samplers are not nullable.
6060 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006061 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006062 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006063 return true;
6064 }
6065 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04006066 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05006067 case Type::StructTyID: {
6068 const StructType *struct_type = cast<StructType>(type);
6069 // Images and samplers are not nullable.
6070 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006071 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006072 for (const auto element : struct_type->elements()) {
6073 if (!IsTypeNullable(element))
6074 return false;
6075 }
6076 return true;
6077 }
6078 default:
6079 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006080 }
6081}
Alan Bakerfcda9482018-10-02 17:09:59 -04006082
6083void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
6084 if (auto *offsets_md =
6085 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
6086 // Metadata is stored as key-value pair operands. The first element of each
6087 // operand is the type and the second is a vector of offsets.
6088 for (const auto *operand : offsets_md->operands()) {
6089 const auto *pair = cast<MDTuple>(operand);
6090 auto *type =
6091 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6092 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6093 std::vector<uint32_t> offsets;
6094 for (const Metadata *offset_md : offset_vector->operands()) {
6095 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006096 offsets.push_back(static_cast<uint32_t>(
6097 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006098 }
6099 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6100 }
6101 }
6102
6103 if (auto *sizes_md =
6104 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
6105 // Metadata is stored as key-value pair operands. The first element of each
6106 // operand is the type and the second is a triple of sizes: type size in
6107 // bits, store size and alloc size.
6108 for (const auto *operand : sizes_md->operands()) {
6109 const auto *pair = cast<MDTuple>(operand);
6110 auto *type =
6111 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6112 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6113 uint64_t type_size_in_bits =
6114 cast<ConstantInt>(
6115 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6116 ->getZExtValue();
6117 uint64_t type_store_size =
6118 cast<ConstantInt>(
6119 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6120 ->getZExtValue();
6121 uint64_t type_alloc_size =
6122 cast<ConstantInt>(
6123 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6124 ->getZExtValue();
6125 RemappedUBOTypeSizes.insert(std::make_pair(
6126 type, std::make_tuple(type_size_in_bits, type_store_size,
6127 type_alloc_size)));
6128 }
6129 }
6130}
6131
6132uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6133 const DataLayout &DL) {
6134 auto iter = RemappedUBOTypeSizes.find(type);
6135 if (iter != RemappedUBOTypeSizes.end()) {
6136 return std::get<0>(iter->second);
6137 }
6138
6139 return DL.getTypeSizeInBits(type);
6140}
6141
6142uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6143 auto iter = RemappedUBOTypeSizes.find(type);
6144 if (iter != RemappedUBOTypeSizes.end()) {
6145 return std::get<1>(iter->second);
6146 }
6147
6148 return DL.getTypeStoreSize(type);
6149}
6150
6151uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6152 auto iter = RemappedUBOTypeSizes.find(type);
6153 if (iter != RemappedUBOTypeSizes.end()) {
6154 return std::get<2>(iter->second);
6155 }
6156
6157 return DL.getTypeAllocSize(type);
6158}
alan-baker5b86ed72019-02-15 08:26:50 -05006159
Kévin Petitbbbda972020-03-03 19:16:31 +00006160uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
6161 StructType *type, unsigned member, const DataLayout &DL) {
6162 const auto StructLayout = DL.getStructLayout(type);
6163 // Search for the correct offsets if this type was remapped.
6164 std::vector<uint32_t> *offsets = nullptr;
6165 auto iter = RemappedUBOTypeOffsets.find(type);
6166 if (iter != RemappedUBOTypeOffsets.end()) {
6167 offsets = &iter->second;
6168 }
6169 auto ByteOffset =
6170 static_cast<uint32_t>(StructLayout->getElementOffset(member));
6171 if (offsets) {
6172 ByteOffset = (*offsets)[member];
6173 }
6174
6175 return ByteOffset;
6176}
6177
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006178void SPIRVProducerPass::setVariablePointersCapabilities(
6179 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006180 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6181 setVariablePointersStorageBuffer(true);
6182 } else {
6183 setVariablePointers(true);
6184 }
6185}
6186
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006187Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006188 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6189 return GetBasePointer(gep->getPointerOperand());
6190 }
6191
6192 // Conservatively return |v|.
6193 return v;
6194}
6195
6196bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6197 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6198 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6199 if (lhs_call->getCalledFunction()->getName().startswith(
6200 clspv::ResourceAccessorFunction()) &&
6201 rhs_call->getCalledFunction()->getName().startswith(
6202 clspv::ResourceAccessorFunction())) {
6203 // For resource accessors, match descriptor set and binding.
6204 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6205 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6206 return true;
6207 } else if (lhs_call->getCalledFunction()->getName().startswith(
6208 clspv::WorkgroupAccessorFunction()) &&
6209 rhs_call->getCalledFunction()->getName().startswith(
6210 clspv::WorkgroupAccessorFunction())) {
6211 // For workgroup resources, match spec id.
6212 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6213 return true;
6214 }
6215 }
6216 }
6217
6218 return false;
6219}
6220
6221bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6222 assert(inst->getType()->isPointerTy());
6223 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6224 spv::StorageClassStorageBuffer);
6225 const bool hack_undef = clspv::Option::HackUndef();
6226 if (auto *select = dyn_cast<SelectInst>(inst)) {
6227 auto *true_base = GetBasePointer(select->getTrueValue());
6228 auto *false_base = GetBasePointer(select->getFalseValue());
6229
6230 if (true_base == false_base)
6231 return true;
6232
6233 // If either the true or false operand is a null, then we satisfy the same
6234 // object constraint.
6235 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6236 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6237 return true;
6238 }
6239
6240 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6241 if (false_cst->isNullValue() ||
6242 (hack_undef && isa<UndefValue>(false_base)))
6243 return true;
6244 }
6245
6246 if (sameResource(true_base, false_base))
6247 return true;
6248 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6249 Value *value = nullptr;
6250 bool ok = true;
6251 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6252 auto *base = GetBasePointer(phi->getIncomingValue(i));
6253 // Null values satisfy the constraint of selecting from the
6254 // same object.
6255 if (!value) {
6256 if (auto *cst = dyn_cast<Constant>(base)) {
6257 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6258 value = base;
6259 } else {
6260 value = base;
6261 }
6262 } else if (base != value) {
6263 if (auto *base_cst = dyn_cast<Constant>(base)) {
6264 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6265 continue;
6266 }
6267
6268 if (sameResource(value, base))
6269 continue;
6270
6271 // Values don't represent the same base.
6272 ok = false;
6273 }
6274 }
6275
6276 return ok;
6277 }
6278
6279 // Conservatively return false.
6280 return false;
6281}
alan-bakere9308012019-03-15 10:25:13 -04006282
6283bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6284 if (!Arg.getType()->isPointerTy() ||
6285 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6286 // Only SSBOs need to be annotated as coherent.
6287 return false;
6288 }
6289
6290 DenseSet<Value *> visited;
6291 std::vector<Value *> stack;
6292 for (auto *U : Arg.getParent()->users()) {
6293 if (auto *call = dyn_cast<CallInst>(U)) {
6294 stack.push_back(call->getOperand(Arg.getArgNo()));
6295 }
6296 }
6297
6298 while (!stack.empty()) {
6299 Value *v = stack.back();
6300 stack.pop_back();
6301
6302 if (!visited.insert(v).second)
6303 continue;
6304
6305 auto *resource_call = dyn_cast<CallInst>(v);
6306 if (resource_call &&
6307 resource_call->getCalledFunction()->getName().startswith(
6308 clspv::ResourceAccessorFunction())) {
6309 // If this is a resource accessor function, check if the coherent operand
6310 // is set.
6311 const auto coherent =
6312 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6313 ->getZExtValue());
6314 if (coherent == 1)
6315 return true;
6316 } else if (auto *arg = dyn_cast<Argument>(v)) {
6317 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006318 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006319 if (auto *call = dyn_cast<CallInst>(U)) {
6320 stack.push_back(call->getOperand(arg->getArgNo()));
6321 }
6322 }
6323 } else if (auto *user = dyn_cast<User>(v)) {
6324 // If this is a user, traverse all operands that could lead to resource
6325 // variables.
6326 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6327 Value *operand = user->getOperand(i);
6328 if (operand->getType()->isPointerTy() &&
6329 operand->getType()->getPointerAddressSpace() ==
6330 clspv::AddressSpace::Global) {
6331 stack.push_back(operand);
6332 }
6333 }
6334 }
6335 }
6336
6337 // No coherent resource variables encountered.
6338 return false;
6339}
alan-baker06cad652019-12-03 17:56:47 -05006340
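// Records, for every function in |module|, the loop merge/continue blocks and
// the selection merge blocks needed to emit structured control flow, storing
// them in MergeBlocks and ContinueBlocks. Assumes the CFG has already been
// structurized (e.g. by the StructurizeCFG pass), so each loop has a single
// exit block and the false successor of a non-back-edge conditional branch
// can serve as its merge block.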
6341void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6342 // First, track loop merges and continues.
6343 DenseSet<BasicBlock *> LoopMergesAndContinues;
6344 for (auto &F : module) {
6345 if (F.isDeclaration())
6346 continue;
6347
6348 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6349 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6350 std::deque<BasicBlock *> order;
6351 DenseSet<BasicBlock *> visited;
6352 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6353
6354 for (auto BB : order) {
6355 auto terminator = BB->getTerminator();
6356 auto branch = dyn_cast<BranchInst>(terminator);
6357 if (LI.isLoopHeader(BB)) {
6358 auto L = LI.getLoopFor(BB);
6359 BasicBlock *ContinueBB = nullptr;
6360 BasicBlock *MergeBB = nullptr;
6361
6362 MergeBB = L->getExitBlock();
6363 if (!MergeBB) {
6364          // The StructurizeCFG pass converts the CFG into a triangle shape
6365          // in which each region has a single entry and a single exit. As a
6366          // result, a loop should not have multiple exits.
6367 llvm_unreachable("Loop has multiple exits???");
6368 }
6369
6370 if (L->isLoopLatch(BB)) {
6371 ContinueBB = BB;
6372 } else {
6373          // Per SPIR-V spec section 2.11, the Continue Target must
6374          // dominate the back-edge block.
6375 BasicBlock *Header = L->getHeader();
6376 BasicBlock *Latch = L->getLoopLatch();
6377 for (auto *loop_block : L->blocks()) {
6378 if (loop_block == Header) {
6379 continue;
6380 }
6381
6382            // Check whether this block dominates the block with the
6383            // back-edge. The loop latch is the single block with a
6384            // back-edge. Where possible, StructurizeCFG made the loop
6385            // conform to this requirement; otherwise |Latch| is nullptr.
6386 if (DT.dominates(loop_block, Latch)) {
6387 ContinueBB = loop_block;
6388 }
6389 }
6390
6391 if (!ContinueBB) {
6392 llvm_unreachable("Wrong continue block from loop");
6393 }
6394 }
6395
6396 // Record the continue and merge blocks.
6397 MergeBlocks[BB] = MergeBB;
6398 ContinueBlocks[BB] = ContinueBB;
6399 LoopMergesAndContinues.insert(MergeBB);
6400 LoopMergesAndContinues.insert(ContinueBB);
6401 } else if (branch && branch->isConditional()) {
6402 auto L = LI.getLoopFor(BB);
6403 bool HasBackedge = false;
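        // Walk up the loop nest to see whether this block is the latch of any
        // enclosing loop (i.e. its conditional branch is a back-edge).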
6404 while (L && !HasBackedge) {
6405 if (L->isLoopLatch(BB)) {
6406 HasBackedge = true;
6407 }
6408 L = L->getParentLoop();
6409 }
6410
6411 if (!HasBackedge) {
6412 // Only need a merge if the branch doesn't include a loop break or
6413 // continue.
6414 auto true_bb = branch->getSuccessor(0);
6415 auto false_bb = branch->getSuccessor(1);
6416 if (!LoopMergesAndContinues.count(true_bb) &&
6417 !LoopMergesAndContinues.count(false_bb)) {
6418            // StructurizeCFG has already restructured the CFG. Just use
6419            // the false block of the branch instruction as the merge block.
6420 MergeBlocks[BB] = false_bb;
6421 }
6422 }
6423 }
6424 }
6425 }
6426}