// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

// SPIRV Module Sections (per 2.4 of the SPIRV spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  kSectionCount
};

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
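      // For example, a 7-character string plus its terminating null occupies
      // 8 bytes, i.e. (7 + 4) / 4 = 2 words; an 8-character string needs 3.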
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
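
// A minimal usage sketch (illustrative only; the IDs and values below are
// hypothetical): operands are built with the Mk* helpers above and streamed
// into a SPIRVOperandList, which takes ownership of each operand:
//
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkNum(2) << MkString("kernel_name");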

struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the
  // single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};
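
// A sketch of how instructions are typically assembled later in this pass
// (illustrative only; the IDs and the chosen section are hypothetical):
//
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkId(pointer_id);
//   auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
//   getSPIRVInstList(kFunctions).push_back(Inst);
//
// The word count is one word for the opcode/length, one more if there is a
// result ID, plus GetNumWords() for each operand, matching the SPIR-V binary
// encoding.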

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {}

  virtual ~SPIRVProducerPass() {
    for (int i = 0; i < kSectionCount; ++i) {
      for (auto *Inst : SPIRVSections[i]) {
        delete Inst;
      }
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindWorkgroupVars();
  bool FindExtInst();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateSPIRVConstants();
  void GenerateModuleInfo();
  void GeneratePushConstantDescriptorMapEntries();
  void GenerateSpecConstantDescriptorMapEntries();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(Function &F);
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. kGlslExtInstBad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. kGlslExtInstBad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // these use the internal map; otherwise they fall back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is a phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;

  Module *module;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv
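
// A usage sketch (assumes the caller drives this through an LLVM legacy pass
// manager; the variable names here are hypothetical):
//
//   llvm::legacy::PassManager pm;
//   pm.add(clspv::createSPIRVProducerPass(binary_out, &map_entries,
//                                         sampler_map,
//                                         /*outputCInitList=*/false));
//   pm.run(*module);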

bool SPIRVProducerPass::runOnModule(Module &M) {
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst()) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate descriptor map entries for all push constants.
  GeneratePushConstantDescriptorMapEntries();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate descriptor map entries for module scope specialization constants.
  GenerateSpecConstantDescriptorMapEntries();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
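    // Pack each group of four bytes into a little-endian 32-bit word, e.g.
    // bytes {0x07, 0x23, 0x02, 0x00} become the word 0x00022307.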
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
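
// For reference, the header written above is five 32-bit words:
//   word 0: spv::MagicNumber
//   word 1: version (0x10000, i.e. SPIR-V 1.0)
//   word 2: generator (Google's vendor ID, 21, in the high 16 bits)
//   word 3: bound (a placeholder here; patched later by patchHeader())
//   word 4: schema (0)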

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR for things such as global variables for
  // arguments, constants, and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. It is
  // executed ahead of FindType and FindConstant.
  LLVMContext &Context = module->getContext();

  FindGlobalConstVars();

  FindResourceVars();

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : module->globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
  FindWorkgroupVars();

  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // A zext/sext/uitofp applied to an i1 will be lowered to OpSelect,
          // which needs the corresponding constants (0/1, 0/-1, or 0.0/1.0),
          // so those constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          } else if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          } else if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(VectorType::get(Type::getInt32Ty(Context),
                                         components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (module->getTypeByName("opencl.image1d_ro_t.float") ||
        module->getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_wo_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_wo_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image3d_wo_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_array_wo_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.uint") ||
               module->getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.uint") ||
               module->getTypeByName("opencl.image1d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image1d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image2d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.int") ||
               module->getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
1087
SJW77b87ad2020-04-21 14:37:52 -05001088void SPIRVProducerPass::FindResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001089 ResourceVarInfoList.clear();
1090 FunctionToResourceVarsMap.clear();
1091 ModuleOrderedResourceVars.reset();
1092 // Normally, there is one resource variable per clspv.resource.var.*
1093 // function, since that is unique'd by arg type and index. By design,
1094 // we can share these resource variables across kernels because all
1095 // kernels use the same descriptor set.
1096 //
1097 // But if the user requested distinct descriptor sets per kernel, then
1098 // the descriptor allocator has made different (set,binding) pairs for
1099 // the same (type,arg_index) pair. Since we can decorate a resource
1100 // variable with only exactly one DescriptorSet and Binding, we are
1101 // forced in this case to make distinct resource variables whenever
Kévin Petitbbbda972020-03-03 19:16:31 +00001102 // the same clspv.resource.var.X function is seen with distinct
David Neto862b7d82018-06-14 18:48:37 -04001103 // (set,binding) values.
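  // Each use of such a function is a call whose operands encode, as read
  // below: operand 0 = descriptor set, 1 = binding, 2 = argument kind,
  // 3 = argument index, and 5 = coherent flag.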
1104 const bool always_distinct_sets =
1105 clspv::Option::DistinctKernelDescriptorSets();
SJW77b87ad2020-04-21 14:37:52 -05001106 for (Function &F : *module) {
David Neto862b7d82018-06-14 18:48:37 -04001107 // Rely on the fact the resource var functions have a stable ordering
1108 // in the module.
Alan Baker202c8c72018-08-13 13:47:44 -04001109 if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001110 // Find all calls to this function with distinct set and binding pairs.
1111 // Save them in ResourceVarInfoList.
1112
 1113 // Determine uniqueness of the (set,binding) pairs only within this
1114 // one resource-var builtin function.
1115 using SetAndBinding = std::pair<unsigned, unsigned>;
1116 // Maps set and binding to the resource var info.
1117 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1118 bool first_use = true;
1119 for (auto &U : F.uses()) {
1120 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1121 const auto set = unsigned(
1122 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1123 const auto binding = unsigned(
1124 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1125 const auto arg_kind = clspv::ArgKind(
1126 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1127 const auto arg_index = unsigned(
1128 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001129 const auto coherent = unsigned(
1130 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001131
1132 // Find or make the resource var info for this combination.
1133 ResourceVarInfo *rv = nullptr;
1134 if (always_distinct_sets) {
1135 // Make a new resource var any time we see a different
1136 // (set,binding) pair.
1137 SetAndBinding key{set, binding};
1138 auto where = set_and_binding_map.find(key);
1139 if (where == set_and_binding_map.end()) {
1140 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001141 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001142 ResourceVarInfoList.emplace_back(rv);
1143 set_and_binding_map[key] = rv;
1144 } else {
1145 rv = where->second;
1146 }
1147 } else {
1148 // The default is to make exactly one resource for each
1149 // clspv.resource.var.* function.
1150 if (first_use) {
1151 first_use = false;
1152 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001153 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001154 ResourceVarInfoList.emplace_back(rv);
1155 } else {
1156 rv = ResourceVarInfoList.back().get();
1157 }
1158 }
1159
1160 // Now populate FunctionToResourceVarsMap.
1161 auto &mapping =
1162 FunctionToResourceVarsMap[call->getParent()->getParent()];
1163 while (mapping.size() <= arg_index) {
1164 mapping.push_back(nullptr);
1165 }
1166 mapping[arg_index] = rv;
1167 }
1168 }
1169 }
1170 }
1171
1172 // Populate ModuleOrderedResourceVars.
SJW77b87ad2020-04-21 14:37:52 -05001173 for (Function &F : *module) {
David Neto862b7d82018-06-14 18:48:37 -04001174 auto where = FunctionToResourceVarsMap.find(&F);
1175 if (where != FunctionToResourceVarsMap.end()) {
1176 for (auto &rv : where->second) {
1177 if (rv != nullptr) {
1178 ModuleOrderedResourceVars.insert(rv);
1179 }
1180 }
1181 }
1182 }
1183 if (ShowResourceVars) {
1184 for (auto *info : ModuleOrderedResourceVars) {
1185 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1186 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1187 << "\n";
1188 }
1189 }
1190}
1191
SJW77b87ad2020-04-21 14:37:52 -05001192bool SPIRVProducerPass::FindExtInst() {
1193 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04001194 bool HasExtInst = false;
1195
SJW77b87ad2020-04-21 14:37:52 -05001196 for (Function &F : *module) {
David Neto22f144c2017-06-12 14:26:21 -04001197 for (BasicBlock &BB : F) {
1198 for (Instruction &I : BB) {
1199 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1200 Function *Callee = Call->getCalledFunction();
 1201 // Check whether this call is for extended instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001202 auto callee_name = Callee->getName();
1203 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1204 const glsl::ExtInst IndirectEInst =
1205 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001206
David Neto3fbb4072017-10-16 11:28:14 -04001207 HasExtInst |=
1208 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1209
1210 if (IndirectEInst) {
1211 // Register extra constants if needed.
1212
1213 // Registers a type and constant for computing the result of the
1214 // given instruction. If the result of the instruction is a vector,
1215 // then make a splat vector constant with the same number of
1216 // elements.
1217 auto register_constant = [this, &I](Constant *constant) {
1218 FindType(constant->getType());
1219 FindConstant(constant);
1220 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1221 // Register the splat vector of the value with the same
1222 // width as the result of the instruction.
1223 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001224 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001225 constant);
1226 FindConstant(vec_constant);
1227 FindType(vec_constant->getType());
1228 }
1229 };
1230 switch (IndirectEInst) {
1231 case glsl::ExtInstFindUMsb:
1232 // clz needs OpExtInst and OpISub with constant 31, or splat
1233 // vector of 31. Add it to the constant list here.
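                // That is, clz(x) is later emitted roughly as
                // 31 - FindUMsb(x), hence the extra constant.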
1234 register_constant(
1235 ConstantInt::get(Type::getInt32Ty(Context), 31));
1236 break;
1237 case glsl::ExtInstAcos:
1238 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001239 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001240 case glsl::ExtInstAtan2:
1241 // We need 1/pi for acospi, asinpi, atan2pi.
1242 register_constant(
1243 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1244 break;
1245 default:
1246 assert(false && "internally inconsistent");
1247 }
David Neto22f144c2017-06-12 14:26:21 -04001248 }
1249 }
1250 }
1251 }
1252 }
1253
1254 return HasExtInst;
1255}
1256
1257void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1258 // Investigate global variable's type.
1259 FindType(GV.getType());
1260}
1261
1262void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1263 // Investigate function's type.
1264 FunctionType *FTy = F.getFunctionType();
1265
1266 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1267 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001268 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001269 if (GlobalConstFuncTyMap.count(FTy)) {
 1270 uint32_t GVCstArgIdx = GlobalConstFuncTyMap[FTy].second;
1271 SmallVector<Type *, 4> NewFuncParamTys;
1272 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1273 Type *ParamTy = FTy->getParamType(i);
1274 if (i == GVCstArgIdx) {
1275 Type *EleTy = ParamTy->getPointerElementType();
1276 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1277 }
1278
1279 NewFuncParamTys.push_back(ParamTy);
1280 }
1281
1282 FunctionType *NewFTy =
1283 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1284 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1285 FTy = NewFTy;
1286 }
1287
1288 FindType(FTy);
1289 } else {
 1290 // Kernel functions take no parameters in the generated SPIR-V (arguments
 1291 // are mapped to resources), so create a parameterless function type here.
1292 SmallVector<Type *, 4> NewFuncParamTys;
1293 FunctionType *NewFTy =
1294 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1295 FindType(NewFTy);
1296 }
1297
1298 // Investigate instructions' type in function body.
1299 for (BasicBlock &BB : F) {
1300 for (Instruction &I : BB) {
1301 if (isa<ShuffleVectorInst>(I)) {
1302 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1303 // Ignore type for mask of shuffle vector instruction.
1304 if (i == 2) {
1305 continue;
1306 }
1307
1308 Value *Op = I.getOperand(i);
1309 if (!isa<MetadataAsValue>(Op)) {
1310 FindType(Op->getType());
1311 }
1312 }
1313
1314 FindType(I.getType());
1315 continue;
1316 }
1317
David Neto862b7d82018-06-14 18:48:37 -04001318 CallInst *Call = dyn_cast<CallInst>(&I);
1319
1320 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001321 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001322 // This is a fake call representing access to a resource variable.
1323 // We handle that elsewhere.
1324 continue;
1325 }
1326
Alan Baker202c8c72018-08-13 13:47:44 -04001327 if (Call && Call->getCalledFunction()->getName().startswith(
1328 clspv::WorkgroupAccessorFunction())) {
1329 // This is a fake call representing access to a workgroup variable.
1330 // We handle that elsewhere.
1331 continue;
1332 }
1333
alan-bakerf083bed2020-01-29 08:15:42 -05001334 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1335 // OpCompositeExtract which takes literal values for indices. As a result
1336 // don't map the type of indices.
1337 if (I.getOpcode() == Instruction::ExtractValue) {
1338 FindType(I.getOperand(0)->getType());
1339 continue;
1340 }
1341 if (I.getOpcode() == Instruction::InsertValue) {
1342 FindType(I.getOperand(0)->getType());
1343 FindType(I.getOperand(1)->getType());
1344 continue;
1345 }
1346
1347 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1348 // the index is a constant. In such a case don't map the index type.
1349 if (I.getOpcode() == Instruction::ExtractElement) {
1350 FindType(I.getOperand(0)->getType());
1351 Value *op1 = I.getOperand(1);
1352 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1353 FindType(op1->getType());
1354 }
1355 continue;
1356 }
1357 if (I.getOpcode() == Instruction::InsertElement) {
1358 FindType(I.getOperand(0)->getType());
1359 FindType(I.getOperand(1)->getType());
1360 Value *op2 = I.getOperand(2);
1361 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1362 FindType(op2->getType());
1363 }
1364 continue;
1365 }
1366
David Neto22f144c2017-06-12 14:26:21 -04001367 // Work through the operands of the instruction.
1368 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1369 Value *const Op = I.getOperand(i);
1370 // If any of the operands is a constant, find the type!
1371 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1372 FindType(Op->getType());
1373 }
1374 }
1375
1376 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001377 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001378 // Avoid checking a call instruction's operand types.
1379 break;
1380 }
Alan Baker202c8c72018-08-13 13:47:44 -04001381 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1382 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1383 clspv::WorkgroupAccessorFunction())) {
1384 // This is a fake call representing access to a workgroup variable.
1385 // We handle that elsewhere.
1386 continue;
1387 }
1388 }
David Neto22f144c2017-06-12 14:26:21 -04001389 if (!isa<MetadataAsValue>(&Op)) {
1390 FindType(Op->getType());
1391 continue;
1392 }
1393 }
1394
David Neto22f144c2017-06-12 14:26:21 -04001395 // We don't want to track the type of this call as we are going to replace
1396 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001397 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001398 Call->getCalledFunction()->getName())) {
1399 continue;
1400 }
1401
1402 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1403 // If gep's base operand has ModuleScopePrivate address space, make gep
1404 // return ModuleScopePrivate address space.
1405 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1406 // Add pointer type with private address space for global constant to
1407 // type list.
1408 Type *EleTy = I.getType()->getPointerElementType();
1409 Type *NewPTy =
1410 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1411
1412 FindType(NewPTy);
1413 continue;
1414 }
1415 }
1416
1417 FindType(I.getType());
1418 }
1419 }
1420}
1421
SJW77b87ad2020-04-21 14:37:52 -05001422void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001423 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001424 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001425 0 < getSamplerMap().size()) {
SJW77b87ad2020-04-21 14:37:52 -05001426 auto SamplerStructTy = module->getTypeByName("opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001427 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001428 SamplerStructTy =
1429 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001430 }
1431
1432 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1433
1434 FindType(SamplerTy);
1435 }
1436}
1437
SJW77b87ad2020-04-21 14:37:52 -05001438void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001439 // Record types so they are generated.
1440 TypesNeedingLayout.reset();
1441 StructTypesNeedingBlock.reset();
1442
1443 // To match older clspv codegen, generate the float type first if required
1444 // for images.
1445 for (const auto *info : ModuleOrderedResourceVars) {
1446 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1447 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001448 if (IsIntImageType(info->var_fn->getReturnType())) {
1449 // Nothing for now...
1450 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001451 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001452 }
1453
1454 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001455 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001456 }
1457 }
1458
1459 for (const auto *info : ModuleOrderedResourceVars) {
1460 Type *type = info->var_fn->getReturnType();
1461
1462 switch (info->arg_kind) {
1463 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001464 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001465 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1466 StructTypesNeedingBlock.insert(sty);
1467 } else {
1468 errs() << *type << "\n";
1469 llvm_unreachable("Buffer arguments must map to structures!");
1470 }
1471 break;
1472 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001473 case clspv::ArgKind::PodUBO:
1474 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001475 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1476 StructTypesNeedingBlock.insert(sty);
1477 } else {
1478 errs() << *type << "\n";
1479 llvm_unreachable("POD arguments must map to structures!");
1480 }
1481 break;
1482 case clspv::ArgKind::ReadOnlyImage:
1483 case clspv::ArgKind::WriteOnlyImage:
1484 case clspv::ArgKind::Sampler:
1485 // Sampler and image types map to the pointee type but
1486 // in the uniform constant address space.
1487 type = PointerType::get(type->getPointerElementType(),
1488 clspv::AddressSpace::UniformConstant);
1489 break;
1490 default:
1491 break;
1492 }
1493
1494 // The converted type is the type of the OpVariable we will generate.
1495 // If the pointee type is an array of size zero, FindType will convert it
1496 // to a runtime array.
1497 FindType(type);
1498 }
1499
alan-bakerdcd97412019-09-16 15:32:30 -04001500 // If module constants are clustered in a storage buffer then that struct
1501 // needs layout decorations.
1502 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001503 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001504 PointerType *PTy = cast<PointerType>(GV.getType());
1505 const auto AS = PTy->getAddressSpace();
1506 const bool module_scope_constant_external_init =
1507 (AS == AddressSpace::Constant) && GV.hasInitializer();
1508 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1509 if (module_scope_constant_external_init &&
1510 spv::BuiltInMax == BuiltinType) {
1511 StructTypesNeedingBlock.insert(
1512 cast<StructType>(PTy->getPointerElementType()));
1513 }
1514 }
1515 }
1516
SJW77b87ad2020-04-21 14:37:52 -05001517 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001518 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1519 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1520 assert(Ty->isStructTy() && "Push constants have to be structures.");
1521 auto STy = cast<StructType>(Ty);
1522 StructTypesNeedingBlock.insert(STy);
1523 }
1524 }
1525
David Neto862b7d82018-06-14 18:48:37 -04001526 // Traverse the arrays and structures underneath each Block, and
1527 // mark them as needing layout.
1528 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1529 StructTypesNeedingBlock.end());
1530 while (!work_list.empty()) {
1531 Type *type = work_list.back();
1532 work_list.pop_back();
1533 TypesNeedingLayout.insert(type);
1534 switch (type->getTypeID()) {
1535 case Type::ArrayTyID:
1536 work_list.push_back(type->getArrayElementType());
1537 if (!Hack_generate_runtime_array_stride_early) {
1538 // Remember this array type for deferred decoration.
1539 TypesNeedingArrayStride.insert(type);
1540 }
1541 break;
1542 case Type::StructTyID:
1543 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1544 work_list.push_back(elem_ty);
1545 }
1546 default:
1547 // This type and its contained types don't get layout.
1548 break;
1549 }
1550 }
1551}
1552
SJW77b87ad2020-04-21 14:37:52 -05001553void SPIRVProducerPass::FindWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001554 // The SpecId assignment for pointer-to-local arguments is recorded in
1555 // module-level metadata. Translate that information into local argument
1556 // information.
SJW77b87ad2020-04-21 14:37:52 -05001557 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001558 if (!nmd)
1559 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001560 for (auto operand : nmd->operands()) {
1561 MDTuple *tuple = cast<MDTuple>(operand);
1562 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1563 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001564 ConstantAsMetadata *arg_index_md =
1565 cast<ConstantAsMetadata>(tuple->getOperand(1));
1566 int arg_index = static_cast<int>(
1567 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1568 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001569
1570 ConstantAsMetadata *spec_id_md =
1571 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001572 int spec_id = static_cast<int>(
1573 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001574
Alan Baker202c8c72018-08-13 13:47:44 -04001575 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001576 if (LocalSpecIdInfoMap.count(spec_id))
1577 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001578
1579 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
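    // Four consecutive ids are reserved here. Judging from the uses below and
    // in GenerateSPIRVTypes, they appear to be: the workgroup variable, the
    // array-size spec constant, the array type, and the pointer-to-array type.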
1580 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1581 nextID + 1, nextID + 2,
1582 nextID + 3, spec_id};
1583 LocalSpecIdInfoMap[spec_id] = info;
1584 nextID += 4;
1585
1586 // Ensure the types necessary for this argument get generated.
SJW77b87ad2020-04-21 14:37:52 -05001587 Type *IdxTy = Type::getInt32Ty(module->getContext());
Alan Baker202c8c72018-08-13 13:47:44 -04001588 FindConstant(ConstantInt::get(IdxTy, 0));
1589 FindType(IdxTy);
1590 FindType(arg->getType());
1591 }
1592}
1593
David Neto22f144c2017-06-12 14:26:21 -04001594void SPIRVProducerPass::FindType(Type *Ty) {
1595 TypeList &TyList = getTypeList();
1596
1597 if (0 != TyList.idFor(Ty)) {
1598 return;
1599 }
1600
1601 if (Ty->isPointerTy()) {
1602 auto AddrSpace = Ty->getPointerAddressSpace();
1603 if ((AddressSpace::Constant == AddrSpace) ||
1604 (AddressSpace::Global == AddrSpace)) {
1605 auto PointeeTy = Ty->getPointerElementType();
1606
1607 if (PointeeTy->isStructTy() &&
1608 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1609 FindType(PointeeTy);
1610 auto ActualPointerTy =
1611 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1612 FindType(ActualPointerTy);
1613 return;
1614 }
1615 }
1616 }
1617
David Neto862b7d82018-06-14 18:48:37 -04001618 // By convention, an LLVM array type with 0 elements maps to
 1619 // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which
 1620 // has a constant number of elements. We need to register the type of
 1621 // that length constant.
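  // For example (a sketch): [0 x float] becomes OpTypeRuntimeArray %float,
  // while [4 x float] becomes OpTypeArray %float %uint_4, so the i32 type
  // for the length constant is registered here.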
1622 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1623 if (arrayTy->getNumElements() > 0) {
1624 LLVMContext &Context = Ty->getContext();
1625 FindType(Type::getInt32Ty(Context));
1626 }
David Neto22f144c2017-06-12 14:26:21 -04001627 }
1628
1629 for (Type *SubTy : Ty->subtypes()) {
1630 FindType(SubTy);
1631 }
1632
1633 TyList.insert(Ty);
1634}
1635
1636void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1637 // If the global variable has a (non undef) initializer.
1638 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001639 // Generate the constant if it's not the initializer to a module scope
1640 // constant that we will expect in a storage buffer.
1641 const bool module_scope_constant_external_init =
1642 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1643 clspv::Option::ModuleConstantsInStorageBuffer();
1644 if (!module_scope_constant_external_init) {
1645 FindConstant(GV.getInitializer());
1646 }
David Neto22f144c2017-06-12 14:26:21 -04001647 }
1648}
1649
1650void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1651 // Investigate constants in function body.
1652 for (BasicBlock &BB : F) {
1653 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001654 if (auto *call = dyn_cast<CallInst>(&I)) {
1655 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001656 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001657 // We've handled these constants elsewhere, so skip it.
1658 continue;
1659 }
Alan Baker202c8c72018-08-13 13:47:44 -04001660 if (name.startswith(clspv::ResourceAccessorFunction())) {
1661 continue;
1662 }
1663 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001664 continue;
1665 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001666 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1667 // Skip the first operand that has the SPIR-V Opcode
1668 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1669 if (isa<Constant>(I.getOperand(i)) &&
1670 !isa<GlobalValue>(I.getOperand(i))) {
1671 FindConstant(I.getOperand(i));
1672 }
1673 }
1674 continue;
1675 }
David Neto22f144c2017-06-12 14:26:21 -04001676 }
1677
1678 if (isa<AllocaInst>(I)) {
 1679 // An alloca instruction has a constant for the number of elements. Ignore it.
1680 continue;
1681 } else if (isa<ShuffleVectorInst>(I)) {
1682 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1683 // Ignore constant for mask of shuffle vector instruction.
1684 if (i == 2) {
1685 continue;
1686 }
1687
1688 if (isa<Constant>(I.getOperand(i)) &&
1689 !isa<GlobalValue>(I.getOperand(i))) {
1690 FindConstant(I.getOperand(i));
1691 }
1692 }
1693
1694 continue;
1695 } else if (isa<InsertElementInst>(I)) {
1696 // Handle InsertElement with <4 x i8> specially.
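        // A <4 x i8> is represented as a packed 32-bit integer when i8 is not
        // natively supported (see GenerateSPIRVTypes), so element inserts are
        // lowered to shift-and-mask sequences; register the 0xFF mask and the
        // byte-offset shift amounts those sequences need.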
1697 Type *CompositeTy = I.getOperand(0)->getType();
1698 if (is4xi8vec(CompositeTy)) {
1699 LLVMContext &Context = CompositeTy->getContext();
1700 if (isa<Constant>(I.getOperand(0))) {
1701 FindConstant(I.getOperand(0));
1702 }
1703
1704 if (isa<Constant>(I.getOperand(1))) {
1705 FindConstant(I.getOperand(1));
1706 }
1707
1708 // Add mask constant 0xFF.
1709 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1710 FindConstant(CstFF);
1711
1712 // Add shift amount constant.
1713 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1714 uint64_t Idx = CI->getZExtValue();
1715 Constant *CstShiftAmount =
1716 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1717 FindConstant(CstShiftAmount);
1718 }
1719
1720 continue;
1721 }
1722
1723 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1724 // Ignore constant for index of InsertElement instruction.
1725 if (i == 2) {
1726 continue;
1727 }
1728
1729 if (isa<Constant>(I.getOperand(i)) &&
1730 !isa<GlobalValue>(I.getOperand(i))) {
1731 FindConstant(I.getOperand(i));
1732 }
1733 }
1734
1735 continue;
1736 } else if (isa<ExtractElementInst>(I)) {
1737 // Handle ExtractElement with <4 x i8> specially.
1738 Type *CompositeTy = I.getOperand(0)->getType();
1739 if (is4xi8vec(CompositeTy)) {
1740 LLVMContext &Context = CompositeTy->getContext();
1741 if (isa<Constant>(I.getOperand(0))) {
1742 FindConstant(I.getOperand(0));
1743 }
1744
1745 // Add mask constant 0xFF.
1746 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1747 FindConstant(CstFF);
1748
1749 // Add shift amount constant.
1750 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1751 uint64_t Idx = CI->getZExtValue();
1752 Constant *CstShiftAmount =
1753 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1754 FindConstant(CstShiftAmount);
1755 } else {
1756 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1757 FindConstant(Cst8);
1758 }
1759
1760 continue;
1761 }
1762
1763 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1764 // Ignore constant for index of ExtractElement instruction.
1765 if (i == 1) {
1766 continue;
1767 }
1768
1769 if (isa<Constant>(I.getOperand(i)) &&
1770 !isa<GlobalValue>(I.getOperand(i))) {
1771 FindConstant(I.getOperand(i));
1772 }
1773 }
1774
1775 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001776 } else if ((Instruction::Xor == I.getOpcode()) &&
1777 I.getType()->isIntegerTy(1)) {
 1778 // Special-case Xor where the type is i1 and one of the arguments is a
 1779 // constant 1 (true): this becomes an OpLogicalNot in SPIR-V, so we
 1780 // don't need the constant.
David Neto22f144c2017-06-12 14:26:21 -04001781 bool foundConstantTrue = false;
1782 for (Use &Op : I.operands()) {
1783 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1784 auto CI = cast<ConstantInt>(Op);
1785
1786 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001787 // If we already found the true constant, we might (probably only
1788 // on -O0) have an OpLogicalNot which is taking a constant
1789 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001790 FindConstant(Op);
1791 } else {
1792 foundConstantTrue = true;
1793 }
1794 }
1795 }
1796
1797 continue;
David Netod2de94a2017-08-28 17:27:47 -04001798 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001799 // Special case when i8 is not natively supported.
1800 if (!clspv::Option::Int8Support()) {
1801 // For truncation to i8 we mask against 255.
1802 Type *ToTy = I.getType();
1803 if (8u == ToTy->getPrimitiveSizeInBits()) {
1804 LLVMContext &Context = ToTy->getContext();
1805 Constant *Cst255 =
1806 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1807 FindConstant(Cst255);
1808 }
David Netod2de94a2017-08-28 17:27:47 -04001809 }
Neil Henning39672102017-09-29 14:33:13 +01001810 } else if (isa<AtomicRMWInst>(I)) {
1811 LLVMContext &Context = I.getContext();
1812
1813 FindConstant(
1814 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1815 FindConstant(ConstantInt::get(
1816 Type::getInt32Ty(Context),
1817 spv::MemorySemanticsUniformMemoryMask |
1818 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001819 }
1820
1821 for (Use &Op : I.operands()) {
1822 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1823 FindConstant(Op);
1824 }
1825 }
1826 }
1827 }
1828}
1829
1830void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001831 ValueList &CstList = getConstantList();
1832
David Netofb9a7972017-08-25 17:08:24 -04001833 // If V is already tracked, ignore it.
1834 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001835 return;
1836 }
1837
David Neto862b7d82018-06-14 18:48:37 -04001838 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1839 return;
1840 }
1841
David Neto22f144c2017-06-12 14:26:21 -04001842 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001843 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001844
1845 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001846 if (is4xi8vec(CstTy)) {
1847 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001848 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001849 }
1850 }
1851
1852 if (Cst->getNumOperands()) {
1853 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1854 ++I) {
1855 FindConstant(*I);
1856 }
1857
David Netofb9a7972017-08-25 17:08:24 -04001858 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001859 return;
1860 } else if (const ConstantDataSequential *CDS =
1861 dyn_cast<ConstantDataSequential>(Cst)) {
1862 // Add constants for each element to constant list.
1863 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1864 Constant *EleCst = CDS->getElementAsConstant(i);
1865 FindConstant(EleCst);
1866 }
1867 }
1868
1869 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001870 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001871 }
1872}
1873
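// Maps an OpenCL address space to a SPIR-V storage class. For example, a
// __global pointer maps to StorageBuffer, while __constant maps to either
// Uniform or StorageBuffer depending on whether constant arguments are placed
// in a uniform buffer.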
1874spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1875 switch (AddrSpace) {
1876 default:
1877 llvm_unreachable("Unsupported OpenCL address space");
1878 case AddressSpace::Private:
1879 return spv::StorageClassFunction;
1880 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001881 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001882 case AddressSpace::Constant:
1883 return clspv::Option::ConstantArgsInUniformBuffer()
1884 ? spv::StorageClassUniform
1885 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001886 case AddressSpace::Input:
1887 return spv::StorageClassInput;
1888 case AddressSpace::Local:
1889 return spv::StorageClassWorkgroup;
1890 case AddressSpace::UniformConstant:
1891 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001892 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001893 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001894 case AddressSpace::ModuleScopePrivate:
1895 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001896 case AddressSpace::PushConstant:
1897 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001898 }
1899}
1900
David Neto862b7d82018-06-14 18:48:37 -04001901spv::StorageClass
1902SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1903 switch (arg_kind) {
1904 case clspv::ArgKind::Buffer:
1905 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001906 case clspv::ArgKind::BufferUBO:
1907 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001908 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001909 return spv::StorageClassStorageBuffer;
1910 case clspv::ArgKind::PodUBO:
1911 return spv::StorageClassUniform;
1912 case clspv::ArgKind::PodPushConstant:
1913 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001914 case clspv::ArgKind::Local:
1915 return spv::StorageClassWorkgroup;
1916 case clspv::ArgKind::ReadOnlyImage:
1917 case clspv::ArgKind::WriteOnlyImage:
1918 case clspv::ArgKind::Sampler:
1919 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001920 default:
1921 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001922 }
1923}
1924
David Neto22f144c2017-06-12 14:26:21 -04001925spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1926 return StringSwitch<spv::BuiltIn>(Name)
1927 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1928 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1929 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1930 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1931 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001932 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
David Neto22f144c2017-06-12 14:26:21 -04001933 .Default(spv::BuiltInMax);
1934}
1935
1936void SPIRVProducerPass::GenerateExtInstImport() {
SJW69939d52020-04-16 07:29:07 -05001937 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kImports);
David Neto22f144c2017-06-12 14:26:21 -04001938 uint32_t &ExtInstImportID = getOpExtInstImportID();
1939
1940 //
1941 // Generate OpExtInstImport.
1942 //
1943 // Ops[0] ... Ops[n] = Name (Literal String)
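  // In assembly form the emitted instruction is roughly:
  //   %id = OpExtInstImport "GLSL.std.450"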
David Neto22f144c2017-06-12 14:26:21 -04001944 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001945 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1946 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001947}
1948
SJW77b87ad2020-04-21 14:37:52 -05001949void SPIRVProducerPass::GenerateSPIRVTypes() {
SJW69939d52020-04-16 07:29:07 -05001950 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kTypes);
David Neto22f144c2017-06-12 14:26:21 -04001951 ValueMapType &VMap = getValueMap();
1952 ValueMapType &AllocatedVMap = getAllocatedValueMap();
SJW77b87ad2020-04-21 14:37:52 -05001953 const auto &DL = module->getDataLayout();
1954 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04001955
 1956 // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
 1957 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1958 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1959
1960 for (Type *Ty : getTypeList()) {
1961 // Update TypeMap with nextID for reference later.
1962 TypeMap[Ty] = nextID;
1963
1964 switch (Ty->getTypeID()) {
1965 default: {
1966 Ty->print(errs());
1967 llvm_unreachable("Unsupported type???");
1968 break;
1969 }
1970 case Type::MetadataTyID:
1971 case Type::LabelTyID: {
1972 // Ignore these types.
1973 break;
1974 }
1975 case Type::PointerTyID: {
1976 PointerType *PTy = cast<PointerType>(Ty);
1977 unsigned AddrSpace = PTy->getAddressSpace();
1978
1979 // For the purposes of our Vulkan SPIR-V type system, constant and global
1980 // are conflated.
1981 bool UseExistingOpTypePointer = false;
1982 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001983 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1984 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001985 // Check to see if we already created this type (for instance, if we
1986 // had a constant <type>* and a global <type>*, the type would be
1987 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001988 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1989 if (0 < TypeMap.count(GlobalTy)) {
1990 TypeMap[PTy] = TypeMap[GlobalTy];
1991 UseExistingOpTypePointer = true;
1992 break;
1993 }
David Neto22f144c2017-06-12 14:26:21 -04001994 }
1995 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001996 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1997 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001998
alan-bakerb6b09dc2018-11-08 16:59:28 -05001999 // Check to see if we already created this type (for instance, if we
2000 // had a constant <type>* and a global <type>*, the type would be
2001 // created by one of these types, and shared by both).
2002 auto ConstantTy =
2003 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04002004 if (0 < TypeMap.count(ConstantTy)) {
2005 TypeMap[PTy] = TypeMap[ConstantTy];
2006 UseExistingOpTypePointer = true;
2007 }
David Neto22f144c2017-06-12 14:26:21 -04002008 }
2009 }
2010
David Neto862b7d82018-06-14 18:48:37 -04002011 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04002012
David Neto862b7d82018-06-14 18:48:37 -04002013 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04002014 //
2015 // Generate OpTypePointer.
2016 //
2017
2018 // OpTypePointer
2019 // Ops[0] = Storage Class
2020 // Ops[1] = Element Type ID
2021 SPIRVOperandList Ops;
2022
David Neto257c3892018-04-11 13:19:45 -04002023 Ops << MkNum(GetStorageClass(AddrSpace))
2024 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04002025
David Neto87846742018-04-11 17:36:22 -04002026 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002027 SPIRVInstList.push_back(Inst);
2028 }
David Neto22f144c2017-06-12 14:26:21 -04002029 break;
2030 }
2031 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002032 StructType *STy = cast<StructType>(Ty);
2033
2034 // Handle sampler type.
2035 if (STy->isOpaque()) {
2036 if (STy->getName().equals("opencl.sampler_t")) {
2037 //
2038 // Generate OpTypeSampler
2039 //
2040 // Empty Ops.
2041 SPIRVOperandList Ops;
2042
David Neto87846742018-04-11 17:36:22 -04002043 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002044 SPIRVInstList.push_back(Inst);
2045 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05002046 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2047 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002048 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2049 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002050 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002051 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002052 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2053 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002054 STy->getName().startswith("opencl.image3d_ro_t") ||
2055 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002056 //
2057 // Generate OpTypeImage
2058 //
2059 // Ops[0] = Sampled Type ID
2060 // Ops[1] = Dim ID
2061 // Ops[2] = Depth (Literal Number)
2062 // Ops[3] = Arrayed (Literal Number)
2063 // Ops[4] = MS (Literal Number)
2064 // Ops[5] = Sampled (Literal Number)
2065 // Ops[6] = Image Format ID
2066 //
2067 SPIRVOperandList Ops;
2068
alan-bakerf67468c2019-11-25 15:51:49 -05002069 uint32_t ImageTyID = nextID++;
2070 uint32_t SampledTyID = 0;
2071 if (STy->getName().contains(".float")) {
2072 SampledTyID = lookupType(Type::getFloatTy(Context));
2073 } else if (STy->getName().contains(".uint")) {
2074 SampledTyID = lookupType(Type::getInt32Ty(Context));
2075 } else if (STy->getName().contains(".int")) {
2076 // Generate a signed 32-bit integer if necessary.
2077 if (int32ID == 0) {
2078 int32ID = nextID++;
2079 SPIRVOperandList intOps;
2080 intOps << MkNum(32);
2081 intOps << MkNum(1);
2082 auto signed_int =
2083 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2084 SPIRVInstList.push_back(signed_int);
2085 }
2086 SampledTyID = int32ID;
2087
2088 // Generate a vec4 of the signed int if necessary.
2089 if (v4int32ID == 0) {
2090 v4int32ID = nextID++;
2091 SPIRVOperandList vecOps;
2092 vecOps << MkId(int32ID);
2093 vecOps << MkNum(4);
2094 auto int_vec =
2095 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2096 SPIRVInstList.push_back(int_vec);
2097 }
2098 } else {
2099 // This was likely an UndefValue.
2100 SampledTyID = lookupType(Type::getFloatTy(Context));
2101 }
David Neto257c3892018-04-11 13:19:45 -04002102 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002103
2104 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002105 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002106 STy->getName().startswith("opencl.image1d_wo_t") ||
2107 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2108 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002109 DimID = spv::Dim1D;
2110 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2111 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002112 DimID = spv::Dim3D;
2113 }
David Neto257c3892018-04-11 13:19:45 -04002114 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002115
2116 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002117 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002118
alan-baker7150a1d2020-02-25 08:31:06 -05002119 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2120 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002121
2122 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002123 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002124
alan-baker7150a1d2020-02-25 08:31:06 -05002125 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002126 //
2127 // From Spec
2128 //
2129 // 0 indicates this is only known at run time, not at compile time
2130 // 1 indicates will be used with sampler
2131 // 2 indicates will be used without a sampler (a storage image)
2132 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002133 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002134 Sampled = 2;
2135 }
David Neto257c3892018-04-11 13:19:45 -04002136 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002137
2138 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002139 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002140
alan-bakerf67468c2019-11-25 15:51:49 -05002141 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002142 SPIRVInstList.push_back(Inst);
2143 break;
2144 }
2145 }
2146
2147 //
2148 // Generate OpTypeStruct
2149 //
2150 // Ops[0] ... Ops[n] = Member IDs
2151 SPIRVOperandList Ops;
2152
2153 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002154 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002155 }
2156
David Neto22f144c2017-06-12 14:26:21 -04002157 uint32_t STyID = nextID;
2158
alan-bakerb6b09dc2018-11-08 16:59:28 -05002159 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002160 SPIRVInstList.push_back(Inst);
2161
2162 // Generate OpMemberDecorate.
Kévin Petitbbbda972020-03-03 19:16:31 +00002163 if (TypesNeedingLayout.idFor(STy)) {
2164 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2165 MemberIdx++) {
2166 // Ops[0] = Structure Type ID
2167 // Ops[1] = Member Index(Literal Number)
2168 // Ops[2] = Decoration (Offset)
2169 // Ops[3] = Byte Offset (Literal Number)
2170 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002171
Kévin Petitbbbda972020-03-03 19:16:31 +00002172 Ops << MkId(STyID) << MkNum(MemberIdx)
2173 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002174
Kévin Petitbbbda972020-03-03 19:16:31 +00002175 const auto ByteOffset =
2176 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002177
Kévin Petitbbbda972020-03-03 19:16:31 +00002178 Ops << MkNum(ByteOffset);
2179
2180 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002181 getSPIRVInstList(kAnnotations).push_back(DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002182 }
David Neto22f144c2017-06-12 14:26:21 -04002183 }
2184
2185 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002186 if (StructTypesNeedingBlock.idFor(STy)) {
2187 Ops.clear();
2188 // Use Block decorations with StorageBuffer storage class.
2189 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002190
David Neto862b7d82018-06-14 18:48:37 -04002191 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002192 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002193 }
2194 break;
2195 }
2196 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002197 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002198
2199 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002200 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002201 SPIRVInstList.push_back(Inst);
2202 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002203 if (!clspv::Option::Int8Support()) {
2204 // i8 is added to TypeMap as i32.
2205 // No matter what LLVM type is requested first, always alias the
2206 // second one's SPIR-V type to be the same as the one we generated
2207 // first.
2208 unsigned aliasToWidth = 0;
2209 if (BitWidth == 8) {
2210 aliasToWidth = 32;
2211 BitWidth = 32;
2212 } else if (BitWidth == 32) {
2213 aliasToWidth = 8;
2214 }
2215 if (aliasToWidth) {
2216 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2217 auto where = TypeMap.find(otherType);
2218 if (where == TypeMap.end()) {
2219 // Go ahead and make it, but also map the other type to it.
2220 TypeMap[otherType] = nextID;
2221 } else {
 2222 // Alias this SPIR-V type to the existing type.
2223 TypeMap[Ty] = where->second;
2224 break;
2225 }
David Neto391aeb12017-08-26 15:51:58 -04002226 }
David Neto22f144c2017-06-12 14:26:21 -04002227 }
2228
David Neto257c3892018-04-11 13:19:45 -04002229 SPIRVOperandList Ops;
2230 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002231
2232 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002233 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002234 }
2235 break;
2236 }
2237 case Type::HalfTyID:
2238 case Type::FloatTyID:
2239 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002240 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002241 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002242
2243 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002244 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002245 break;
2246 }
2247 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002248 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002249 const uint64_t Length = ArrTy->getArrayNumElements();
2250 if (Length == 0) {
2251 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002252
David Neto862b7d82018-06-14 18:48:37 -04002253 // Only generate the type once.
2254 // TODO(dneto): Can it ever be generated more than once?
2255 // Doesn't LLVM type uniqueness guarantee we'll only see this
2256 // once?
2257 Type *EleTy = ArrTy->getArrayElementType();
2258 if (OpRuntimeTyMap.count(EleTy) == 0) {
2259 uint32_t OpTypeRuntimeArrayID = nextID;
2260 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002261
David Neto862b7d82018-06-14 18:48:37 -04002262 //
2263 // Generate OpTypeRuntimeArray.
2264 //
David Neto22f144c2017-06-12 14:26:21 -04002265
David Neto862b7d82018-06-14 18:48:37 -04002266 // OpTypeRuntimeArray
2267 // Ops[0] = Element Type ID
2268 SPIRVOperandList Ops;
2269 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002270
David Neto862b7d82018-06-14 18:48:37 -04002271 SPIRVInstList.push_back(
2272 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002273
David Neto862b7d82018-06-14 18:48:37 -04002274 if (Hack_generate_runtime_array_stride_early) {
2275 // Generate OpDecorate.
David Neto22f144c2017-06-12 14:26:21 -04002276
David Neto862b7d82018-06-14 18:48:37 -04002277 // Ops[0] = Target ID
2278 // Ops[1] = Decoration (ArrayStride)
2279 // Ops[2] = Stride Number(Literal Number)
2280 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002281
David Neto862b7d82018-06-14 18:48:37 -04002282 Ops << MkId(OpTypeRuntimeArrayID)
2283 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002284 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002285
David Neto862b7d82018-06-14 18:48:37 -04002286 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002287 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002288 }
2289 }
David Neto22f144c2017-06-12 14:26:21 -04002290
David Neto862b7d82018-06-14 18:48:37 -04002291 } else {
David Neto22f144c2017-06-12 14:26:21 -04002292
David Neto862b7d82018-06-14 18:48:37 -04002293 //
2294 // Generate OpConstant and OpTypeArray.
2295 //
2296
2297 //
2298 // Generate OpConstant for array length.
2299 //
2300 // Ops[0] = Result Type ID
2301 // Ops[1] .. Ops[n] = Values LiteralNumber
2302 SPIRVOperandList Ops;
2303
2304 Type *LengthTy = Type::getInt32Ty(Context);
2305 uint32_t ResTyID = lookupType(LengthTy);
2306 Ops << MkId(ResTyID);
2307
2308 assert(Length < UINT32_MAX);
2309 Ops << MkNum(static_cast<uint32_t>(Length));
2310
2311 // Add constant for length to constant list.
2312 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2313 AllocatedVMap[CstLength] = nextID;
2314 VMap[CstLength] = nextID;
2315 uint32_t LengthID = nextID;
2316
2317 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2318 SPIRVInstList.push_back(CstInst);
2319
2320 // Remember to generate ArrayStride later
2321 getTypesNeedingArrayStride().insert(Ty);
2322
2323 //
2324 // Generate OpTypeArray.
2325 //
2326 // Ops[0] = Element Type ID
2327 // Ops[1] = Array Length Constant ID
2328 Ops.clear();
2329
2330 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2331 Ops << MkId(EleTyID) << MkId(LengthID);
2332
2333 // Update TypeMap with nextID.
2334 TypeMap[Ty] = nextID;
2335
2336 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2337 SPIRVInstList.push_back(ArrayInst);
2338 }
David Neto22f144c2017-06-12 14:26:21 -04002339 break;
2340 }
James Price59a1c752020-04-23 23:06:16 -04002341 case Type::FixedVectorTyID: {
James Pricecf53df42020-04-20 14:41:24 -04002342 auto VecTy = cast<VectorType>(Ty);
alan-bakerb39c8262019-03-08 14:03:37 -05002343 // <4 x i8> is changed to i32 if i8 is not generally supported.
2344 if (!clspv::Option::Int8Support() &&
James Pricecf53df42020-04-20 14:41:24 -04002345 VecTy->getElementType() == Type::getInt8Ty(Context)) {
2346 if (VecTy->getNumElements() == 4) {
2347 TypeMap[Ty] = lookupType(VecTy->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002348 break;
2349 } else {
2350 Ty->print(errs());
2351 llvm_unreachable("Support above i8 vector type");
2352 }
2353 }
2354
2355 // Ops[0] = Component Type ID
2356 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002357 SPIRVOperandList Ops;
James Pricecf53df42020-04-20 14:41:24 -04002358 Ops << MkId(lookupType(VecTy->getElementType()))
2359 << MkNum(VecTy->getNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002360
alan-bakerb6b09dc2018-11-08 16:59:28 -05002361 SPIRVInstruction *inst =
2362 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002363 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002364 break;
2365 }
2366 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002367 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002368 SPIRVInstList.push_back(Inst);
2369 break;
2370 }
2371 case Type::FunctionTyID: {
2372 // Generate SPIRV instruction for function type.
2373 FunctionType *FTy = cast<FunctionType>(Ty);
2374
2375 // Ops[0] = Return Type ID
2376 // Ops[1] ... Ops[n] = Parameter Type IDs
2377 SPIRVOperandList Ops;
2378
2379 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002380 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002381
2382 // Find SPIRV instructions for parameter types
2383 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2384 // Find SPIRV instruction for parameter type.
2385 auto ParamTy = FTy->getParamType(k);
2386 if (ParamTy->isPointerTy()) {
2387 auto PointeeTy = ParamTy->getPointerElementType();
2388 if (PointeeTy->isStructTy() &&
2389 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2390 ParamTy = PointeeTy;
2391 }
2392 }
2393
David Netoc6f3ab22018-04-06 18:02:31 -04002394 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002395 }
2396
David Neto87846742018-04-11 17:36:22 -04002397 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002398 SPIRVInstList.push_back(Inst);
2399 break;
2400 }
2401 }
2402 }
2403
2404 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002405 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002406 //
2407 // Generate OpTypeSampledImage.
2408 //
2409 // Ops[0] = Image Type ID
2410 //
2411 SPIRVOperandList Ops;
2412
David Netoc6f3ab22018-04-06 18:02:31 -04002413 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002414
alan-bakerabd82722019-12-03 17:14:51 -05002415 // Update the image type map.
2416 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002417
David Neto87846742018-04-11 17:36:22 -04002418 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002419 SPIRVInstList.push_back(Inst);
2420 }
David Netoc6f3ab22018-04-06 18:02:31 -04002421
2422 // Generate types for pointer-to-local arguments.
SJW77b87ad2020-04-21 14:37:52 -05002423 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04002424 auto kind = pair.first;
2425 auto spec_id = pair.second;
2426
2427 if (kind != SpecConstant::kLocalMemorySize)
2428 continue;
2429
alan-bakerb6b09dc2018-11-08 16:59:28 -05002430 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002431
2432 // Generate the spec constant.
2433 SPIRVOperandList Ops;
2434 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002435 SPIRVInstList.push_back(
2436 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002437
2438 // Generate the array type.
2439 Ops.clear();
2440 // The element type must have been created.
2441 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2442 assert(elem_ty_id);
2443 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2444
2445 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002446 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002447
2448 Ops.clear();
2449 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002450 SPIRVInstList.push_back(new SPIRVInstruction(
2451 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
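    // Sketch of what one pointer-to-local argument produces (ids are
    // illustrative):
    //   %size = OpSpecConstant %uint 1
    //   %arr  = OpTypeArray %elem %size
    //   %ptr  = OpTypePointer Workgroup %arr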
David Netoc6f3ab22018-04-06 18:02:31 -04002452 }
David Neto22f144c2017-06-12 14:26:21 -04002453}
2454
2455void SPIRVProducerPass::GenerateSPIRVConstants() {
SJW69939d52020-04-16 07:29:07 -05002456 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kConstants);
David Neto22f144c2017-06-12 14:26:21 -04002457 ValueMapType &VMap = getValueMap();
2458 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2459 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002460 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002461
2462 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002463 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002464 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002465
2466 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002467 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002468 continue;
2469 }
2470
David Netofb9a7972017-08-25 17:08:24 -04002471 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002472 VMap[Cst] = nextID;
2473
2474 //
2475 // Generate OpConstant.
2476 //
2477
2478 // Ops[0] = Result Type ID
2479 // Ops[1] .. Ops[n] = Values LiteralNumber
2480 SPIRVOperandList Ops;
2481
David Neto257c3892018-04-11 13:19:45 -04002482 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002483
2484 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002485 spv::Op Opcode = spv::OpNop;
2486
2487 if (isa<UndefValue>(Cst)) {
2488 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002489 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002490 if (hack_undef && IsTypeNullable(Cst->getType())) {
2491 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002492 }
David Neto22f144c2017-06-12 14:26:21 -04002493 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2494 unsigned BitWidth = CI->getBitWidth();
2495 if (BitWidth == 1) {
2496 // If the bitwidth of constant is 1, generate OpConstantTrue or
2497 // OpConstantFalse.
2498 if (CI->getZExtValue()) {
2499 // Ops[0] = Result Type ID
2500 Opcode = spv::OpConstantTrue;
2501 } else {
2502 // Ops[0] = Result Type ID
2503 Opcode = spv::OpConstantFalse;
2504 }
David Neto22f144c2017-06-12 14:26:21 -04002505 } else {
2506 auto V = CI->getZExtValue();
2507 LiteralNum.push_back(V & 0xFFFFFFFF);
2508
2509 if (BitWidth > 32) {
2510 LiteralNum.push_back(V >> 32);
2511 }
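        // Worked example: the 64-bit constant 0x0000000100000002 is emitted
        // with the low word 0x00000002 first and the high word 0x00000001
        // second, matching SPIR-V's low-order-word-first rule for wide
        // literals.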
2512
2513 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002514
David Neto257c3892018-04-11 13:19:45 -04002515 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002516 }
2517 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2518 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2519 Type *CFPTy = CFP->getType();
2520 if (CFPTy->isFloatTy()) {
2521 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002522 } else if (CFPTy->isDoubleTy()) {
2523 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2524 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002525 } else if (CFPTy->isHalfTy()) {
2526 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002527 } else {
2528 CFPTy->print(errs());
2529 llvm_unreachable("Implement this ConstantFP Type");
2530 }
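      // Worked example: 1.0f has the IEEE-754 bit pattern 0x3F800000 and is
      // emitted as a single literal word; a double constant is split into
      // two words, low word first, just like wide integers.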
2531
2532 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002533
David Neto257c3892018-04-11 13:19:45 -04002534 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002535 } else if (isa<ConstantDataSequential>(Cst) &&
2536 cast<ConstantDataSequential>(Cst)->isString()) {
2537 Cst->print(errs());
2538 llvm_unreachable("Implement this Constant");
2539
2540 } else if (const ConstantDataSequential *CDS =
2541 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002542 // Let's convert <4 x i8> constant to int constant specially.
2543 // This case occurs when all the values are specified as constant
2544 // ints.
2545 Type *CstTy = Cst->getType();
2546 if (is4xi8vec(CstTy)) {
2547 LLVMContext &Context = CstTy->getContext();
2548
2549 //
2550 // Generate OpConstant with OpTypeInt 32 0.
2551 //
Neil Henning39672102017-09-29 14:33:13 +01002552 uint32_t IntValue = 0;
2553 for (unsigned k = 0; k < 4; k++) {
2554 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002555 IntValue = (IntValue << 8) | (Val & 0xffu);
2556 }
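        // Worked example: <4 x i8> <i8 1, i8 2, i8 3, i8 4> packs into
        // IntValue = 0x01020304 (element 0 ends up in the most significant
        // byte) and is emitted as one 32-bit OpConstant.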
2557
2558 Type *i32 = Type::getInt32Ty(Context);
2559 Constant *CstInt = ConstantInt::get(i32, IntValue);
2560 // If this constant is already registered on VMap, use it.
2561 if (VMap.count(CstInt)) {
2562 uint32_t CstID = VMap[CstInt];
2563 VMap[Cst] = CstID;
2564 continue;
2565 }
2566
David Neto257c3892018-04-11 13:19:45 -04002567 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002568
David Neto87846742018-04-11 17:36:22 -04002569 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002570 SPIRVInstList.push_back(CstInst);
2571
2572 continue;
2573 }
2574
2575 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002576 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2577 Constant *EleCst = CDS->getElementAsConstant(k);
2578 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002579 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002580 }
2581
2582 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002583 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2584 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002585 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002586 Type *CstTy = Cst->getType();
2587 if (is4xi8vec(CstTy)) {
2588 LLVMContext &Context = CstTy->getContext();
2589
2590 //
2591 // Generate OpConstant with OpTypeInt 32 0.
2592 //
Neil Henning39672102017-09-29 14:33:13 +01002593 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002594 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2595 I != E; ++I) {
2596 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002597 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002598 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2599 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002600 }
David Neto49351ac2017-08-26 17:32:20 -04002601 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002602 }
2603
David Neto49351ac2017-08-26 17:32:20 -04002604 Type *i32 = Type::getInt32Ty(Context);
2605 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002606 // If this constant is already registered on VMap, use it.
2607 if (VMap.count(CstInt)) {
2608 uint32_t CstID = VMap[CstInt];
2609 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002610 continue;
David Neto22f144c2017-06-12 14:26:21 -04002611 }
2612
David Neto257c3892018-04-11 13:19:45 -04002613 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002614
David Neto87846742018-04-11 17:36:22 -04002615 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002616 SPIRVInstList.push_back(CstInst);
2617
David Neto19a1bad2017-08-25 15:01:41 -04002618 continue;
David Neto22f144c2017-06-12 14:26:21 -04002619 }
2620
2621 // We use a constant composite in SPIR-V for our constant aggregate in
2622 // LLVM.
2623 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002624
2625 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2626 // Look up the ID of the element of this aggregate (which we will
2627 // previously have created a constant for).
2628 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2629
2630 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002631 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002632 }
2633 } else if (Cst->isNullValue()) {
2634 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002635 } else {
2636 Cst->print(errs());
2637 llvm_unreachable("Unsupported Constant???");
2638 }
2639
alan-baker5b86ed72019-02-15 08:26:50 -05002640 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2641 // Null pointer requires variable pointers.
2642 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2643 }
2644
David Neto87846742018-04-11 17:36:22 -04002645 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002646 SPIRVInstList.push_back(CstInst);
2647 }
2648}
2649
SJW77b87ad2020-04-21 14:37:52 -05002650void SPIRVProducerPass::GenerateSamplers() {
SJW69939d52020-04-16 07:29:07 -05002651 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04002652
alan-bakerb6b09dc2018-11-08 16:59:28 -05002653 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002654 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002655 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2656 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002657
David Neto862b7d82018-06-14 18:48:37 -04002658 // We might have samplers in the sampler map that are not used
2659 // in the translation unit. We still need to allocate variables
2660 // and bindings for them.
2661 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002662
SJW77b87ad2020-04-21 14:37:52 -05002663 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002664 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002665 if (!var_fn)
2666 return;
alan-baker09cb9802019-12-10 13:16:27 -05002667
David Neto862b7d82018-06-14 18:48:37 -04002668 for (auto user : var_fn->users()) {
2669 // Populate SamplerLiteralToDescriptorSetMap and
2670 // SamplerLiteralToBindingMap.
2671 //
2672 // Look for calls like
2673 // call %opencl.sampler_t addrspace(2)*
2674 // @clspv.sampler.var.literal(
2675 // i32 descriptor,
2676 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002677 // i32 (index-into-sampler-map|sampler_mask))
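    // A hypothetical instance of that pattern (operand values are made up):
    //   call %opencl.sampler_t addrspace(2)*
    //       @clspv.sampler.var.literal(i32 0, i32 1, i32 18)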
alan-bakerb6b09dc2018-11-08 16:59:28 -05002678 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002679 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002680 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002681 auto sampler_value = third_param;
2682 if (clspv::Option::UseSamplerMap()) {
2683 if (third_param >= sampler_map.size()) {
2684 errs() << "Out of bounds index to sampler map: " << third_param;
2685 llvm_unreachable("bad sampler init: out of bounds");
2686 }
2687 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002688 }
2689
David Neto862b7d82018-06-14 18:48:37 -04002690 const auto descriptor_set = static_cast<unsigned>(
2691 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2692 const auto binding = static_cast<unsigned>(
2693 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2694
2695 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2696 SamplerLiteralToBindingMap[sampler_value] = binding;
2697 used_bindings.insert(binding);
2698 }
2699 }
2700
alan-baker09cb9802019-12-10 13:16:27 -05002701 DenseSet<size_t> seen;
2702 for (auto user : var_fn->users()) {
2703 if (!isa<CallInst>(user))
2704 continue;
2705
2706 auto call = cast<CallInst>(user);
2707 const unsigned third_param = static_cast<unsigned>(
2708 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2709
2710 // Already allocated a variable for this value.
2711 if (!seen.insert(third_param).second)
2712 continue;
2713
2714 auto sampler_value = third_param;
2715 if (clspv::Option::UseSamplerMap()) {
2716 sampler_value = sampler_map[third_param].first;
2717 }
2718
David Neto22f144c2017-06-12 14:26:21 -04002719 // Generate OpVariable.
2720 //
2721 // GIDOps[0] : Result Type ID
2722 // GIDOps[1] : Storage Class
2723 SPIRVOperandList Ops;
2724
David Neto257c3892018-04-11 13:19:45 -04002725 Ops << MkId(lookupType(SamplerTy))
2726 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002727
David Neto862b7d82018-06-14 18:48:37 -04002728 auto sampler_var_id = nextID++;
2729 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002730 SPIRVInstList.push_back(Inst);
2731
alan-baker09cb9802019-12-10 13:16:27 -05002732 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002733
David Neto862b7d82018-06-14 18:48:37 -04002734 unsigned descriptor_set;
2735 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002736 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002737 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002738 // This sampler is not actually used. Assign it the next unused binding.
2739 for (binding = 0; used_bindings.count(binding); binding++)
2740 ;
2741 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2742 used_bindings.insert(binding);
2743 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002744 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2745 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002746
alan-baker09cb9802019-12-10 13:16:27 -05002747 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002748 descriptorMapEntries->emplace_back(std::move(sampler_data),
2749 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002750 }
2751
SJW69939d52020-04-16 07:29:07 -05002752 // Ops[0] = Target ID
2753 // Ops[1] = Decoration (DescriptorSet)
2754 // Ops[2] = LiteralNumber according to Decoration
2755 Ops.clear();
2756
David Neto862b7d82018-06-14 18:48:37 -04002757 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2758 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002759
David Neto87846742018-04-11 17:36:22 -04002760 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002761 getSPIRVInstList(kAnnotations).push_back(DescDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002762
2763 // Ops[0] = Target ID
2764 // Ops[1] = Decoration (Binding)
2765 // Ops[2] = LiteralNumber according to Decoration
2766 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002767 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2768 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002769
David Neto87846742018-04-11 17:36:22 -04002770 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002771 getSPIRVInstList(kAnnotations).push_back(BindDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002772 }
David Neto862b7d82018-06-14 18:48:37 -04002773}
David Neto22f144c2017-06-12 14:26:21 -04002774
SJW77b87ad2020-04-21 14:37:52 -05002775void SPIRVProducerPass::GenerateResourceVars() {
SJW69939d52020-04-16 07:29:07 -05002776 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto862b7d82018-06-14 18:48:37 -04002777 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002778
David Neto862b7d82018-06-14 18:48:37 -04002779 // Generate variables. Make one for each resource var info object.
2780 for (auto *info : ModuleOrderedResourceVars) {
2781 Type *type = info->var_fn->getReturnType();
2782 // Remap the address space for opaque types.
2783 switch (info->arg_kind) {
2784 case clspv::ArgKind::Sampler:
2785 case clspv::ArgKind::ReadOnlyImage:
2786 case clspv::ArgKind::WriteOnlyImage:
2787 type = PointerType::get(type->getPointerElementType(),
2788 clspv::AddressSpace::UniformConstant);
2789 break;
2790 default:
2791 break;
2792 }
David Neto22f144c2017-06-12 14:26:21 -04002793
David Neto862b7d82018-06-14 18:48:37 -04002794 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002795
David Neto862b7d82018-06-14 18:48:37 -04002796 const auto type_id = lookupType(type);
2797 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2798 SPIRVOperandList Ops;
2799 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002800
David Neto862b7d82018-06-14 18:48:37 -04002801 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2802 SPIRVInstList.push_back(Inst);
2803
2804 // Map calls to the variable-builtin-function.
2805 for (auto &U : info->var_fn->uses()) {
2806 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2807 const auto set = unsigned(
2808 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2809 const auto binding = unsigned(
2810 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2811 if (set == info->descriptor_set && binding == info->binding) {
2812 switch (info->arg_kind) {
2813 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002814 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002815 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002816 case clspv::ArgKind::PodUBO:
2817 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002818 // The call maps to the variable directly.
2819 VMap[call] = info->var_id;
2820 break;
2821 case clspv::ArgKind::Sampler:
2822 case clspv::ArgKind::ReadOnlyImage:
2823 case clspv::ArgKind::WriteOnlyImage:
2824 // The call maps to a load we generate later.
2825 ResourceVarDeferredLoadCalls[call] = info->var_id;
2826 break;
2827 default:
2828 llvm_unreachable("Unhandled arg kind");
2829 }
2830 }
David Neto22f144c2017-06-12 14:26:21 -04002831 }
David Neto862b7d82018-06-14 18:48:37 -04002832 }
2833 }
David Neto22f144c2017-06-12 14:26:21 -04002834
David Neto862b7d82018-06-14 18:48:37 -04002835 // Generate associated decorations.
SJW69939d52020-04-16 07:29:07 -05002836 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
David Neto862b7d82018-06-14 18:48:37 -04002837
2838 SPIRVOperandList Ops;
2839 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002840 // Push constants don't need descriptor set or binding decorations.
2841 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2842 continue;
2843
David Neto862b7d82018-06-14 18:48:37 -04002844 // Decorate with DescriptorSet and Binding.
2845 Ops.clear();
2846 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2847 << MkNum(info->descriptor_set);
SJW69939d52020-04-16 07:29:07 -05002848 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002849
2850 Ops.clear();
2851 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2852 << MkNum(info->binding);
SJW69939d52020-04-16 07:29:07 -05002853 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002854
alan-bakere9308012019-03-15 10:25:13 -04002855 if (info->coherent) {
2856 // Decorate with Coherent if required for the variable.
2857 Ops.clear();
2858 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05002859 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
alan-bakere9308012019-03-15 10:25:13 -04002860 }
2861
David Neto862b7d82018-06-14 18:48:37 -04002862 // Generate NonWritable and NonReadable
2863 switch (info->arg_kind) {
2864 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002865 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002866 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2867 clspv::AddressSpace::Constant) {
2868 Ops.clear();
2869 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
SJW69939d52020-04-16 07:29:07 -05002870 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002871 }
David Neto862b7d82018-06-14 18:48:37 -04002872 break;
David Neto862b7d82018-06-14 18:48:37 -04002873 case clspv::ArgKind::WriteOnlyImage:
2874 Ops.clear();
2875 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
SJW69939d52020-04-16 07:29:07 -05002876 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002877 break;
2878 default:
2879 break;
David Neto22f144c2017-06-12 14:26:21 -04002880 }
2881 }
2882}
2883
Kévin Petitbbbda972020-03-03 19:16:31 +00002884namespace {
2885
2886bool isScalarType(Type *type) {
2887 return type->isIntegerTy() || type->isFloatTy();
2888}
2889
2890uint64_t structAlignment(StructType *type,
2891 std::function<uint64_t(Type *)> alignFn) {
2892 uint64_t maxAlign = 1;
2893 for (unsigned i = 0; i < type->getStructNumElements(); i++) {
2894 uint64_t align = alignFn(type->getStructElementType(i));
2895 maxAlign = std::max(align, maxAlign);
2896 }
2897 return maxAlign;
2898}
2899
2900uint64_t scalarAlignment(Type *type) {
2901 // A scalar of size N has a scalar alignment of N.
2902 if (isScalarType(type)) {
2903 return type->getScalarSizeInBits() / 8;
2904 }
2905
2906 // A vector or matrix type has a scalar alignment equal to that of its
2907 // component type.
James Pricecf53df42020-04-20 14:41:24 -04002908 if (auto vec_type = dyn_cast<VectorType>(type)) {
2909 return scalarAlignment(vec_type->getElementType());
Kévin Petitbbbda972020-03-03 19:16:31 +00002910 }
2911
2912 // An array type has a scalar alignment equal to that of its element type.
2913 if (type->isArrayTy()) {
2914 return scalarAlignment(type->getArrayElementType());
2915 }
2916
2917 // A structure has a scalar alignment equal to the largest scalar alignment of
2918 // any of its members.
2919 if (type->isStructTy()) {
2920 return structAlignment(cast<StructType>(type), scalarAlignment);
2921 }
2922
2923 llvm_unreachable("Unsupported type");
2924}
2925
2926uint64_t baseAlignment(Type *type) {
2927 // A scalar has a base alignment equal to its scalar alignment.
2928 if (isScalarType(type)) {
2929 return scalarAlignment(type);
2930 }
2931
James Pricecf53df42020-04-20 14:41:24 -04002932 if (auto vec_type = dyn_cast<VectorType>(type)) {
2933 unsigned numElems = vec_type->getNumElements();
Kévin Petitbbbda972020-03-03 19:16:31 +00002934
2935 // A two-component vector has a base alignment equal to twice its scalar
2936 // alignment.
2937 if (numElems == 2) {
2938 return 2 * scalarAlignment(type);
2939 }
2940 // A three- or four-component vector has a base alignment equal to four
2941 // times its scalar alignment.
2942 if ((numElems == 3) || (numElems == 4)) {
2943 return 4 * scalarAlignment(type);
2944 }
2945 }
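  // Worked example: <3 x float> has scalar alignment 4, so its base
  // alignment is 4 * 4 = 16 bytes; <2 x half> has base alignment
  // 2 * 2 = 4 bytes.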
2946
2947 // An array has a base alignment equal to the base alignment of its element
2948 // type.
2949 if (type->isArrayTy()) {
2950 return baseAlignment(type->getArrayElementType());
2951 }
2952
2953 // A structure has a base alignment equal to the largest base alignment of any
2954 // of its members.
2955 if (type->isStructTy()) {
2956 return structAlignment(cast<StructType>(type), baseAlignment);
2957 }
2958
2959 // TODO A row-major matrix of C columns has a base alignment equal to the base
2960 // alignment of a vector of C matrix components.
2961 // TODO A column-major matrix has a base alignment equal to the base alignment
2962 // of the matrix column type.
2963
2964 llvm_unreachable("Unsupported type");
2965}
2966
2967uint64_t extendedAlignment(Type *type) {
2968 // A scalar, vector or matrix type has an extended alignment equal to its base
2969 // alignment.
2970 // TODO matrix type
2971 if (isScalarType(type) || type->isVectorTy()) {
2972 return baseAlignment(type);
2973 }
2974
2975 // An array or structure type has an extended alignment equal to the largest
2976 // extended alignment of any of its members, rounded up to a multiple of 16
2977 if (type->isStructTy()) {
2978 auto salign = structAlignment(cast<StructType>(type), extendedAlignment);
2979 return alignTo(salign, 16);
2980 }
2981
2982 if (type->isArrayTy()) {
2983 auto salign = extendedAlignment(type->getArrayElementType());
2984 return alignTo(salign, 16);
2985 }
2986
2987 llvm_unreachable("Unsupported type");
2988}
2989
2990uint64_t standardAlignment(Type *type, spv::StorageClass sclass) {
2991 // If the scalarBlockLayout feature is enabled on the device then every member
2992 // must be aligned according to its scalar alignment
2993 if (clspv::Option::ScalarBlockLayout()) {
2994 return scalarAlignment(type);
2995 }
2996
2997 // All vectors must be aligned according to their scalar alignment
2998 if (type->isVectorTy()) {
2999 return scalarAlignment(type);
3000 }
3001
3002 // If the uniformBufferStandardLayout feature is not enabled on the device,
3003 // then any member of an OpTypeStruct with a storage class of Uniform and a
3004 // decoration of Block must be aligned according to its extended alignment.
3005 if (!clspv::Option::Std430UniformBufferLayout() &&
3006 sclass == spv::StorageClassUniform) {
3007 return extendedAlignment(type);
3008 }
3009
3010 // Every other member must be aligned according to its base alignment
3011 return baseAlignment(type);
3012}
3013
3014bool improperlyStraddles(const DataLayout &DL, Type *type, unsigned offset) {
3015 assert(type->isVectorTy());
3016
3017 auto size = DL.getTypeStoreSize(type);
3018
3019 // It is a vector with total size less than or equal to 16 bytes, and has
3020 // Offset decorations placing its first byte at F and its last byte at L,
3021 // where floor(F / 16) != floor(L / 16).
3022 if ((size <= 16) && (offset % 16 + size > 16)) {
3023 return true;
3024 }
3025
3026 // It is a vector with total size greater than 16 bytes and has its Offset
3027 // decorations placing its first byte at a non-integer multiple of 16
3028 if ((size > 16) && (offset % 16 != 0)) {
3029 return true;
3030 }
3031
3032 return false;
3033}
3034
3035// See 14.5 Shader Resource Interface in Vulkan spec
3036bool isValidExplicitLayout(Module &M, StructType *STy, unsigned Member,
3037 spv::StorageClass SClass, unsigned Offset,
3038 unsigned PreviousMemberOffset) {
3039
3040 auto MemberType = STy->getElementType(Member);
3041 auto Align = standardAlignment(MemberType, SClass);
3042 auto &DL = M.getDataLayout();
3043
3044 // The Offset decoration of any member must be a multiple of its alignment
3045 if (Offset % Align != 0) {
3046 return false;
3047 }
3048
3049 // TODO Any ArrayStride or MatrixStride decoration must be a multiple of the
3050 // alignment of the array or matrix as defined above
3051
3052 if (!clspv::Option::ScalarBlockLayout()) {
3053 // Vectors must not improperly straddle, as defined above
3054 if (MemberType->isVectorTy() &&
3055 improperlyStraddles(DL, MemberType, Offset)) {
3056 return false;
3057 }
3058
3059 // The Offset decoration of a member must not place it between the end
3060 // of a structure or an array and the next multiple of the alignment of that
3061 // structure or array
3062 if (Member > 0) {
3063 auto PType = STy->getElementType(Member - 1);
3064 if (PType->isStructTy() || PType->isArrayTy()) {
3065 auto PAlign = standardAlignment(PType, SClass);
3066 if (Offset - PreviousMemberOffset < PAlign) {
3067 return false;
3068 }
3069 }
3070 }
3071 }
3072
3073 return true;
3074}
3075
3076} // namespace
3077
SJW77b87ad2020-04-21 14:37:52 -05003078void SPIRVProducerPass::GeneratePushConstantDescriptorMapEntries() {
Kévin Petitbbbda972020-03-03 19:16:31 +00003079
SJW77b87ad2020-04-21 14:37:52 -05003080 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
3081 auto const &DL = module->getDataLayout();
Kévin Petitbbbda972020-03-03 19:16:31 +00003082 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
3083 auto STy = cast<StructType>(GV->getValueType());
3084
3085 for (unsigned i = 0; i < STy->getNumElements(); i++) {
3086 auto pc = static_cast<clspv::PushConstant>(
3087 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
3088 auto memberType = STy->getElementType(i);
3089 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
3090 unsigned previousOffset = 0;
3091 if (i > 0) {
3092 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
3093 }
3094 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
SJW77b87ad2020-04-21 14:37:52 -05003095 assert(isValidExplicitLayout(*module, STy, i,
3096 spv::StorageClassPushConstant, offset,
3097 previousOffset));
Kévin Petitbbbda972020-03-03 19:16:31 +00003098 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
3099 descriptorMapEntries->emplace_back(std::move(data));
3100 }
3101 }
3102}
3103
SJW77b87ad2020-04-21 14:37:52 -05003104void SPIRVProducerPass::GenerateSpecConstantDescriptorMapEntries() {
3105 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04003106 auto kind = pair.first;
3107 auto id = pair.second;
3108
3109 // Local memory size is only used for kernel arguments.
3110 if (kind == SpecConstant::kLocalMemorySize)
3111 continue;
3112
3113 version0::DescriptorMapEntry::SpecConstantData data = {kind, id};
3114 descriptorMapEntries->emplace_back(std::move(data));
3115 }
3116}
3117
David Neto22f144c2017-06-12 14:26:21 -04003118void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
SJW69939d52020-04-16 07:29:07 -05003119 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04003120 ValueMapType &VMap = getValueMap();
3121 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07003122 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04003123
3124 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
3125 Type *Ty = GV.getType();
3126 PointerType *PTy = cast<PointerType>(Ty);
3127
3128 uint32_t InitializerID = 0;
3129
3130 // Workgroup size is handled differently (it goes into a constant)
3131 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3132 std::vector<bool> HasMDVec;
3133 uint32_t PrevXDimCst = 0xFFFFFFFF;
3134 uint32_t PrevYDimCst = 0xFFFFFFFF;
3135 uint32_t PrevZDimCst = 0xFFFFFFFF;
3136 for (Function &Func : *GV.getParent()) {
3137 if (Func.isDeclaration()) {
3138 continue;
3139 }
3140
3141 // We only need to check kernels.
3142 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
3143 continue;
3144 }
3145
3146 if (const MDNode *MD =
3147 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
3148 uint32_t CurXDimCst = static_cast<uint32_t>(
3149 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3150 uint32_t CurYDimCst = static_cast<uint32_t>(
3151 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3152 uint32_t CurZDimCst = static_cast<uint32_t>(
3153 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3154
3155 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
3156 PrevZDimCst == 0xFFFFFFFF) {
3157 PrevXDimCst = CurXDimCst;
3158 PrevYDimCst = CurYDimCst;
3159 PrevZDimCst = CurZDimCst;
3160 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
3161 CurZDimCst != PrevZDimCst) {
3162 llvm_unreachable(
3163 "reqd_work_group_size must be the same across all kernels");
3164 } else {
3165 continue;
3166 }
3167
3168 //
3169 // Generate OpConstantComposite.
3170 //
3171 // Ops[0] : Result Type ID
3172 // Ops[1] : Constant size for x dimension.
3173 // Ops[2] : Constant size for y dimension.
3174 // Ops[3] : Constant size for z dimension.
3175 SPIRVOperandList Ops;
3176
3177 uint32_t XDimCstID =
3178 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
3179 uint32_t YDimCstID =
3180 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
3181 uint32_t ZDimCstID =
3182 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
3183
3184 InitializerID = nextID;
3185
David Neto257c3892018-04-11 13:19:45 -04003186 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3187 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003188
David Neto87846742018-04-11 17:36:22 -04003189 auto *Inst =
3190 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
alan-bakera1be3322020-04-20 12:48:18 -04003191 getSPIRVInstList(kConstants).push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003192
3193 HasMDVec.push_back(true);
3194 } else {
3195 HasMDVec.push_back(false);
3196 }
3197 }
3198
3199 // Check all kernels have same definitions for work_group_size.
3200 bool HasMD = false;
3201 if (!HasMDVec.empty()) {
3202 HasMD = HasMDVec[0];
3203 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3204 if (HasMD != HasMDVec[i]) {
3205 llvm_unreachable(
3206 "Kernels should have consistent work group size definition");
3207 }
3208 }
3209 }
3210
3211 // If the kernels have no reqd_work_group_size metadata (or non-uniform
3212 // NDRanges are supported), generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01003213 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04003214 //
3215 // Generate OpSpecConstants for x/y/z dimension.
3216 //
3217 // Ops[0] : Result Type ID
3218 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3219 uint32_t XDimCstID = 0;
3220 uint32_t YDimCstID = 0;
3221 uint32_t ZDimCstID = 0;
3222
alan-bakera1be3322020-04-20 12:48:18 -04003223 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05003224 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04003225
David Neto22f144c2017-06-12 14:26:21 -04003226 SPIRVOperandList Ops;
James Pricecf53df42020-04-20 14:41:24 -04003227 uint32_t result_type_id = lookupType(
3228 cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04003229
David Neto257c3892018-04-11 13:19:45 -04003230 // X Dimension
3231 Ops << MkId(result_type_id) << MkNum(1);
3232 XDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003233 getSPIRVInstList(kConstants)
3234 .push_back(new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003235
3236 // Y Dimension
3237 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003238 Ops << MkId(result_type_id) << MkNum(1);
3239 YDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003240 getSPIRVInstList(kConstants)
3241 .push_back(new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003242
3243 // Z Dimension
3244 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003245 Ops << MkId(result_type_id) << MkNum(1);
3246 ZDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003247 getSPIRVInstList(kConstants)
3248 .push_back(new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003249
David Neto257c3892018-04-11 13:19:45 -04003250 BuiltinDimVec.push_back(XDimCstID);
3251 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003252 BuiltinDimVec.push_back(ZDimCstID);
3253
David Neto22f144c2017-06-12 14:26:21 -04003254 //
3255 // Generate OpSpecConstantComposite.
3256 //
3257 // Ops[0] : Result Type ID
3258 // Ops[1] : Constant size for x dimension.
3259 // Ops[2] : Constant size for y dimension.
3260 // Ops[3] : Constant size for z dimension.
3261 InitializerID = nextID;
3262
3263 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003264 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3265 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003266
David Neto87846742018-04-11 17:36:22 -04003267 auto *Inst =
3268 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
alan-bakera1be3322020-04-20 12:48:18 -04003269 getSPIRVInstList(kConstants).push_back(Inst);
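      // Sketch of the emitted constants (ids are illustrative):
      //   %x = OpSpecConstant %uint 1
      //   %y = OpSpecConstant %uint 1
      //   %z = OpSpecConstant %uint 1
      //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z
      // Each dimension defaults to 1 so the runtime can specialize the
      // workgroup size.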
David Neto22f144c2017-06-12 14:26:21 -04003270 }
alan-bakerbed3a882020-04-21 14:42:41 -04003271 } else if (BuiltinType == spv::BuiltInWorkDim) {
3272 // 1. Generate a specialization constant with a default of 3.
3273 // 2. Allocate and annotate a SpecId for the constant.
3274 // 3. Use the spec constant as the initializer for the variable.
3275 SPIRVOperandList Ops;
3276
3277 //
3278 // Generate OpSpecConstant.
3279 //
3280 // Ops[0] : Result Type ID
3281 // Ops[1] : Default literal value
3282 InitializerID = nextID++;
3283
3284 Ops << MkId(lookupType(IntegerType::get(GV.getContext(), 32))) << MkNum(3);
3285
3286 auto *Inst = new SPIRVInstruction(spv::OpSpecConstant, InitializerID, Ops);
3287 getSPIRVInstList(kConstants).push_back(Inst);
3288
3289 //
3290 // Generate SpecId decoration.
3291 //
3292 // Ops[0] : target
3293 // Ops[1] : decoration
3294 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04003295 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04003296 Ops.clear();
3297 Ops << MkId(InitializerID) << MkNum(spv::DecorationSpecId)
3298 << MkNum(spec_id);
3299
3300 Inst = new SPIRVInstruction(spv::OpDecorate, Ops);
3301 getSPIRVInstList(kAnnotations).push_back(Inst);
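    // Sketch (ids are illustrative): this emits
    //   %workdim = OpSpecConstant %uint 3
    //   OpDecorate %workdim SpecId <allocated id>
    // and %workdim then serves as the initializer of the WorkDim variable
    // created below.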
David Neto22f144c2017-06-12 14:26:21 -04003302 }
3303
David Neto22f144c2017-06-12 14:26:21 -04003304 VMap[&GV] = nextID;
3305
3306 //
3307 // Generate OpVariable.
3308 //
3309 // GIDOps[0] : Result Type ID
3310 // GIDOps[1] : Storage Class
3311 SPIRVOperandList Ops;
3312
David Neto85082642018-03-24 06:55:20 -07003313 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003314 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003315
David Neto85082642018-03-24 06:55:20 -07003316 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003317 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003318 clspv::Option::ModuleConstantsInStorageBuffer();
3319
Kévin Petit23d5f182019-08-13 16:21:29 +01003320 if (GV.hasInitializer()) {
3321 auto GVInit = GV.getInitializer();
3322 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3323 assert(VMap.count(GVInit) == 1);
3324 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003325 }
3326 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003327
3328 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003329 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003330 Ops << MkId(InitializerID);
3331 }
David Neto85082642018-03-24 06:55:20 -07003332 const uint32_t var_id = nextID++;
3333
David Neto87846742018-04-11 17:36:22 -04003334 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003335 SPIRVInstList.push_back(Inst);
3336
SJW69939d52020-04-16 07:29:07 -05003337 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
alan-bakerbed3a882020-04-21 14:42:41 -04003338 // If we have a builtin (not WorkDim).
3339 if (spv::BuiltInMax != BuiltinType && BuiltinType != spv::BuiltInWorkDim) {
David Neto22f144c2017-06-12 14:26:21 -04003340 //
3341 // Generate OpDecorate.
3342 //
3343 // DOps[0] = Target ID
3344 // DOps[1] = Decoration (Builtin)
3345 // DOps[2] = BuiltIn ID
3346 uint32_t ResultID;
3347
3348 // WorkgroupSize is different: we decorate the constant composite that has
3349 // its value, rather than the variable that we use to access the value.
3350 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3351 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003352 // Save both the value and variable IDs for later.
3353 WorkgroupSizeValueID = InitializerID;
3354 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003355 } else {
3356 ResultID = VMap[&GV];
3357 }
3358
3359 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003360 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3361 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003362
David Neto87846742018-04-11 17:36:22 -04003363 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
SJW69939d52020-04-16 07:29:07 -05003364 Annotations.push_back(DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003365 } else if (module_scope_constant_external_init) {
3366 // This module scope constant is initialized from a storage buffer with data
3367 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05003368 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07003369
David Neto862b7d82018-06-14 18:48:37 -04003370 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003371 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3372 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003373 std::string hexbytes;
3374 llvm::raw_string_ostream str(hexbytes);
3375 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003376 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3377 str.str()};
3378 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3379 0);
David Neto85082642018-03-24 06:55:20 -07003380
David Neto85082642018-03-24 06:55:20 -07003381 SPIRVOperandList DOps;
David Neto85082642018-03-24 06:55:20 -07003382
3383 // OpDecorate %var DescriptorSet <descriptor_set>
David Neto257c3892018-04-11 13:19:45 -04003384 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3385 << MkNum(descriptor_set);
SJW69939d52020-04-16 07:29:07 -05003386 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
3387
3388 // OpDecorate %var Binding <binding>
3389 DOps.clear();
3390 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3391 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003392 }
3393}
3394
SJW77b87ad2020-04-21 14:37:52 -05003395void SPIRVProducerPass::GenerateWorkgroupVars() {
SJW69939d52020-04-16 07:29:07 -05003396 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
SJW77b87ad2020-04-21 14:37:52 -05003397 auto spec_constant_md =
3398 module->getNamedMetadata(clspv::SpecConstantMetadataName());
alan-bakera1be3322020-04-20 12:48:18 -04003399 if (!spec_constant_md)
3400 return;
3401
SJW77b87ad2020-04-21 14:37:52 -05003402 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04003403 auto kind = pair.first;
3404 auto spec_id = pair.second;
3405
3406 if (kind != SpecConstant::kLocalMemorySize)
3407 continue;
3408
alan-bakerb6b09dc2018-11-08 16:59:28 -05003409 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003410
3411 // Generate OpVariable.
3412 //
3413 // GIDOps[0] : Result Type ID
3414 // GIDOps[1] : Storage Class
3415 SPIRVOperandList Ops;
3416 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3417
3418 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003419 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003420 }
3421}
3422
SJW77b87ad2020-04-21 14:37:52 -05003423void SPIRVProducerPass::GenerateDescriptorMapInfo(Function &F) {
3424 const auto &DL = module->getDataLayout();
David Netoc5fb5242018-07-30 13:28:31 -04003425 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3426 return;
3427 }
Kévin Petit717f8572020-04-06 17:31:53 +01003428 // Add a declaration entry for each kernel.
3429 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
3430 F.getName().str()};
3431 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
3432
David Neto862b7d82018-06-14 18:48:37 -04003433 // Gather the list of resources that are used by this function's arguments.
3434 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3435
alan-bakerf5e5f692018-11-27 08:33:24 -05003436 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3437 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003438 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003439 std::string kind =
3440 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3441 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003442 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003443 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003444 };
3445
3446 auto *fty = F.getType()->getPointerElementType();
3447 auto *func_ty = dyn_cast<FunctionType>(fty);
3448
alan-baker038e9242019-04-19 22:14:41 -04003449 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003450 // If an argument maps to a resource variable, then get descriptor set and
3451 // binding from the resource variable. Other info comes from the metadata.
3452 const auto *arg_map = F.getMetadata("kernel_arg_map");
3453 if (arg_map) {
3454 for (const auto &arg : arg_map->operands()) {
3455 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003456 assert(arg_node->getNumOperands() == 7);
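      // Assumed shape of each entry, inferred from the operands read below:
      //   !{!"argName", i32 oldIndex, i32 newIndex, i32 offset, i32 size,
      //     !"argKind", i32 specId}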
David Neto862b7d82018-06-14 18:48:37 -04003457 const auto name =
3458 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3459 const auto old_index =
3460 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3461 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003462 const size_t new_index = static_cast<size_t>(
3463 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003464 const auto offset =
3465 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003466 const auto arg_size =
3467 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003468 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003469 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003470 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003471 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003472
3473 uint32_t descriptor_set = 0;
3474 uint32_t binding = 0;
3475 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003476 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3477 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003478 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003479 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003480 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003481 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3482 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3483 DL));
David Neto862b7d82018-06-14 18:48:37 -04003484 } else {
3485 auto *info = resource_var_at_index[new_index];
3486 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003487 descriptor_set = info->descriptor_set;
3488 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003489 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003490 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3491 binding);
David Neto862b7d82018-06-14 18:48:37 -04003492 }
3493 } else {
3494 // There is no argument map.
3495 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003496 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003497
3498 SmallVector<Argument *, 4> arguments;
3499 for (auto &arg : F.args()) {
3500 arguments.push_back(&arg);
3501 }
3502
3503 unsigned arg_index = 0;
3504 for (auto *info : resource_var_at_index) {
3505 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003506 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003507 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04003508 if (info->arg_kind == clspv::ArgKind::Pod ||
3509 info->arg_kind == clspv::ArgKind::PodUBO ||
3510 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003511 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003512 }
3513
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003514 // Local pointer arguments are unused in this case. Offset is always
3515 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003516 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003517 F.getName().str(),
3518 arg->getName().str(),
3519 arg_index,
3520 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3521 0,
3522 0,
3523 0,
3524 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003525 descriptorMapEntries->emplace_back(std::move(kernel_data),
3526 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003527 }
3528 arg_index++;
3529 }
3530 // Generate mappings for pointer-to-local arguments.
3531 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3532 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003533 auto where = LocalArgSpecIds.find(arg);
3534 if (where != LocalArgSpecIds.end()) {
3535 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003536 // Pod argument members are unused in this case.
3537 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003538 F.getName().str(),
3539 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003540 arg_index,
3541 ArgKind::Local,
3542 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003543 static_cast<uint32_t>(
3544 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003545 0,
3546 0};
3547 // Pointer-to-local arguments do not utilize descriptor set and binding.
3548 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003549 }
3550 }
3551 }
3552}
3553
David Neto22f144c2017-06-12 14:26:21 -04003554void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
SJW69939d52020-04-16 07:29:07 -05003555 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003556 ValueMapType &VMap = getValueMap();
3557 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003558 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3559 auto &GlobalConstArgSet = getGlobalConstArgSet();
3560
3561 FunctionType *FTy = F.getFunctionType();
3562
3563 //
David Neto22f144c2017-06-12 14:26:21 -04003564 // Generate OpFunction.
3565 //
3566
3567 // FOps[0] : Result Type ID
3568 // FOps[1] : Function Control
3569 // FOps[2] : Function Type ID
3570 SPIRVOperandList FOps;
3571
3572 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003573 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003574
3575 // Check function attributes for SPIRV Function Control.
3576 uint32_t FuncControl = spv::FunctionControlMaskNone;
3577 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3578 FuncControl |= spv::FunctionControlInlineMask;
3579 }
3580 if (F.hasFnAttribute(Attribute::NoInline)) {
3581 FuncControl |= spv::FunctionControlDontInlineMask;
3582 }
3583 // TODO: Check llvm attribute for Function Control Pure.
3584 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3585 FuncControl |= spv::FunctionControlPureMask;
3586 }
3587 // TODO: Check llvm attribute for Function Control Const.
3588 if (F.hasFnAttribute(Attribute::ReadNone)) {
3589 FuncControl |= spv::FunctionControlConstMask;
3590 }
3591
David Neto257c3892018-04-11 13:19:45 -04003592 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003593
3594 uint32_t FTyID;
3595 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3596 SmallVector<Type *, 4> NewFuncParamTys;
3597 FunctionType *NewFTy =
3598 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3599 FTyID = lookupType(NewFTy);
3600 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003601 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003602 if (GlobalConstFuncTyMap.count(FTy)) {
3603 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3604 } else {
3605 FTyID = lookupType(FTy);
3606 }
3607 }
3608
David Neto257c3892018-04-11 13:19:45 -04003609 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003610
3611 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3612 EntryPoints.push_back(std::make_pair(&F, nextID));
3613 }
3614
3615 VMap[&F] = nextID;
3616
David Neto482550a2018-03-24 05:21:07 -07003617 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003618 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3619 }
David Neto22f144c2017-06-12 14:26:21 -04003620 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003621 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003622 SPIRVInstList.push_back(FuncInst);
3623
3624 //
3625 // Generate OpFunctionParameter for Normal function.
3626 //
3627
3628 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003629
David Neto22f144c2017-06-12 14:26:21 -04003630 // Iterate Argument for name instead of param type from function type.
3631 unsigned ArgIdx = 0;
3632 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003633 uint32_t param_id = nextID++;
3634 VMap[&Arg] = param_id;
3635
3636 if (CalledWithCoherentResource(Arg)) {
3637 // If the arg is passed a coherent resource ever, then decorate this
3638 // parameter with Coherent too.
3639 SPIRVOperandList decoration_ops;
3640 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05003641 getSPIRVInstList(kAnnotations)
3642 .push_back(new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003643 }
David Neto22f144c2017-06-12 14:26:21 -04003644
3645 // ParamOps[0] : Result Type ID
3646 SPIRVOperandList ParamOps;
3647
3648 // Find SPIRV instruction for parameter type.
3649 uint32_t ParamTyID = lookupType(Arg.getType());
3650 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3651 if (GlobalConstFuncTyMap.count(FTy)) {
3652 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3653 Type *EleTy = PTy->getPointerElementType();
3654 Type *ArgTy =
3655 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3656 ParamTyID = lookupType(ArgTy);
3657 GlobalConstArgSet.insert(&Arg);
3658 }
3659 }
3660 }
David Neto257c3892018-04-11 13:19:45 -04003661 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003662
3663 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003664 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003665 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003666 SPIRVInstList.push_back(ParamInst);
3667
3668 ArgIdx++;
3669 }
3670 }
3671}
3672
SJW77b87ad2020-04-21 14:37:52 -05003673void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04003674 EntryPointVecType &EntryPoints = getEntryPointVec();
3675 ValueMapType &VMap = getValueMap();
3676 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
David Neto22f144c2017-06-12 14:26:21 -04003677 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3678
SJW69939d52020-04-16 07:29:07 -05003679 SPIRVInstructionList &SPIRVCapabilities = getSPIRVInstList(kCapabilities);
David Neto22f144c2017-06-12 14:26:21 -04003680 //
3681 // Generate OpCapability
3682 //
3683  // TODO: Which llvm information is mapped to SPIRV Capability?
3684
3685 // Ops[0] = Capability
3686 SPIRVOperandList Ops;
3687
David Neto87846742018-04-11 17:36:22 -04003688 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003689 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
SJW69939d52020-04-16 07:29:07 -05003690 SPIRVCapabilities.push_back(CapInst);
David Neto22f144c2017-06-12 14:26:21 -04003691
alan-bakerf906d2b2019-12-10 11:26:23 -05003692 bool write_without_format = false;
3693 bool sampled_1d = false;
3694 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003695 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003696 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3697 // Generate OpCapability for i8 type.
SJW69939d52020-04-16 07:29:07 -05003698 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003699 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003700 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003701 // Generate OpCapability for i16 type.
SJW69939d52020-04-16 07:29:07 -05003702 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003703 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003704 } else if (Ty->isIntegerTy(64)) {
3705 // Generate OpCapability for i64 type.
SJW69939d52020-04-16 07:29:07 -05003706 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003707 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003708 } else if (Ty->isHalfTy()) {
3709 // Generate OpCapability for half type.
SJW69939d52020-04-16 07:29:07 -05003710 SPIRVCapabilities.push_back(new SPIRVInstruction(
3711 spv::OpCapability, MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003712 } else if (Ty->isDoubleTy()) {
3713 // Generate OpCapability for double type.
SJW69939d52020-04-16 07:29:07 -05003714 SPIRVCapabilities.push_back(new SPIRVInstruction(
3715 spv::OpCapability, MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003716 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3717 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003718 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003719 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003720 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003721 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003722 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003723 write_without_format = true;
3724 }
3725 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003726 STy->getName().startswith("opencl.image1d_wo_t") ||
3727 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3728 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003729 if (STy->getName().contains(".sampled"))
3730 sampled_1d = true;
3731 else
3732 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003733 }
3734 }
3735 }
3736 }
3737
alan-bakerf906d2b2019-12-10 11:26:23 -05003738 if (write_without_format) {
3739 // Generate OpCapability for write only image type.
SJW69939d52020-04-16 07:29:07 -05003740 SPIRVCapabilities.push_back(new SPIRVInstruction(
3741 spv::OpCapability,
3742 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003743 }
3744 if (image_1d) {
3745 // Generate OpCapability for unsampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003746 SPIRVCapabilities.push_back(new SPIRVInstruction(
3747 spv::OpCapability, {MkNum(spv::CapabilityImage1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003748 } else if (sampled_1d) {
3749 // Generate OpCapability for sampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003750 SPIRVCapabilities.push_back(new SPIRVInstruction(
3751 spv::OpCapability, {MkNum(spv::CapabilitySampled1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003752 }
3753
David Neto5c22a252018-03-15 16:07:41 -04003754 { // OpCapability ImageQuery
3755 bool hasImageQuery = false;
SJW77b87ad2020-04-21 14:37:52 -05003756 for (const auto &SymVal : module->getValueSymbolTable()) {
alan-bakerf67468c2019-11-25 15:51:49 -05003757 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
SJW173c7e92020-03-16 08:44:47 -05003758 if (IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003759 hasImageQuery = true;
3760 break;
3761 }
David Neto5c22a252018-03-15 16:07:41 -04003762 }
3763 }
alan-bakerf67468c2019-11-25 15:51:49 -05003764
David Neto5c22a252018-03-15 16:07:41 -04003765 if (hasImageQuery) {
SJW69939d52020-04-16 07:29:07 -05003766 SPIRVCapabilities.push_back(new SPIRVInstruction(
3767 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)}));
David Neto5c22a252018-03-15 16:07:41 -04003768 }
3769 }
3770
David Neto22f144c2017-06-12 14:26:21 -04003771 if (hasVariablePointers()) {
3772 //
David Neto22f144c2017-06-12 14:26:21 -04003773 // Generate OpCapability.
3774 //
3775 // Ops[0] = Capability
3776 //
3777 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003778 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003779
SJW69939d52020-04-16 07:29:07 -05003780 SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003781 } else if (hasVariablePointersStorageBuffer()) {
3782 //
3783 // Generate OpCapability.
3784 //
3785 // Ops[0] = Capability
3786 //
3787 Ops.clear();
3788 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003789
SJW69939d52020-04-16 07:29:07 -05003790 SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003791 }
3792
SJW69939d52020-04-16 07:29:07 -05003793 SPIRVInstructionList &SPIRVExtensions = getSPIRVInstList(kExtensions);
alan-baker5b86ed72019-02-15 08:26:50 -05003794 // Always add the storage buffer extension
3795 {
David Neto22f144c2017-06-12 14:26:21 -04003796 //
3797 // Generate OpExtension.
3798 //
3799 // Ops[0] = Name (Literal String)
3800 //
alan-baker5b86ed72019-02-15 08:26:50 -05003801 auto *ExtensionInst = new SPIRVInstruction(
3802 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
SJW69939d52020-04-16 07:29:07 -05003803 SPIRVExtensions.push_back(ExtensionInst);
alan-baker5b86ed72019-02-15 08:26:50 -05003804 }
David Neto22f144c2017-06-12 14:26:21 -04003805
alan-baker5b86ed72019-02-15 08:26:50 -05003806 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3807 //
3808 // Generate OpExtension.
3809 //
3810 // Ops[0] = Name (Literal String)
3811 //
3812 auto *ExtensionInst = new SPIRVInstruction(
3813 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
SJW69939d52020-04-16 07:29:07 -05003814 SPIRVExtensions.push_back(ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003815 }
3816
3817 //
3818 // Generate OpMemoryModel
3819 //
3820 // Memory model for Vulkan will always be GLSL450.
3821
3822 // Ops[0] = Addressing Model
3823 // Ops[1] = Memory Model
3824 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003825 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003826
David Neto87846742018-04-11 17:36:22 -04003827 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
SJW69939d52020-04-16 07:29:07 -05003828 getSPIRVInstList(kMemoryModel).push_back(MemModelInst);
David Neto22f144c2017-06-12 14:26:21 -04003829
SJW69939d52020-04-16 07:29:07 -05003830 SPIRVInstructionList &SPIRVEntryPoints = getSPIRVInstList(kEntryPoints);
David Neto22f144c2017-06-12 14:26:21 -04003831 //
3832 // Generate OpEntryPoint
3833 //
3834 for (auto EntryPoint : EntryPoints) {
3835 // Ops[0] = Execution Model
3836 // Ops[1] = EntryPoint ID
3837 // Ops[2] = Name (Literal String)
3838 // ...
3839 //
3840 // TODO: Do we need to consider Interface ID for forward references???
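    // For illustration (IDs and the kernel name "foo" are invented):
    //   OpEntryPoint GLCompute %foo_id "foo" %interface_var_id ...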
3841 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003842 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003843 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3844 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003845
David Neto22f144c2017-06-12 14:26:21 -04003846 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003847 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003848 }
3849
David Neto87846742018-04-11 17:36:22 -04003850 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
SJW69939d52020-04-16 07:29:07 -05003851 SPIRVEntryPoints.push_back(EntryPointInst);
David Neto22f144c2017-06-12 14:26:21 -04003852 }
3853
SJW69939d52020-04-16 07:29:07 -05003854 SPIRVInstructionList &SPIRVExecutionModes = getSPIRVInstList(kExecutionModes);
David Neto22f144c2017-06-12 14:26:21 -04003855 for (auto EntryPoint : EntryPoints) {
Kévin Petit21c23c62020-04-29 01:38:28 +01003856 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3857 ->getMetadata("reqd_work_group_size");
3858 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04003859
3860 if (!BuiltinDimVec.empty()) {
3861 llvm_unreachable(
3862 "Kernels should have consistent work group size definition");
3863 }
3864
3865 //
3866 // Generate OpExecutionMode
3867 //
3868
3869 // Ops[0] = Entry Point ID
3870 // Ops[1] = Execution Mode
3871 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
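      // For example, a kernel declared with
      //   __attribute__((reqd_work_group_size(8, 4, 1)))
      // should produce roughly: OpExecutionMode %kernel_id LocalSize 8 4 1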
3872 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003873 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003874
3875 uint32_t XDim = static_cast<uint32_t>(
3876 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3877 uint32_t YDim = static_cast<uint32_t>(
3878 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3879 uint32_t ZDim = static_cast<uint32_t>(
3880 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3881
David Neto257c3892018-04-11 13:19:45 -04003882 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003883
David Neto87846742018-04-11 17:36:22 -04003884 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
SJW69939d52020-04-16 07:29:07 -05003885 SPIRVExecutionModes.push_back(ExecModeInst);
David Neto22f144c2017-06-12 14:26:21 -04003886 }
3887 }
3888
3889 //
3890 // Generate OpSource.
3891 //
3892 // Ops[0] = SourceLanguage ID
3893 // Ops[1] = Version (LiteralNum)
3894 //
3895 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003896 switch (clspv::Option::Language()) {
3897 case clspv::Option::SourceLanguage::OpenCL_C_10:
3898 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3899 break;
3900 case clspv::Option::SourceLanguage::OpenCL_C_11:
3901 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3902 break;
3903 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003904 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003905 break;
3906 case clspv::Option::SourceLanguage::OpenCL_C_20:
3907 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3908 break;
3909 case clspv::Option::SourceLanguage::OpenCL_CPP:
3910 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3911 break;
3912 default:
3913 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3914 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003915 }
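  // For example, when the source language is OpenCL C 1.2 the operands above
  // are OpenCL_C and 120, i.e. roughly: OpSource OpenCL_C 120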
David Neto22f144c2017-06-12 14:26:21 -04003916
David Neto87846742018-04-11 17:36:22 -04003917 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
SJW69939d52020-04-16 07:29:07 -05003918 getSPIRVInstList(kDebug).push_back(OpenSourceInst);
David Neto22f144c2017-06-12 14:26:21 -04003919
3920 if (!BuiltinDimVec.empty()) {
SJW69939d52020-04-16 07:29:07 -05003921 SPIRVInstructionList &SPIRVAnnotations = getSPIRVInstList(kAnnotations);
David Neto22f144c2017-06-12 14:26:21 -04003922 //
3923 // Generate OpDecorates for x/y/z dimension.
3924 //
3925 // Ops[0] = Target ID
3926 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003927 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003928
3929 // X Dimension
3930 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003931 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
SJW69939d52020-04-16 07:29:07 -05003932 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003933
3934 // Y Dimension
3935 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003936 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
SJW69939d52020-04-16 07:29:07 -05003937 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003938
3939 // Z Dimension
3940 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003941 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
SJW69939d52020-04-16 07:29:07 -05003942 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003943 }
3944}
3945
David Netob6e2e062018-04-25 10:32:06 -04003946void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3947 // Work around a driver bug. Initializers on Private variables might not
3948 // work. So the start of the kernel should store the initializer value to the
3949 // variables. Yes, *every* entry point pays this cost if *any* entry point
3950 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3951 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003952 // TODO(dneto): Remove this at some point once fixed drivers are widely
3953 // available.
David Netob6e2e062018-04-25 10:32:06 -04003954 if (WorkgroupSizeVarID) {
3955 assert(WorkgroupSizeValueID);
3956
3957 SPIRVOperandList Ops;
3958 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3959
3960 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
SJW69939d52020-04-16 07:29:07 -05003961 getSPIRVInstList(kFunctions).push_back(Inst);
David Netob6e2e062018-04-25 10:32:06 -04003962 }
3963}
3964
David Neto22f144c2017-06-12 14:26:21 -04003965void SPIRVProducerPass::GenerateFuncBody(Function &F) {
SJW69939d52020-04-16 07:29:07 -05003966 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003967 ValueMapType &VMap = getValueMap();
3968
David Netob6e2e062018-04-25 10:32:06 -04003969 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003970
3971 for (BasicBlock &BB : F) {
3972 // Register BasicBlock to ValueMap.
3973 VMap[&BB] = nextID;
3974
3975 //
3976 // Generate OpLabel for Basic Block.
3977 //
3978 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003979 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003980 SPIRVInstList.push_back(Inst);
3981
David Neto6dcd4712017-06-23 11:06:47 -04003982    // OpVariable instructions must come first: SPIR-V requires function-scope variables to appear at the start of the function's first block.
3983 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003984 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3985 // Allocating a pointer requires variable pointers.
3986 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003987 setVariablePointersCapabilities(
3988 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003989 }
David Neto6dcd4712017-06-23 11:06:47 -04003990 GenerateInstruction(I);
3991 }
3992 }
3993
David Neto22f144c2017-06-12 14:26:21 -04003994 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003995 if (clspv::Option::HackInitializers()) {
3996 GenerateEntryPointInitialStores();
3997 }
David Neto22f144c2017-06-12 14:26:21 -04003998 }
3999
4000 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04004001 if (!isa<AllocaInst>(I)) {
4002 GenerateInstruction(I);
4003 }
David Neto22f144c2017-06-12 14:26:21 -04004004 }
4005 }
4006}
4007
4008spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
4009 const std::map<CmpInst::Predicate, spv::Op> Map = {
4010 {CmpInst::ICMP_EQ, spv::OpIEqual},
4011 {CmpInst::ICMP_NE, spv::OpINotEqual},
4012 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
4013 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
4014 {CmpInst::ICMP_ULT, spv::OpULessThan},
4015 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
4016 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
4017 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
4018 {CmpInst::ICMP_SLT, spv::OpSLessThan},
4019 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
4020 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
4021 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
4022 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
4023 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
4024 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
4025 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
4026 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
4027 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
4028 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
4029 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
4030 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
4031 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
4032
4033 assert(0 != Map.count(I->getPredicate()));
4034
4035 return Map.at(I->getPredicate());
4036}
4037
4038spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
4039 const std::map<unsigned, spv::Op> Map{
4040 {Instruction::Trunc, spv::OpUConvert},
4041 {Instruction::ZExt, spv::OpUConvert},
4042 {Instruction::SExt, spv::OpSConvert},
4043 {Instruction::FPToUI, spv::OpConvertFToU},
4044 {Instruction::FPToSI, spv::OpConvertFToS},
4045 {Instruction::UIToFP, spv::OpConvertUToF},
4046 {Instruction::SIToFP, spv::OpConvertSToF},
4047 {Instruction::FPTrunc, spv::OpFConvert},
4048 {Instruction::FPExt, spv::OpFConvert},
4049 {Instruction::BitCast, spv::OpBitcast}};
4050
4051 assert(0 != Map.count(I.getOpcode()));
4052
4053 return Map.at(I.getOpcode());
4054}
4055
4056spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00004057 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004058 switch (I.getOpcode()) {
4059 default:
4060 break;
4061 case Instruction::Or:
4062 return spv::OpLogicalOr;
4063 case Instruction::And:
4064 return spv::OpLogicalAnd;
4065 case Instruction::Xor:
4066 return spv::OpLogicalNotEqual;
4067 }
4068 }
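  // Note (illustration): the i1 cases above map to the Logical* opcodes
  // because SPIR-V booleans (OpTypeBool) have no defined bit representation,
  // so the bitwise opcodes in the table below do not apply to them.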
4069
alan-bakerb6b09dc2018-11-08 16:59:28 -05004070 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04004071 {Instruction::Add, spv::OpIAdd},
4072 {Instruction::FAdd, spv::OpFAdd},
4073 {Instruction::Sub, spv::OpISub},
4074 {Instruction::FSub, spv::OpFSub},
4075 {Instruction::Mul, spv::OpIMul},
4076 {Instruction::FMul, spv::OpFMul},
4077 {Instruction::UDiv, spv::OpUDiv},
4078 {Instruction::SDiv, spv::OpSDiv},
4079 {Instruction::FDiv, spv::OpFDiv},
4080 {Instruction::URem, spv::OpUMod},
4081 {Instruction::SRem, spv::OpSRem},
4082 {Instruction::FRem, spv::OpFRem},
4083 {Instruction::Or, spv::OpBitwiseOr},
4084 {Instruction::Xor, spv::OpBitwiseXor},
4085 {Instruction::And, spv::OpBitwiseAnd},
4086 {Instruction::Shl, spv::OpShiftLeftLogical},
4087 {Instruction::LShr, spv::OpShiftRightLogical},
4088 {Instruction::AShr, spv::OpShiftRightArithmetic}};
4089
4090 assert(0 != Map.count(I.getOpcode()));
4091
4092 return Map.at(I.getOpcode());
4093}
4094
4095void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
SJW69939d52020-04-16 07:29:07 -05004096 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04004097 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04004098 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4099 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
4100
4101 // Register Instruction to ValueMap.
4102 if (0 == VMap[&I]) {
4103 VMap[&I] = nextID;
4104 }
4105
4106 switch (I.getOpcode()) {
4107 default: {
4108 if (Instruction::isCast(I.getOpcode())) {
4109 //
4110 // Generate SPIRV instructions for cast operators.
4111 //
4112
David Netod2de94a2017-08-28 17:27:47 -04004113 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004114 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04004115 auto toI8 = Ty == Type::getInt8Ty(Context);
4116 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04004117 // Handle zext, sext and uitofp with i1 type specially.
4118 if ((I.getOpcode() == Instruction::ZExt ||
4119 I.getOpcode() == Instruction::SExt ||
4120 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05004121 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004122 //
4123 // Generate OpSelect.
4124 //
4125
4126 // Ops[0] = Result Type ID
4127 // Ops[1] = Condition ID
4128 // Ops[2] = True Constant ID
4129 // Ops[3] = False Constant ID
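        // For example, 'zext i1 %c to i32' is emitted roughly as:
        //   %result = OpSelect %uint %c %uint_1 %uint_0
        // (sext uses -1 and uitofp uses 1.0f as the true value instead).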
4130 SPIRVOperandList Ops;
4131
David Neto257c3892018-04-11 13:19:45 -04004132 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004133
David Neto22f144c2017-06-12 14:26:21 -04004134 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004135 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04004136
4137 uint32_t TrueID = 0;
4138 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004139 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04004140 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004141 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04004142 } else {
4143 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
4144 }
David Neto257c3892018-04-11 13:19:45 -04004145 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04004146
4147 uint32_t FalseID = 0;
4148 if (I.getOpcode() == Instruction::ZExt) {
4149 FalseID = VMap[Constant::getNullValue(I.getType())];
4150 } else if (I.getOpcode() == Instruction::SExt) {
4151 FalseID = VMap[Constant::getNullValue(I.getType())];
4152 } else {
4153 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
4154 }
David Neto257c3892018-04-11 13:19:45 -04004155 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04004156
David Neto87846742018-04-11 17:36:22 -04004157 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004158 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05004159 } else if (!clspv::Option::Int8Support() &&
4160 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004161 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4162 // 8 bits.
4163 // Before:
4164 // %result = trunc i32 %a to i8
4165 // After
4166 // %result = OpBitwiseAnd %uint %a %uint_255
4167
4168 SPIRVOperandList Ops;
4169
David Neto257c3892018-04-11 13:19:45 -04004170 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04004171
4172 Type *UintTy = Type::getInt32Ty(Context);
4173 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04004174 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04004175
David Neto87846742018-04-11 17:36:22 -04004176 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04004177 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004178 } else {
4179 // Ops[0] = Result Type ID
4180 // Ops[1] = Source Value ID
4181 SPIRVOperandList Ops;
4182
David Neto257c3892018-04-11 13:19:45 -04004183 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004184
David Neto87846742018-04-11 17:36:22 -04004185 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004186 SPIRVInstList.push_back(Inst);
4187 }
4188 } else if (isa<BinaryOperator>(I)) {
4189 //
4190 // Generate SPIRV instructions for binary operators.
4191 //
4192
4193 // Handle xor with i1 type specially.
4194 if (I.getOpcode() == Instruction::Xor &&
4195 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004196 ((isa<ConstantInt>(I.getOperand(0)) &&
4197 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4198 (isa<ConstantInt>(I.getOperand(1)) &&
4199 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004200 //
4201 // Generate OpLogicalNot.
4202 //
4203 // Ops[0] = Result Type ID
4204 // Ops[1] = Operand
4205 SPIRVOperandList Ops;
4206
David Neto257c3892018-04-11 13:19:45 -04004207 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004208
4209 Value *CondV = I.getOperand(0);
4210 if (isa<Constant>(I.getOperand(0))) {
4211 CondV = I.getOperand(1);
4212 }
David Neto257c3892018-04-11 13:19:45 -04004213 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04004214
David Neto87846742018-04-11 17:36:22 -04004215 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004216 SPIRVInstList.push_back(Inst);
4217 } else {
4218 // Ops[0] = Result Type ID
4219 // Ops[1] = Operand 0
4220 // Ops[2] = Operand 1
4221 SPIRVOperandList Ops;
4222
David Neto257c3892018-04-11 13:19:45 -04004223 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4224 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004225
David Neto87846742018-04-11 17:36:22 -04004226 auto *Inst =
4227 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004228 SPIRVInstList.push_back(Inst);
4229 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004230 } else if (I.getOpcode() == Instruction::FNeg) {
4231 // The only unary operator.
4232 //
4233 // Ops[0] = Result Type ID
4234 // Ops[1] = Operand 0
4235 SPIRVOperandList ops;
4236
4237 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
4238 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
4239 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004240 } else {
4241 I.print(errs());
4242 llvm_unreachable("Unsupported instruction???");
4243 }
4244 break;
4245 }
4246 case Instruction::GetElementPtr: {
4247 auto &GlobalConstArgSet = getGlobalConstArgSet();
4248
4252 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4253
4254 //
4255 // Generate OpAccessChain.
4256 //
4257
4258 // Ops[0] = Result Type ID
4259 // Ops[1] = Base ID
4260 // Ops[2] ... Ops[n] = Indexes ID
4261 SPIRVOperandList Ops;
4262
alan-bakerb6b09dc2018-11-08 16:59:28 -05004263 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004264 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4265 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4266 // Use pointer type with private address space for global constant.
4267 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004268 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004269 }
David Neto257c3892018-04-11 13:19:45 -04004270
4271 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004272
David Neto862b7d82018-06-14 18:48:37 -04004273 // Generate the base pointer.
4274 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004275
David Neto862b7d82018-06-14 18:48:37 -04004276 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004277
4278 //
4279 // Follows below rules for gep.
4280 //
David Neto862b7d82018-06-14 18:48:37 -04004281 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
4282 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004283 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4284 // first index.
4285 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4286 // use gep's first index.
4287    // 4. Otherwise, generate OpAccessChain and use gep's first index.
4289 //
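    // For illustration (a sketch; names are invented):
    //   getelementptr %S, %S* %p, i32 0, i32 2  ->  OpAccessChain %ptr %p %uint_2
    //   getelementptr i32, i32* %p, i32 %n      ->  OpPtrAccessChain %ptr %p %n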
4290 spv::Op Opcode = spv::OpAccessChain;
4291 unsigned offset = 0;
4292 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004293 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004294 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004295      } else {
David Neto22f144c2017-06-12 14:26:21 -04004296 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004297 }
David Neto862b7d82018-06-14 18:48:37 -04004298 } else {
David Neto22f144c2017-06-12 14:26:21 -04004299 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004300 }
4301
4302 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004303 // Do we need to generate ArrayStride? Check against the GEP result type
4304 // rather than the pointer type of the base because when indexing into
4305 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4306 // for something else in the SPIR-V.
4307 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004308 auto address_space = ResultType->getAddressSpace();
4309 setVariablePointersCapabilities(address_space);
4310 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004311 case spv::StorageClassStorageBuffer:
4312 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004313 // Save the need to generate an ArrayStride decoration. But defer
4314 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004315 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004316 break;
4317 default:
4318 break;
David Neto1a1a0582017-07-07 12:01:44 -04004319 }
David Neto22f144c2017-06-12 14:26:21 -04004320 }
4321
4322 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004323 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004324 }
4325
David Neto87846742018-04-11 17:36:22 -04004326 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004327 SPIRVInstList.push_back(Inst);
4328 break;
4329 }
4330 case Instruction::ExtractValue: {
4331 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4332 // Ops[0] = Result Type ID
4333 // Ops[1] = Composite ID
4334 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4335 SPIRVOperandList Ops;
4336
David Neto257c3892018-04-11 13:19:45 -04004337 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004338
4339 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004340 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004341
4342 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004343 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004344 }
4345
David Neto87846742018-04-11 17:36:22 -04004346 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004347 SPIRVInstList.push_back(Inst);
4348 break;
4349 }
4350 case Instruction::InsertValue: {
4351 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4352 // Ops[0] = Result Type ID
4353 // Ops[1] = Object ID
4354 // Ops[2] = Composite ID
4355 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4356 SPIRVOperandList Ops;
4357
4358 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004359 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004360
4361 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004362 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004363
4364 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004365 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004366
4367 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004368 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004369 }
4370
David Neto87846742018-04-11 17:36:22 -04004371 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004372 SPIRVInstList.push_back(Inst);
4373 break;
4374 }
4375 case Instruction::Select: {
4376 //
4377 // Generate OpSelect.
4378 //
4379
4380 // Ops[0] = Result Type ID
4381 // Ops[1] = Condition ID
4382    // Ops[2] = True Value ID
4383    // Ops[3] = False Value ID
4384 SPIRVOperandList Ops;
4385
4386    // Find the SPIRV type ID for the result type.
4387 auto Ty = I.getType();
4388 if (Ty->isPointerTy()) {
4389 auto PointeeTy = Ty->getPointerElementType();
4390 if (PointeeTy->isStructTy() &&
4391 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4392 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004393 } else {
4394 // Selecting between pointers requires variable pointers.
4395 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4396 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4397 setVariablePointers(true);
4398 }
David Neto22f144c2017-06-12 14:26:21 -04004399 }
4400 }
4401
David Neto257c3892018-04-11 13:19:45 -04004402 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4403 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004404
David Neto87846742018-04-11 17:36:22 -04004405 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004406 SPIRVInstList.push_back(Inst);
4407 break;
4408 }
4409 case Instruction::ExtractElement: {
4410 // Handle <4 x i8> type manually.
4411 Type *CompositeTy = I.getOperand(0)->getType();
4412 if (is4xi8vec(CompositeTy)) {
4413 //
4414 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4415 // <4 x i8>.
4416 //
4417
4418 //
4419 // Generate OpShiftRightLogical
4420 //
4421 // Ops[0] = Result Type ID
4422 // Ops[1] = Operand 0
4423 // Ops[2] = Operand 1
4424 //
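      // For illustration: extracting element i of the packed <4 x i8> word is
      // roughly a shift followed by a mask (with the shift amount being i * 8):
      //   %shifted = OpShiftRightLogical %uint %vec %shift
      //   %result  = OpBitwiseAnd %uint %shifted %uint_255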
4425 SPIRVOperandList Ops;
4426
David Neto257c3892018-04-11 13:19:45 -04004427 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004428
4429 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004430 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004431
4432 uint32_t Op1ID = 0;
4433 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4434 // Handle constant index.
4435 uint64_t Idx = CI->getZExtValue();
4436 Value *ShiftAmount =
4437 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4438 Op1ID = VMap[ShiftAmount];
4439 } else {
4440 // Handle variable index.
4441 SPIRVOperandList TmpOps;
4442
David Neto257c3892018-04-11 13:19:45 -04004443 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4444 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004445
4446 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004447 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004448
4449 Op1ID = nextID;
4450
David Neto87846742018-04-11 17:36:22 -04004451 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004452 SPIRVInstList.push_back(TmpInst);
4453 }
David Neto257c3892018-04-11 13:19:45 -04004454 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004455
4456 uint32_t ShiftID = nextID;
4457
David Neto87846742018-04-11 17:36:22 -04004458 auto *Inst =
4459 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004460 SPIRVInstList.push_back(Inst);
4461
4462 //
4463 // Generate OpBitwiseAnd
4464 //
4465 // Ops[0] = Result Type ID
4466 // Ops[1] = Operand 0
4467 // Ops[2] = Operand 1
4468 //
4469 Ops.clear();
4470
David Neto257c3892018-04-11 13:19:45 -04004471 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004472
4473 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004474 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004475
David Neto9b2d6252017-09-06 15:47:37 -04004476 // Reset mapping for this value to the result of the bitwise and.
4477 VMap[&I] = nextID;
4478
David Neto87846742018-04-11 17:36:22 -04004479 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004480 SPIRVInstList.push_back(Inst);
4481 break;
4482 }
4483
4484 // Ops[0] = Result Type ID
4485 // Ops[1] = Composite ID
4486 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4487 SPIRVOperandList Ops;
4488
David Neto257c3892018-04-11 13:19:45 -04004489 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004490
4491 spv::Op Opcode = spv::OpCompositeExtract;
4492 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004493 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004494 } else {
David Neto257c3892018-04-11 13:19:45 -04004495 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004496 Opcode = spv::OpVectorExtractDynamic;
4497 }
4498
David Neto87846742018-04-11 17:36:22 -04004499 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004500 SPIRVInstList.push_back(Inst);
4501 break;
4502 }
4503 case Instruction::InsertElement: {
4504 // Handle <4 x i8> type manually.
4505 Type *CompositeTy = I.getOperand(0)->getType();
4506 if (is4xi8vec(CompositeTy)) {
4507 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4508 uint32_t CstFFID = VMap[CstFF];
4509
4510 uint32_t ShiftAmountID = 0;
4511 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4512 // Handle constant index.
4513 uint64_t Idx = CI->getZExtValue();
4514 Value *ShiftAmount =
4515 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4516 ShiftAmountID = VMap[ShiftAmount];
4517 } else {
4518 // Handle variable index.
4519 SPIRVOperandList TmpOps;
4520
David Neto257c3892018-04-11 13:19:45 -04004521 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4522 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004523
4524 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004525 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004526
4527 ShiftAmountID = nextID;
4528
David Neto87846742018-04-11 17:36:22 -04004529 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004530 SPIRVInstList.push_back(TmpInst);
4531 }
4532
4533 //
4534 // Generate mask operations.
4535 //
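      // For illustration, the sequence below computes roughly:
      //   result = (vec & ~(0xFF << (i * 8))) | (value << (i * 8))
      // using OpShiftLeftLogical, OpNot, OpBitwiseAnd and OpBitwiseOr.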
4536
4537 // ShiftLeft mask according to index of insertelement.
4538 SPIRVOperandList Ops;
4539
David Neto257c3892018-04-11 13:19:45 -04004540 const uint32_t ResTyID = lookupType(CompositeTy);
4541 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004542
4543 uint32_t MaskID = nextID;
4544
David Neto87846742018-04-11 17:36:22 -04004545 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004546 SPIRVInstList.push_back(Inst);
4547
4548 // Inverse mask.
4549 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004550 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004551
4552 uint32_t InvMaskID = nextID;
4553
David Neto87846742018-04-11 17:36:22 -04004554 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004555 SPIRVInstList.push_back(Inst);
4556
4557 // Apply mask.
4558 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004559 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004560
4561 uint32_t OrgValID = nextID;
4562
David Neto87846742018-04-11 17:36:22 -04004563 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004564 SPIRVInstList.push_back(Inst);
4565
4566 // Create correct value according to index of insertelement.
4567 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004568 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4569 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004570
4571 uint32_t InsertValID = nextID;
4572
David Neto87846742018-04-11 17:36:22 -04004573 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004574 SPIRVInstList.push_back(Inst);
4575
4576 // Insert value to original value.
4577 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004578 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004579
David Netoa394f392017-08-26 20:45:29 -04004580 VMap[&I] = nextID;
4581
David Neto87846742018-04-11 17:36:22 -04004582 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004583 SPIRVInstList.push_back(Inst);
4584
4585 break;
4586 }
4587
David Neto22f144c2017-06-12 14:26:21 -04004588 SPIRVOperandList Ops;
4589
James Priced26efea2018-06-09 23:28:32 +01004590 // Ops[0] = Result Type ID
4591 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004592
4593 spv::Op Opcode = spv::OpCompositeInsert;
4594 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004595 const auto value = CI->getZExtValue();
4596 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004597 // Ops[1] = Object ID
4598 // Ops[2] = Composite ID
4599 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004600 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004601 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004602 } else {
James Priced26efea2018-06-09 23:28:32 +01004603 // Ops[1] = Composite ID
4604 // Ops[2] = Object ID
4605 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004606 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004607 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004608 Opcode = spv::OpVectorInsertDynamic;
4609 }
4610
David Neto87846742018-04-11 17:36:22 -04004611 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004612 SPIRVInstList.push_back(Inst);
4613 break;
4614 }
4615 case Instruction::ShuffleVector: {
4616 // Ops[0] = Result Type ID
4617 // Ops[1] = Vector 1 ID
4618 // Ops[2] = Vector 2 ID
4619 // Ops[3] ... Ops[n] = Components (Literal Number)
4620 SPIRVOperandList Ops;
4621
David Neto257c3892018-04-11 13:19:45 -04004622 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4623 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004624
alan-bakerc9666712020-04-01 16:31:21 -04004625 auto shuffle = cast<ShuffleVectorInst>(&I);
4626 SmallVector<int, 4> mask;
4627 shuffle->getShuffleMask(mask);
4628 for (auto i : mask) {
4629 if (i == UndefMaskElem) {
4630 if (clspv::Option::HackUndef())
4631 // Use 0 instead of undef.
David Neto257c3892018-04-11 13:19:45 -04004632 Ops << MkNum(0);
alan-bakerc9666712020-04-01 16:31:21 -04004633 else
4634 // Undef for shuffle in SPIR-V.
4635 Ops << MkNum(0xffffffff);
David Neto22f144c2017-06-12 14:26:21 -04004636 } else {
alan-bakerc9666712020-04-01 16:31:21 -04004637 Ops << MkNum(i);
David Neto22f144c2017-06-12 14:26:21 -04004638 }
4639 }
4640
David Neto87846742018-04-11 17:36:22 -04004641 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004642 SPIRVInstList.push_back(Inst);
4643 break;
4644 }
4645 case Instruction::ICmp:
4646 case Instruction::FCmp: {
4647 CmpInst *CmpI = cast<CmpInst>(&I);
4648
David Netod4ca2e62017-07-06 18:47:35 -04004649 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004650 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004651 if (isa<PointerType>(ArgTy)) {
4652 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004653 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004654 errs()
4655 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4656 << "in function " << name << "\n";
4657 llvm_unreachable("Pointer equality check is invalid");
4658 break;
4659 }
4660
David Neto257c3892018-04-11 13:19:45 -04004661 // Ops[0] = Result Type ID
4662 // Ops[1] = Operand 1 ID
4663 // Ops[2] = Operand 2 ID
4664 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004665
David Neto257c3892018-04-11 13:19:45 -04004666 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4667 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004668
4669 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004670 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004671 SPIRVInstList.push_back(Inst);
4672 break;
4673 }
4674 case Instruction::Br: {
4675    // Branch instructions are deferred because they need their target labels'
4676    // IDs. Record the slot's location in the SPIRV instruction list.
4677 DeferredInsts.push_back(
4678 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4679 break;
4680 }
4681 case Instruction::Switch: {
4682 I.print(errs());
4683 llvm_unreachable("Unsupported instruction???");
4684 break;
4685 }
4686 case Instruction::IndirectBr: {
4687 I.print(errs());
4688 llvm_unreachable("Unsupported instruction???");
4689 break;
4690 }
4691 case Instruction::PHI: {
4692    // PHI instructions are deferred because they need the IDs of their incoming
4693    // values and blocks. Record the slot's location in the SPIRV instruction list.
4694 DeferredInsts.push_back(
4695 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4696 break;
4697 }
4698 case Instruction::Alloca: {
4699 //
4700 // Generate OpVariable.
4701 //
4702 // Ops[0] : Result Type ID
4703 // Ops[1] : Storage Class
4704 SPIRVOperandList Ops;
4705
David Neto257c3892018-04-11 13:19:45 -04004706 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004707
David Neto87846742018-04-11 17:36:22 -04004708 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004709 SPIRVInstList.push_back(Inst);
4710 break;
4711 }
4712 case Instruction::Load: {
4713 LoadInst *LD = cast<LoadInst>(&I);
4714 //
4715 // Generate OpLoad.
4716 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004717
alan-baker5b86ed72019-02-15 08:26:50 -05004718 if (LD->getType()->isPointerTy()) {
4719 // Loading a pointer requires variable pointers.
4720 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4721 }
David Neto22f144c2017-06-12 14:26:21 -04004722
David Neto0a2f98d2017-09-15 19:38:40 -04004723 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004724 uint32_t PointerID = VMap[LD->getPointerOperand()];
4725
4726 // This is a hack to work around what looks like a driver bug.
4727 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004728    // builtin value, use an OpBitwiseAnd of the value's ID rather than
4729 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004730 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004731 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004732 // Generate a bitwise-and of the original value with itself.
4733 // We should have been able to get away with just an OpCopyObject,
4734 // but we need something more complex to get past certain driver bugs.
4735 // This is ridiculous, but necessary.
4736 // TODO(dneto): Revisit this once drivers fix their bugs.
4737
4738 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004739 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4740 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004741
David Neto87846742018-04-11 17:36:22 -04004742 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004743 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004744 break;
4745 }
4746
4747 // This is the normal path. Generate a load.
4748
David Neto22f144c2017-06-12 14:26:21 -04004749 // Ops[0] = Result Type ID
4750 // Ops[1] = Pointer ID
4751 // Ops[2] ... Ops[n] = Optional Memory Access
4752 //
4753 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004754
David Neto22f144c2017-06-12 14:26:21 -04004755 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004756 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004757
David Neto87846742018-04-11 17:36:22 -04004758 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004759 SPIRVInstList.push_back(Inst);
4760 break;
4761 }
4762 case Instruction::Store: {
4763 StoreInst *ST = cast<StoreInst>(&I);
4764 //
4765 // Generate OpStore.
4766 //
4767
alan-baker5b86ed72019-02-15 08:26:50 -05004768 if (ST->getValueOperand()->getType()->isPointerTy()) {
4769 // Storing a pointer requires variable pointers.
4770 setVariablePointersCapabilities(
4771 ST->getValueOperand()->getType()->getPointerAddressSpace());
4772 }
4773
David Neto22f144c2017-06-12 14:26:21 -04004774 // Ops[0] = Pointer ID
4775 // Ops[1] = Object ID
4776 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4777 //
4778 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004779 SPIRVOperandList Ops;
4780 Ops << MkId(VMap[ST->getPointerOperand()])
4781 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004782
David Neto87846742018-04-11 17:36:22 -04004783 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004784 SPIRVInstList.push_back(Inst);
4785 break;
4786 }
4787 case Instruction::AtomicCmpXchg: {
4788 I.print(errs());
4789 llvm_unreachable("Unsupported instruction???");
4790 break;
4791 }
4792 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004793 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4794
4795 spv::Op opcode;
4796
4797 switch (AtomicRMW->getOperation()) {
4798 default:
4799 I.print(errs());
4800 llvm_unreachable("Unsupported instruction???");
4801 case llvm::AtomicRMWInst::Add:
4802 opcode = spv::OpAtomicIAdd;
4803 break;
4804 case llvm::AtomicRMWInst::Sub:
4805 opcode = spv::OpAtomicISub;
4806 break;
4807 case llvm::AtomicRMWInst::Xchg:
4808 opcode = spv::OpAtomicExchange;
4809 break;
4810 case llvm::AtomicRMWInst::Min:
4811 opcode = spv::OpAtomicSMin;
4812 break;
4813 case llvm::AtomicRMWInst::Max:
4814 opcode = spv::OpAtomicSMax;
4815 break;
4816 case llvm::AtomicRMWInst::UMin:
4817 opcode = spv::OpAtomicUMin;
4818 break;
4819 case llvm::AtomicRMWInst::UMax:
4820 opcode = spv::OpAtomicUMax;
4821 break;
4822 case llvm::AtomicRMWInst::And:
4823 opcode = spv::OpAtomicAnd;
4824 break;
4825 case llvm::AtomicRMWInst::Or:
4826 opcode = spv::OpAtomicOr;
4827 break;
4828 case llvm::AtomicRMWInst::Xor:
4829 opcode = spv::OpAtomicXor;
4830 break;
4831 }
4832
4833 //
4834 // Generate OpAtomic*.
4835 //
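    // For illustration (IDs invented): 'atomicrmw add i32* %p, i32 %v seq_cst'
    // becomes roughly:
    //   %result = OpAtomicIAdd %uint %p %scope_device %semantics %v
    // where the scope and semantics operands are the constants registered below.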
4836 SPIRVOperandList Ops;
4837
David Neto257c3892018-04-11 13:19:45 -04004838 Ops << MkId(lookupType(I.getType()))
4839 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004840
4841 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004842 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004843 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004844
4845 const auto ConstantMemorySemantics = ConstantInt::get(
4846 IntTy, spv::MemorySemanticsUniformMemoryMask |
4847 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004848 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004849
David Neto257c3892018-04-11 13:19:45 -04004850 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004851
4852 VMap[&I] = nextID;
4853
David Neto87846742018-04-11 17:36:22 -04004854 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004855 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004856 break;
4857 }
4858 case Instruction::Fence: {
4859 I.print(errs());
4860 llvm_unreachable("Unsupported instruction???");
4861 break;
4862 }
4863 case Instruction::Call: {
4864 CallInst *Call = dyn_cast<CallInst>(&I);
4865 Function *Callee = Call->getCalledFunction();
4866
Alan Baker202c8c72018-08-13 13:47:44 -04004867 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004868 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4869 // Generate an OpLoad
4870 SPIRVOperandList Ops;
4871 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004872
David Neto862b7d82018-06-14 18:48:37 -04004873 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4874 << MkId(ResourceVarDeferredLoadCalls[Call]);
4875
4876 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4877 SPIRVInstList.push_back(Inst);
4878 VMap[Call] = load_id;
4879 break;
4880
4881 } else {
4882 // This maps to an OpVariable we've already generated.
4883 // No code is generated for the call.
4884 }
4885 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004886 } else if (Callee->getName().startswith(
4887 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004888 // Don't codegen an instruction here, but instead map this call directly
4889 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004890 int spec_id = static_cast<int>(
4891 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004892 const auto &info = LocalSpecIdInfoMap[spec_id];
4893 VMap[Call] = info.variable_id;
4894 break;
David Neto862b7d82018-06-14 18:48:37 -04004895 }
4896
4897 // Sampler initializers become a load of the corresponding sampler.
4898
Kévin Petitdf71de32019-04-09 14:09:50 +01004899 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004900 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004901 const auto third_param = static_cast<unsigned>(
4902 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4903 auto sampler_value = third_param;
4904 if (clspv::Option::UseSamplerMap()) {
4905 sampler_value = getSamplerMap()[third_param].first;
4906 }
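// With the sampler map enabled, the third argument of the literal-sampler
// accessor is an index into the sampler map and the mapped literal is used;
// otherwise the argument is the sampler literal itself. Either way,
// sampler_value is the key into SamplerLiteralToIDMap below.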
David Neto862b7d82018-06-14 18:48:37 -04004907
4908 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004909 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004910 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004911
David Neto257c3892018-04-11 13:19:45 -04004912 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004913 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004914
David Neto862b7d82018-06-14 18:48:37 -04004915 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004916 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004917 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004918 break;
4919 }
4920
Kévin Petit349c9502019-03-28 17:24:14 +00004921 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004922 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4923 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4924 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004925
Kévin Petit617a76d2019-04-04 13:54:16 +01004926 // If the switch above didn't have an entry maybe the intrinsic
4927 // is using the name mangling logic.
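// In the mangled form, call operand 0 is an i32 constant holding the SPIR-V
// opcode, and the remaining call operands are forwarded as the generated
// instruction's operands.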
4928 bool usesMangler = false;
4929 if (opcode == spv::OpNop) {
4930 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4931 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4932 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4933 usesMangler = true;
4934 }
4935 }
4936
Kévin Petit349c9502019-03-28 17:24:14 +00004937 if (opcode != spv::OpNop) {
4938
David Neto22f144c2017-06-12 14:26:21 -04004939 SPIRVOperandList Ops;
4940
Kévin Petit349c9502019-03-28 17:24:14 +00004941 if (!I.getType()->isVoidTy()) {
4942 Ops << MkId(lookupType(I.getType()));
4943 }
David Neto22f144c2017-06-12 14:26:21 -04004944
Kévin Petit617a76d2019-04-04 13:54:16 +01004945 unsigned firstOperand = usesMangler ? 1 : 0;
4946 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004947 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004948 }
4949
Kévin Petit349c9502019-03-28 17:24:14 +00004950 if (!I.getType()->isVoidTy()) {
4951 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004952 }
4953
Kévin Petit349c9502019-03-28 17:24:14 +00004954 SPIRVInstruction *Inst;
4955 if (!I.getType()->isVoidTy()) {
4956 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4957 } else {
4958 Inst = new SPIRVInstruction(opcode, Ops);
4959 }
Kévin Petit8a560882019-03-21 15:24:34 +00004960 SPIRVInstList.push_back(Inst);
4961 break;
4962 }
4963
David Neto22f144c2017-06-12 14:26:21 -04004964 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4965 if (Callee->getName().startswith("spirv.copy_memory")) {
4966 //
4967 // Generate OpCopyMemory.
4968 //
4969
4970 // Ops[0] = Dst ID
4971 // Ops[1] = Src ID
4972 // Ops[2] = Memory Access
4973 // Ops[3] = Alignment
4974
4975 auto IsVolatile =
4976 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4977
4978 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4979 : spv::MemoryAccessMaskNone;
4980
4981 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4982
4983 auto Alignment =
4984 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4985
David Neto257c3892018-04-11 13:19:45 -04004986 SPIRVOperandList Ops;
4987 Ops << MkId(VMap[Call->getArgOperand(0)])
4988 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4989 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004990
David Neto87846742018-04-11 17:36:22 -04004991 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004992
4993 SPIRVInstList.push_back(Inst);
4994
4995 break;
4996 }
4997
SJW2c317da2020-03-23 07:39:13 -05004998 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4999 // Additionally, OpTypeSampledImage is generated.
SJW173c7e92020-03-16 08:44:47 -05005000 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005001 //
5002 // Generate OpSampledImage.
5003 //
5004 // Ops[0] = Result Type ID
5005 // Ops[1] = Image ID
5006 // Ops[2] = Sampler ID
5007 //
5008 SPIRVOperandList Ops;
5009
5010 Value *Image = Call->getArgOperand(0);
5011 Value *Sampler = Call->getArgOperand(1);
5012 Value *Coordinate = Call->getArgOperand(2);
5013
5014 TypeMapType &OpImageTypeMap = getImageTypeMap();
5015 Type *ImageTy = Image->getType()->getPointerElementType();
5016 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04005017 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005018 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04005019
5020 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04005021
5022 uint32_t SampledImageID = nextID;
5023
David Neto87846742018-04-11 17:36:22 -04005024 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005025 SPIRVInstList.push_back(Inst);
5026
5027 //
5028 // Generate OpImageSampleExplicitLod.
5029 //
5030 // Ops[0] = Result Type ID
5031 // Ops[1] = Sampled Image ID
5032 // Ops[2] = Coordinate ID
5033 // Ops[3] = Image Operands (Literal Number)
5034 // Ops[4] ... Ops[n] = Image Operand IDs
5035 //
5036 Ops.clear();
5037
alan-bakerf67468c2019-11-25 15:51:49 -05005038 const bool is_int_image = IsIntImageType(Image->getType());
5039 uint32_t result_type = 0;
5040 if (is_int_image) {
5041 result_type = v4int32ID;
5042 } else {
5043 result_type = lookupType(Call->getType());
5044 }
5045
5046 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
5047 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04005048
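// Sample with an explicit Lod of 0.0f: implicit-Lod sampling is only valid
// in the Fragment execution model, not in GLCompute.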
5049 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04005050 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04005051
alan-bakerf67468c2019-11-25 15:51:49 -05005052 uint32_t final_id = nextID++;
5053 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04005054
alan-bakerf67468c2019-11-25 15:51:49 -05005055 uint32_t image_id = final_id;
5056 if (is_int_image) {
5057 // Int image requires a bitcast from v4int to v4uint.
5058 image_id = nextID++;
5059 }
5060
5061 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005062 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05005063
5064 if (is_int_image) {
5065 // Generate the bitcast.
5066 Ops.clear();
5067 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5068 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5069 SPIRVInstList.push_back(Inst);
5070 }
David Neto22f144c2017-06-12 14:26:21 -04005071 break;
5072 }
5073
alan-baker75090e42020-02-20 11:21:04 -05005074 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05005075 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05005076 Value *Image = Call->getArgOperand(0);
5077 Value *Coordinate = Call->getArgOperand(1);
5078
5079 //
5080 // Generate OpImageFetch
5081 //
5082 // Ops[0] = Result Type ID
5083 // Ops[1] = Image ID
5084 // Ops[2] = Coordinate ID
5085 // Ops[3] = Image Operands (Lod)
5086 // Ops[4] = Lod value (0)
5087 //
5088 SPIRVOperandList Ops;
5089
5090 const bool is_int_image = IsIntImageType(Image->getType());
5091 uint32_t result_type = 0;
5092 if (is_int_image) {
5093 result_type = v4int32ID;
5094 } else {
5095 result_type = lookupType(Call->getType());
5096 }
5097
5098 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
5099 << MkNum(spv::ImageOperandsLodMask);
5100
5101 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5102 Ops << MkId(VMap[CstInt0]);
5103
5104 uint32_t final_id = nextID++;
5105 VMap[&I] = final_id;
5106
5107 uint32_t image_id = final_id;
5108 if (is_int_image) {
5109 // Int image requires a bitcast from v4int to v4uint.
5110 image_id = nextID++;
5111 }
5112
5113 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5114 SPIRVInstList.push_back(Inst);
5115
5116 if (is_int_image) {
5117 // Generate the bitcast.
5118 Ops.clear();
5119 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5120 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5121 SPIRVInstList.push_back(Inst);
5122 }
5123 break;
5124 }
5125
alan-bakerf67468c2019-11-25 15:51:49 -05005126 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05005127 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005128 //
5129 // Generate OpImageWrite.
5130 //
5131 // Ops[0] = Image ID
5132 // Ops[1] = Coordinate ID
5133 // Ops[2] = Texel ID
5134 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5135 // Ops[4] ... Ops[n] = (Optional) Operands ID
5136 //
5137 SPIRVOperandList Ops;
5138
5139 Value *Image = Call->getArgOperand(0);
5140 Value *Coordinate = Call->getArgOperand(1);
5141 Value *Texel = Call->getArgOperand(2);
5142
5143 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005144 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005145 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005146
5147 const bool is_int_image = IsIntImageType(Image->getType());
5148 if (is_int_image) {
5149 // Generate a bitcast to v4int and use it as the texel value.
5150 uint32_t castID = nextID++;
5151 Ops << MkId(v4int32ID) << MkId(TexelID);
5152 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5153 SPIRVInstList.push_back(cast);
5154 Ops.clear();
5155 TexelID = castID;
5156 }
David Neto257c3892018-04-11 13:19:45 -04005157 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005158
David Neto87846742018-04-11 17:36:22 -04005159 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005160 SPIRVInstList.push_back(Inst);
5161 break;
5162 }
5163
alan-bakerce179f12019-12-06 19:02:22 -05005164 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05005165 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005166 //
alan-bakerce179f12019-12-06 19:02:22 -05005167 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005168 //
5169 // Ops[0] = Image ID
5170 //
alan-bakerce179f12019-12-06 19:02:22 -05005171 // Result type has components equal to the dimensionality of the image,
5172 // plus 1 if the image is arrayed.
5173 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005174 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005175 SPIRVOperandList Ops;
5176
5177 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005178 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5179 uint32_t SizesTypeID = 0;
5180
David Neto5c22a252018-03-15 16:07:41 -04005181 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005182 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005183 const uint32_t components =
5184 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005185 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005186 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5187 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005188 SizesTypeID =
5189 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005190 }
David Neto5c22a252018-03-15 16:07:41 -04005191 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005192 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005193 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05005194 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05005195 query_opcode = spv::OpImageQuerySizeLod;
5196 // Need explicit 0 for Lod operand.
5197 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5198 Ops << MkId(VMap[CstInt0]);
5199 }
David Neto5c22a252018-03-15 16:07:41 -04005200
5201 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005202 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005203 SPIRVInstList.push_back(QueryInst);
5204
alan-bakerce179f12019-12-06 19:02:22 -05005205 // May require an extra instruction to create the appropriate result of
5206 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05005207 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05005208 if (dim == 3) {
5209 // get_image_dim returns an int4 for 3D images.
5210 //
5211 // Reset value map entry since we generated an intermediate
5212 // instruction.
5213 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005214
alan-bakerce179f12019-12-06 19:02:22 -05005215 // Implement:
5216 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5217 Ops.clear();
5218 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5219 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005220
alan-bakerce179f12019-12-06 19:02:22 -05005221 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5222 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005223
alan-bakerce179f12019-12-06 19:02:22 -05005224 auto *Inst =
5225 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5226 SPIRVInstList.push_back(Inst);
5227 } else if (dim != components) {
5228 // get_image_dim returns an int2 regardless of the arrayedness of the
5229 // image. If the image is arrayed an element must be dropped from the
5230 // query result.
5231 //
5232 // Reset value map entry since we generated an intermediate
5233 // instruction.
5234 VMap[&I] = nextID;
5235
5236 // Implement:
5237 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5238 Ops.clear();
5239 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5240 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5241
5242 auto *Inst =
5243 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5244 SPIRVInstList.push_back(Inst);
5245 }
5246 } else if (components > 1) {
5247 // Reset value map entry since we generated an intermediate instruction.
5248 VMap[&I] = nextID;
5249
5250 // Implement:
5251 // %result = OpCompositeExtract %uint %sizes <component number>
5252 Ops.clear();
5253 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5254
5255 uint32_t component = 0;
5256 if (IsGetImageHeight(Callee))
5257 component = 1;
5258 else if (IsGetImageDepth(Callee))
5259 component = 2;
5260 Ops << MkNum(component);
5261
5262 auto *Inst =
5263 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5264 SPIRVInstList.push_back(Inst);
5265 }
David Neto5c22a252018-03-15 16:07:41 -04005266 break;
5267 }
5268
David Neto22f144c2017-06-12 14:26:21 -04005269 // Call instruction is deferred because it needs the callee function's ID.
5270 // Record the slot's location in SPIRVInstList.
5271 DeferredInsts.push_back(
5272 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5273
David Neto3fbb4072017-10-16 11:28:14 -04005274 // Check whether the implementation of this call uses an extended
5275 // instruction plus one more value-producing instruction. If so, then
5276 // reserve the id for the extra value-producing slot.
5277 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5278 if (EInst != kGlslExtInstBad) {
5279 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005280 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005281 VMap[&I] = nextID;
5282 nextID++;
5283 }
5284 break;
5285 }
5286 case Instruction::Ret: {
5287 unsigned NumOps = I.getNumOperands();
5288 if (NumOps == 0) {
5289 //
5290 // Generate OpReturn.
5291 //
David Netoef5ba2b2019-12-20 08:35:54 -05005292 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005293 } else {
5294 //
5295 // Generate OpReturnValue.
5296 //
5297
5298 // Ops[0] = Return Value ID
5299 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005300
5301 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005302
David Neto87846742018-04-11 17:36:22 -04005303 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005304 SPIRVInstList.push_back(Inst);
5305 break;
5306 }
5307 break;
5308 }
5309 }
5310}
5311
5312void SPIRVProducerPass::GenerateFuncEpilogue() {
SJW69939d52020-04-16 07:29:07 -05005313 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005314
5315 //
5316 // Generate OpFunctionEnd
5317 //
5318
David Netoef5ba2b2019-12-20 08:35:54 -05005319 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005320 SPIRVInstList.push_back(Inst);
5321}
5322
5323bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005324 // Don't specialize <4 x i8> if i8 is generally supported.
5325 if (clspv::Option::Int8Support())
5326 return false;
5327
David Neto22f144c2017-06-12 14:26:21 -04005328 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04005329 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
5330 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
5331 VecTy->getNumElements() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04005332 return true;
5333 }
5334 }
5335
5336 return false;
5337}
5338
5339void SPIRVProducerPass::HandleDeferredInstruction() {
SJW69939d52020-04-16 07:29:07 -05005340 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005341 ValueMapType &VMap = getValueMap();
5342 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5343
5344 for (auto DeferredInst = DeferredInsts.rbegin();
5345 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5346 Value *Inst = std::get<0>(*DeferredInst);
5347 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5348 if (InsertPoint != SPIRVInstList.end()) {
5349 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5350 ++InsertPoint;
5351 }
5352 }
5353
5354 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005355 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005356 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005357 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005358 //
5359 // Generate OpLoopMerge.
5360 //
5361 // Ops[0] = Merge Block ID
5362 // Ops[1] = Continue Target ID
5363 // Ops[2] = Loop Control
5364 SPIRVOperandList Ops;
5365
alan-baker06cad652019-12-03 17:56:47 -05005366 auto MergeBB = MergeBlocks[BrBB];
5367 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005368 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005369 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005370 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005371 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005372
David Neto87846742018-04-11 17:36:22 -04005373 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005374 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005375 } else if (MergeBlocks.count(BrBB)) {
5376 //
5377 // Generate OpSelectionMerge.
5378 //
5379 // Ops[0] = Merge Block ID
5380 // Ops[1] = Selection Control
5381 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005382
alan-baker06cad652019-12-03 17:56:47 -05005383 auto MergeBB = MergeBlocks[BrBB];
5384 uint32_t MergeBBID = VMap[MergeBB];
5385 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005386
alan-baker06cad652019-12-03 17:56:47 -05005387 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5388 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005389 }
5390
5391 if (Br->isConditional()) {
5392 //
5393 // Generate OpBranchConditional.
5394 //
5395 // Ops[0] = Condition ID
5396 // Ops[1] = True Label ID
5397 // Ops[2] = False Label ID
5398 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5399 SPIRVOperandList Ops;
5400
5401 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005402 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005403 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005404
5405 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005406
David Neto87846742018-04-11 17:36:22 -04005407 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005408 SPIRVInstList.insert(InsertPoint, BrInst);
5409 } else {
5410 //
5411 // Generate OpBranch.
5412 //
5413 // Ops[0] = Target Label ID
5414 SPIRVOperandList Ops;
5415
5416 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005417 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005418
David Neto87846742018-04-11 17:36:22 -04005419 SPIRVInstList.insert(InsertPoint,
5420 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005421 }
5422 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04005423 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
5424 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05005425 // OpPhi on pointers requires variable pointers.
5426 setVariablePointersCapabilities(
5427 PHI->getType()->getPointerAddressSpace());
5428 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5429 setVariablePointers(true);
5430 }
5431 }
5432
David Neto22f144c2017-06-12 14:26:21 -04005433 //
5434 // Generate OpPhi.
5435 //
5436 // Ops[0] = Result Type ID
5437 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5438 SPIRVOperandList Ops;
5439
David Neto257c3892018-04-11 13:19:45 -04005440 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005441
David Neto22f144c2017-06-12 14:26:21 -04005442 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5443 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005444 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005445 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005446 }
5447
5448 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005449 InsertPoint,
5450 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005451 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5452 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05005453 LLVMContext &Context = Callee->getContext();
5454 auto IntTy = Type::getInt32Ty(Context);
5455 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04005456 auto callee_name = Callee->getName();
5457 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005458
5459 if (EInst) {
5460 uint32_t &ExtInstImportID = getOpExtInstImportID();
5461
5462 //
5463 // Generate OpExtInst.
5464 //
5465
5466 // Ops[0] = Result Type ID
5467 // Ops[1] = Set ID (OpExtInstImport ID)
5468 // Ops[2] = Instruction Number (Literal Number)
5469 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5470 SPIRVOperandList Ops;
5471
David Neto862b7d82018-06-14 18:48:37 -04005472 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5473 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005474
David Neto22f144c2017-06-12 14:26:21 -04005475 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5476 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005477 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005478 }
5479
David Neto87846742018-04-11 17:36:22 -04005480 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5481 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005482 SPIRVInstList.insert(InsertPoint, ExtInst);
5483
David Neto3fbb4072017-10-16 11:28:14 -04005484 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5485 if (IndirectExtInst != kGlslExtInstBad) {
5486 // Generate one more instruction that uses the result of the extended
5487 // instruction. Its result id is one more than the id of the
5488 // extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04005489 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5490 &VMap, &SPIRVInstList, &InsertPoint](
5491 spv::Op opcode, Constant *constant) {
5492 //
5493 // Generate instruction like:
5494 // result = opcode constant <extinst-result>
5495 //
5496 // Ops[0] = Result Type ID
5497 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5498 // Ops[2] = Operand 1 ;; the result of the extended instruction
5499 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005500
David Neto3fbb4072017-10-16 11:28:14 -04005501 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005502 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005503
5504 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5505 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005506 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5507 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005508 }
David Neto257c3892018-04-11 13:19:45 -04005509 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005510
5511 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005512 InsertPoint, new SPIRVInstruction(
5513 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005514 };
5515
5516 switch (IndirectExtInst) {
5517 case glsl::ExtInstFindUMsb: // Implementing clz
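// clz is implemented as 31 - FindUMsb(x); the constant is splatted for
// vector operands.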
SJW2c317da2020-03-23 07:39:13 -05005518 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04005519 break;
5520 case glsl::ExtInstAcos: // Implementing acospi
5521 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005522 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005523 case glsl::ExtInstAtan2: // Implementing atan2pi
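// The *pi variants scale the radians result by 1/pi,
// e.g. acospi(x) = acos(x) * (1/pi).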
5524 generate_extra_inst(
5525 spv::OpFMul,
5526 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5527 break;
5528
5529 default:
5530 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005531 }
David Neto22f144c2017-06-12 14:26:21 -04005532 }
David Neto3fbb4072017-10-16 11:28:14 -04005533
SJW2c317da2020-03-23 07:39:13 -05005534 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04005535 //
5536 // Generate OpBitCount
5537 //
5538 // Ops[0] = Result Type ID
5539 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005540 SPIRVOperandList Ops;
5541 Ops << MkId(lookupType(Call->getType()))
5542 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005543
5544 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005545 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005546 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005547
David Neto862b7d82018-06-14 18:48:37 -04005548 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005549
5550 // Generate an OpCompositeConstruct
5551 SPIRVOperandList Ops;
5552
5553 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005554 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005555
5556 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005557 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005558 }
5559
5560 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005561 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5562 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005563
Alan Baker202c8c72018-08-13 13:47:44 -04005564 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5565
5566 // We have already mapped the call's result value to an ID.
5567 // Don't generate any code now.
5568
5569 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005570
5571 // We have already mapped the call's result value to an ID.
5572 // Don't generate any code now.
5573
David Neto22f144c2017-06-12 14:26:21 -04005574 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005575 if (Call->getType()->isPointerTy()) {
5576 // Functions returning pointers require variable pointers.
5577 setVariablePointersCapabilities(
5578 Call->getType()->getPointerAddressSpace());
5579 }
5580
David Neto22f144c2017-06-12 14:26:21 -04005581 //
5582 // Generate OpFunctionCall.
5583 //
5584
5585 // Ops[0] = Result Type ID
5586 // Ops[1] = Callee Function ID
5587 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5588 SPIRVOperandList Ops;
5589
David Neto862b7d82018-06-14 18:48:37 -04005590 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005591
5592 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005593 if (CalleeID == 0) {
5594 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005595 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005596 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5597 // causes an infinite loop. Instead, go ahead and generate
5598 // the bad function call. A validator will catch the 0-Id.
5599 // llvm_unreachable("Can't translate function call");
5600 }
David Neto22f144c2017-06-12 14:26:21 -04005601
David Neto257c3892018-04-11 13:19:45 -04005602 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005603
David Neto22f144c2017-06-12 14:26:21 -04005604 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5605 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005606 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005607 auto *operand_type = operand->getType();
5608 // Images and samplers can be passed as function parameters without
5609 // variable pointers.
5610 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5611 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005612 auto sc =
5613 GetStorageClass(operand->getType()->getPointerAddressSpace());
5614 if (sc == spv::StorageClassStorageBuffer) {
5615 // Passing SSBO by reference requires variable pointers storage
5616 // buffer.
5617 setVariablePointersStorageBuffer(true);
5618 } else if (sc == spv::StorageClassWorkgroup) {
5619 // Workgroup references require variable pointers if they are not
5620 // memory object declarations.
5621 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5622 // Workgroup accessor represents a variable reference.
5623 if (!operand_call->getCalledFunction()->getName().startswith(
5624 clspv::WorkgroupAccessorFunction()))
5625 setVariablePointers(true);
5626 } else {
5627 // Arguments are function parameters.
5628 if (!isa<Argument>(operand))
5629 setVariablePointers(true);
5630 }
5631 }
5632 }
5633 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005634 }
5635
David Neto87846742018-04-11 17:36:22 -04005636 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5637 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005638 SPIRVInstList.insert(InsertPoint, CallInst);
5639 }
5640 }
5641 }
5642}
5643
SJW77b87ad2020-04-21 14:37:52 -05005644void SPIRVProducerPass::HandleDeferredDecorations() {
5645 const auto &DL = module->getDataLayout();
Alan Baker202c8c72018-08-13 13:47:44 -04005646 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005647 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005648 }
David Neto1a1a0582017-07-07 12:01:44 -04005649
SJW69939d52020-04-16 07:29:07 -05005650 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kAnnotations);
David Neto1a1a0582017-07-07 12:01:44 -04005651
David Netoc6f3ab22018-04-06 18:02:31 -04005652 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5653 // instructions we generated earlier.
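// e.g. a pointer to 32-bit int receives OpDecorate %ptr ArrayStride 4,
// where 4 is the pointee's alloc size.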
David Neto85082642018-03-24 06:55:20 -07005654 for (auto *type : getTypesNeedingArrayStride()) {
5655 Type *elemTy = nullptr;
5656 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5657 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005658 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04005659 elemTy = arrayTy->getElementType();
5660 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
5661 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07005662 } else {
5663 errs() << "Unhandled strided type " << *type << "\n";
5664 llvm_unreachable("Unhandled strided type");
5665 }
David Neto1a1a0582017-07-07 12:01:44 -04005666
5667 // Ops[0] = Target ID
5668 // Ops[1] = Decoration (ArrayStride)
5669 // Ops[2] = Stride number (Literal Number)
5670 SPIRVOperandList Ops;
5671
David Neto85082642018-03-24 06:55:20 -07005672 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005673 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005674
5675 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5676 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005677
David Neto87846742018-04-11 17:36:22 -04005678 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05005679 SPIRVInstList.push_back(DecoInst);
David Neto1a1a0582017-07-07 12:01:44 -04005680 }
David Netoc6f3ab22018-04-06 18:02:31 -04005681
5682 // Emit SpecId decorations targeting the array size value.
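// i.e. OpDecorate %array_size_id SpecId <spec_id>, so the size of the
// workgroup-local array can be supplied at specialization time.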
SJW77b87ad2020-04-21 14:37:52 -05005683 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04005684 auto kind = pair.first;
5685 auto spec_id = pair.second;
5686
5687 if (kind != SpecConstant::kLocalMemorySize)
5688 continue;
5689
alan-bakerb6b09dc2018-11-08 16:59:28 -05005690 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005691 SPIRVOperandList Ops;
5692 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5693 << MkNum(arg_info.spec_id);
SJW69939d52020-04-16 07:29:07 -05005694 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005695 }
David Neto1a1a0582017-07-07 12:01:44 -04005696}
5697
David Neto22f144c2017-06-12 14:26:21 -04005698glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005699
5700 const auto &fi = Builtins::Lookup(Name);
5701 switch (fi) {
5702 case Builtins::kClamp: {
5703 auto param_type = fi.getParameter(0);
5704 if (param_type.type_id == Type::FloatTyID) {
5705 return glsl::ExtInst::ExtInstFClamp;
5706 }
5707 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5708 : glsl::ExtInst::ExtInstUClamp;
5709 }
5710 case Builtins::kMax: {
5711 auto param_type = fi.getParameter(0);
5712 if (param_type.type_id == Type::FloatTyID) {
5713 return glsl::ExtInst::ExtInstFMax;
5714 }
5715 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5716 : glsl::ExtInst::ExtInstUMax;
5717 }
5718 case Builtins::kMin: {
5719 auto param_type = fi.getParameter(0);
5720 if (param_type.type_id == Type::FloatTyID) {
5721 return glsl::ExtInst::ExtInstFMin;
5722 }
5723 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5724 : glsl::ExtInst::ExtInstUMin;
5725 }
5726 case Builtins::kAbs:
5727 return glsl::ExtInst::ExtInstSAbs;
5728 case Builtins::kFmax:
5729 return glsl::ExtInst::ExtInstFMax;
5730 case Builtins::kFmin:
5731 return glsl::ExtInst::ExtInstFMin;
5732 case Builtins::kDegrees:
5733 return glsl::ExtInst::ExtInstDegrees;
5734 case Builtins::kRadians:
5735 return glsl::ExtInst::ExtInstRadians;
5736 case Builtins::kMix:
5737 return glsl::ExtInst::ExtInstFMix;
5738 case Builtins::kAcos:
5739 case Builtins::kAcospi:
5740 return glsl::ExtInst::ExtInstAcos;
5741 case Builtins::kAcosh:
5742 return glsl::ExtInst::ExtInstAcosh;
5743 case Builtins::kAsin:
5744 case Builtins::kAsinpi:
5745 return glsl::ExtInst::ExtInstAsin;
5746 case Builtins::kAsinh:
5747 return glsl::ExtInst::ExtInstAsinh;
5748 case Builtins::kAtan:
5749 case Builtins::kAtanpi:
5750 return glsl::ExtInst::ExtInstAtan;
5751 case Builtins::kAtanh:
5752 return glsl::ExtInst::ExtInstAtanh;
5753 case Builtins::kAtan2:
5754 case Builtins::kAtan2pi:
5755 return glsl::ExtInst::ExtInstAtan2;
5756 case Builtins::kCeil:
5757 return glsl::ExtInst::ExtInstCeil;
5758 case Builtins::kSin:
5759 case Builtins::kHalfSin:
5760 case Builtins::kNativeSin:
5761 return glsl::ExtInst::ExtInstSin;
5762 case Builtins::kSinh:
5763 return glsl::ExtInst::ExtInstSinh;
5764 case Builtins::kCos:
5765 case Builtins::kHalfCos:
5766 case Builtins::kNativeCos:
5767 return glsl::ExtInst::ExtInstCos;
5768 case Builtins::kCosh:
5769 return glsl::ExtInst::ExtInstCosh;
5770 case Builtins::kTan:
5771 case Builtins::kHalfTan:
5772 case Builtins::kNativeTan:
5773 return glsl::ExtInst::ExtInstTan;
5774 case Builtins::kTanh:
5775 return glsl::ExtInst::ExtInstTanh;
5776 case Builtins::kExp:
5777 case Builtins::kHalfExp:
5778 case Builtins::kNativeExp:
5779 return glsl::ExtInst::ExtInstExp;
5780 case Builtins::kExp2:
5781 case Builtins::kHalfExp2:
5782 case Builtins::kNativeExp2:
5783 return glsl::ExtInst::ExtInstExp2;
5784 case Builtins::kLog:
5785 case Builtins::kHalfLog:
5786 case Builtins::kNativeLog:
5787 return glsl::ExtInst::ExtInstLog;
5788 case Builtins::kLog2:
5789 case Builtins::kHalfLog2:
5790 case Builtins::kNativeLog2:
5791 return glsl::ExtInst::ExtInstLog2;
5792 case Builtins::kFabs:
5793 return glsl::ExtInst::ExtInstFAbs;
5794 case Builtins::kFma:
5795 return glsl::ExtInst::ExtInstFma;
5796 case Builtins::kFloor:
5797 return glsl::ExtInst::ExtInstFloor;
5798 case Builtins::kLdexp:
5799 return glsl::ExtInst::ExtInstLdexp;
5800 case Builtins::kPow:
5801 case Builtins::kPowr:
5802 case Builtins::kHalfPowr:
5803 case Builtins::kNativePowr:
5804 return glsl::ExtInst::ExtInstPow;
5805 case Builtins::kRound:
5806 return glsl::ExtInst::ExtInstRound;
5807 case Builtins::kSqrt:
5808 case Builtins::kHalfSqrt:
5809 case Builtins::kNativeSqrt:
5810 return glsl::ExtInst::ExtInstSqrt;
5811 case Builtins::kRsqrt:
5812 case Builtins::kHalfRsqrt:
5813 case Builtins::kNativeRsqrt:
5814 return glsl::ExtInst::ExtInstInverseSqrt;
5815 case Builtins::kTrunc:
5816 return glsl::ExtInst::ExtInstTrunc;
5817 case Builtins::kFrexp:
5818 return glsl::ExtInst::ExtInstFrexp;
5819 case Builtins::kFract:
5820 return glsl::ExtInst::ExtInstFract;
5821 case Builtins::kSign:
5822 return glsl::ExtInst::ExtInstFSign;
5823 case Builtins::kLength:
5824 case Builtins::kFastLength:
5825 return glsl::ExtInst::ExtInstLength;
5826 case Builtins::kDistance:
5827 case Builtins::kFastDistance:
5828 return glsl::ExtInst::ExtInstDistance;
5829 case Builtins::kStep:
5830 return glsl::ExtInst::ExtInstStep;
5831 case Builtins::kSmoothstep:
5832 return glsl::ExtInst::ExtInstSmoothStep;
5833 case Builtins::kCross:
5834 return glsl::ExtInst::ExtInstCross;
5835 case Builtins::kNormalize:
5836 case Builtins::kFastNormalize:
5837 return glsl::ExtInst::ExtInstNormalize;
5838 default:
5839 break;
5840 }
5841
David Neto22f144c2017-06-12 14:26:21 -04005842 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005843 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5844 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5845 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005846 .Default(kGlslExtInstBad);
5847}
5848
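// Builtins handled here have no single GLSL.std.450 equivalent: the producer
// emits the returned extended instruction plus one follow-up instruction in
// HandleDeferredInstruction, e.g. clz uses FindUMsb then a subtraction from
// 31, and the *pi variants multiply the result by 1/pi.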
5849glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005850 switch (Builtins::Lookup(Name)) {
5851 case Builtins::kClz:
5852 return glsl::ExtInst::ExtInstFindUMsb;
5853 case Builtins::kAcospi:
5854 return glsl::ExtInst::ExtInstAcos;
5855 case Builtins::kAsinpi:
5856 return glsl::ExtInst::ExtInstAsin;
5857 case Builtins::kAtanpi:
5858 return glsl::ExtInst::ExtInstAtan;
5859 case Builtins::kAtan2pi:
5860 return glsl::ExtInst::ExtInstAtan2;
5861 default:
5862 break;
5863 }
5864 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005865}
5866
alan-bakerb6b09dc2018-11-08 16:59:28 -05005867glsl::ExtInst
5868SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005869 auto direct = getExtInstEnum(Name);
5870 if (direct != kGlslExtInstBad)
5871 return direct;
5872 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005873}
5874
David Neto22f144c2017-06-12 14:26:21 -04005875void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005876 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005877}
5878
5879void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5880 WriteOneWord(Inst->getResultID());
5881}
5882
5883void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5884 // High 16 bits : Word Count
5885 // Low 16 bits : Opcode
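// e.g. %uint = OpTypeInt 32 0 is 4 words with opcode 21, so its leading
// word is (4 << 16) | 21 = 0x00040015.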
5886 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005887 const uint32_t count = Inst->getWordCount();
5888 if (count > 65535) {
5889 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5890 llvm_unreachable("Word count too high");
5891 }
David Neto22f144c2017-06-12 14:26:21 -04005892 Word |= Inst->getWordCount() << 16;
5893 WriteOneWord(Word);
5894}
5895
David Netoef5ba2b2019-12-20 08:35:54 -05005896void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005897 SPIRVOperandType OpTy = Op->getType();
5898 switch (OpTy) {
5899 default: {
5900 llvm_unreachable("Unsupported SPIRV Operand Type???");
5901 break;
5902 }
5903 case SPIRVOperandType::NUMBERID: {
5904 WriteOneWord(Op->getNumID());
5905 break;
5906 }
5907 case SPIRVOperandType::LITERAL_STRING: {
5908 std::string Str = Op->getLiteralStr();
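// Literal strings are packed as UTF-8, four bytes per little-endian word,
// nul-terminated and zero-padded to a word boundary. e.g. "GLSL.std.450"
// (12 bytes) becomes three data words plus one all-zero terminator word,
// which is the LastWord written below when Remainder is 0.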
5909 const char *Data = Str.c_str();
5910 size_t WordSize = Str.size() / 4;
5911 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5912 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5913 }
5914
5915 uint32_t Remainder = Str.size() % 4;
5916 uint32_t LastWord = 0;
5917 if (Remainder) {
5918 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5919 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5920 }
5921 }
5922
5923 WriteOneWord(LastWord);
5924 break;
5925 }
5926 case SPIRVOperandType::LITERAL_INTEGER:
5927 case SPIRVOperandType::LITERAL_FLOAT: {
5928 auto LiteralNum = Op->getLiteralNum();
5929 // TODO: Handle LiteralNum carefully.
5930 for (auto Word : LiteralNum) {
5931 WriteOneWord(Word);
5932 }
5933 break;
5934 }
5935 }
5936}
5937
5938void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005939 for (int i = 0; i < kSectionCount; ++i) {
5940 WriteSPIRVBinary(SPIRVSections[i]);
5941 }
5942}
5943
5944void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
David Neto22f144c2017-06-12 14:26:21 -04005945
5946 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005947 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005948 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5949
5950 switch (Opcode) {
5951 default: {
David Neto5c22a252018-03-15 16:07:41 -04005952 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005953 llvm_unreachable("Unsupported SPIRV instruction");
5954 break;
5955 }
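// Instructions with no Result <id>: write every operand in order after the
// word count/opcode word.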
5956 case spv::OpCapability:
5957 case spv::OpExtension:
5958 case spv::OpMemoryModel:
5959 case spv::OpEntryPoint:
5960 case spv::OpExecutionMode:
5961 case spv::OpSource:
5962 case spv::OpDecorate:
5963 case spv::OpMemberDecorate:
5964 case spv::OpBranch:
5965 case spv::OpBranchConditional:
5966 case spv::OpSelectionMerge:
5967 case spv::OpLoopMerge:
5968 case spv::OpStore:
5969 case spv::OpImageWrite:
5970 case spv::OpReturnValue:
5971 case spv::OpControlBarrier:
5972 case spv::OpMemoryBarrier:
5973 case spv::OpReturn:
5974 case spv::OpFunctionEnd:
5975 case spv::OpCopyMemory: {
5976 WriteWordCountAndOpcode(Inst);
5977 for (uint32_t i = 0; i < Ops.size(); i++) {
5978 WriteOperand(Ops[i]);
5979 }
5980 break;
5981 }
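// Instructions that define a Result <id> but take no Result Type: the
// Result <id> is written immediately after the opcode word, followed by the
// remaining operands.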
5982 case spv::OpTypeBool:
5983 case spv::OpTypeVoid:
5984 case spv::OpTypeSampler:
5985 case spv::OpLabel:
5986 case spv::OpExtInstImport:
5987 case spv::OpTypePointer:
5988 case spv::OpTypeRuntimeArray:
5989 case spv::OpTypeStruct:
5990 case spv::OpTypeImage:
5991 case spv::OpTypeSampledImage:
5992 case spv::OpTypeInt:
5993 case spv::OpTypeFloat:
5994 case spv::OpTypeArray:
5995 case spv::OpTypeVector:
5996 case spv::OpTypeFunction: {
5997 WriteWordCountAndOpcode(Inst);
5998 WriteResultID(Inst);
5999 for (uint32_t i = 0; i < Ops.size(); i++) {
6000 WriteOperand(Ops[i]);
6001 }
6002 break;
6003 }
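// Instructions with both a Result Type and a Result <id>: Ops[0] holds the
// Result Type, the Result <id> is written after it, then the rest of the
// operands follow.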
6004 case spv::OpFunction:
6005 case spv::OpFunctionParameter:
6006 case spv::OpAccessChain:
6007 case spv::OpPtrAccessChain:
6008 case spv::OpInBoundsAccessChain:
6009 case spv::OpUConvert:
6010 case spv::OpSConvert:
6011 case spv::OpConvertFToU:
6012 case spv::OpConvertFToS:
6013 case spv::OpConvertUToF:
6014 case spv::OpConvertSToF:
6015 case spv::OpFConvert:
6016 case spv::OpConvertPtrToU:
6017 case spv::OpConvertUToPtr:
6018 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05006019 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04006020 case spv::OpIAdd:
6021 case spv::OpFAdd:
6022 case spv::OpISub:
6023 case spv::OpFSub:
6024 case spv::OpIMul:
6025 case spv::OpFMul:
6026 case spv::OpUDiv:
6027 case spv::OpSDiv:
6028 case spv::OpFDiv:
6029 case spv::OpUMod:
6030 case spv::OpSRem:
6031 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00006032 case spv::OpUMulExtended:
6033 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04006034 case spv::OpBitwiseOr:
6035 case spv::OpBitwiseXor:
6036 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04006037 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04006038 case spv::OpShiftLeftLogical:
6039 case spv::OpShiftRightLogical:
6040 case spv::OpShiftRightArithmetic:
6041 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04006042 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04006043 case spv::OpCompositeExtract:
6044 case spv::OpVectorExtractDynamic:
6045 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04006046 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04006047 case spv::OpVectorInsertDynamic:
6048 case spv::OpVectorShuffle:
6049 case spv::OpIEqual:
6050 case spv::OpINotEqual:
6051 case spv::OpUGreaterThan:
6052 case spv::OpUGreaterThanEqual:
6053 case spv::OpULessThan:
6054 case spv::OpULessThanEqual:
6055 case spv::OpSGreaterThan:
6056 case spv::OpSGreaterThanEqual:
6057 case spv::OpSLessThan:
6058 case spv::OpSLessThanEqual:
6059 case spv::OpFOrdEqual:
6060 case spv::OpFOrdGreaterThan:
6061 case spv::OpFOrdGreaterThanEqual:
6062 case spv::OpFOrdLessThan:
6063 case spv::OpFOrdLessThanEqual:
6064 case spv::OpFOrdNotEqual:
6065 case spv::OpFUnordEqual:
6066 case spv::OpFUnordGreaterThan:
6067 case spv::OpFUnordGreaterThanEqual:
6068 case spv::OpFUnordLessThan:
6069 case spv::OpFUnordLessThanEqual:
6070 case spv::OpFUnordNotEqual:
6071 case spv::OpExtInst:
6072 case spv::OpIsInf:
6073 case spv::OpIsNan:
6074 case spv::OpAny:
6075 case spv::OpAll:
6076 case spv::OpUndef:
6077 case spv::OpConstantNull:
6078 case spv::OpLogicalOr:
6079 case spv::OpLogicalAnd:
6080 case spv::OpLogicalNot:
6081 case spv::OpLogicalNotEqual:
6082 case spv::OpConstantComposite:
6083 case spv::OpSpecConstantComposite:
6084 case spv::OpConstantTrue:
6085 case spv::OpConstantFalse:
6086 case spv::OpConstant:
6087 case spv::OpSpecConstant:
6088 case spv::OpVariable:
6089 case spv::OpFunctionCall:
6090 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05006091 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04006092 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04006093 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05006094 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04006095 case spv::OpSelect:
6096 case spv::OpPhi:
6097 case spv::OpLoad:
6098 case spv::OpAtomicIAdd:
6099 case spv::OpAtomicISub:
6100 case spv::OpAtomicExchange:
6101 case spv::OpAtomicIIncrement:
6102 case spv::OpAtomicIDecrement:
6103 case spv::OpAtomicCompareExchange:
6104 case spv::OpAtomicUMin:
6105 case spv::OpAtomicSMin:
6106 case spv::OpAtomicUMax:
6107 case spv::OpAtomicSMax:
6108 case spv::OpAtomicAnd:
6109 case spv::OpAtomicOr:
6110 case spv::OpAtomicXor:
6111 case spv::OpDot: {
6112 WriteWordCountAndOpcode(Inst);
6113 WriteOperand(Ops[0]);
6114 WriteResultID(Inst);
6115 for (uint32_t i = 1; i < Ops.size(); i++) {
6116 WriteOperand(Ops[i]);
6117 }
6118 break;
6119 }
6120 }
6121 }
6122}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006123
alan-bakerb6b09dc2018-11-08 16:59:28 -05006124bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04006125 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006126 case Type::HalfTyID:
6127 case Type::FloatTyID:
6128 case Type::DoubleTyID:
6129 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04006130 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05006131 return true;
6132 case Type::PointerTyID: {
6133 const PointerType *pointer_type = cast<PointerType>(type);
6134 if (pointer_type->getPointerAddressSpace() !=
6135 AddressSpace::UniformConstant) {
6136 auto pointee_type = pointer_type->getPointerElementType();
6137 if (pointee_type->isStructTy() &&
6138 cast<StructType>(pointee_type)->isOpaque()) {
6139 // Images and samplers are not nullable.
6140 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006141 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006142 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006143 return true;
6144 }
6145 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04006146 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05006147 case Type::StructTyID: {
6148 const StructType *struct_type = cast<StructType>(type);
6149 // Images and samplers are not nullable.
6150 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006151 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006152 for (const auto element : struct_type->elements()) {
6153 if (!IsTypeNullable(element))
6154 return false;
6155 }
6156 return true;
6157 }
6158 default:
6159 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006160 }
6161}
Alan Bakerfcda9482018-10-02 17:09:59 -04006162
SJW77b87ad2020-04-21 14:37:52 -05006163void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04006164 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05006165 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04006166 // Metadata is stored as key-value pair operands. The first element of each
6167 // operand is the type and the second is a vector of offsets.
6168 for (const auto *operand : offsets_md->operands()) {
6169 const auto *pair = cast<MDTuple>(operand);
6170 auto *type =
6171 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6172 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6173 std::vector<uint32_t> offsets;
6174 for (const Metadata *offset_md : offset_vector->operands()) {
6175 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006176 offsets.push_back(static_cast<uint32_t>(
6177 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006178 }
6179 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6180 }
6181 }
6182
6183 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05006184 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04006185 // Metadata is stored as key-value pair operands. The first element of each
6186 // operand is the type and the second is a triple of sizes: type size in
6187 // bits, store size and alloc size.
6188 for (const auto *operand : sizes_md->operands()) {
6189 const auto *pair = cast<MDTuple>(operand);
6190 auto *type =
6191 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6192 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6193 uint64_t type_size_in_bits =
6194 cast<ConstantInt>(
6195 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6196 ->getZExtValue();
6197 uint64_t type_store_size =
6198 cast<ConstantInt>(
6199 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6200 ->getZExtValue();
6201 uint64_t type_alloc_size =
6202 cast<ConstantInt>(
6203 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6204 ->getZExtValue();
6205 RemappedUBOTypeSizes.insert(std::make_pair(
6206 type, std::make_tuple(type_size_in_bits, type_store_size,
6207 type_alloc_size)));
6208 }
6209 }
6210}
6211
6212uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6213 const DataLayout &DL) {
6214 auto iter = RemappedUBOTypeSizes.find(type);
6215 if (iter != RemappedUBOTypeSizes.end()) {
6216 return std::get<0>(iter->second);
6217 }
6218
6219 return DL.getTypeSizeInBits(type);
6220}
6221
6222uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6223 auto iter = RemappedUBOTypeSizes.find(type);
6224 if (iter != RemappedUBOTypeSizes.end()) {
6225 return std::get<1>(iter->second);
6226 }
6227
6228 return DL.getTypeStoreSize(type);
6229}
6230
uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<2>(iter->second);
  }

  return DL.getTypeAllocSize(type);
}

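// Returns the byte offset of |member| within |type| for explicit layout
// purposes. Remapped offsets recorded by PopulateUBOTypeMaps take precedence;
// otherwise the offset comes from the struct layout in |DL|.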
uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
    StructType *type, unsigned member, const DataLayout &DL) {
  const auto StructLayout = DL.getStructLayout(type);
  // Search for the correct offsets if this type was remapped.
  std::vector<uint32_t> *offsets = nullptr;
  auto iter = RemappedUBOTypeOffsets.find(type);
  if (iter != RemappedUBOTypeOffsets.end()) {
    offsets = &iter->second;
  }
  auto ByteOffset =
      static_cast<uint32_t>(StructLayout->getElementOffset(member));
  if (offsets) {
    ByteOffset = (*offsets)[member];
  }

  return ByteOffset;
}

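// Records the variable-pointers capability implied by forming a variable
// pointer into |address_space|: VariablePointersStorageBuffer for the
// StorageBuffer storage class, and the full VariablePointers capability
// otherwise.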
void SPIRVProducerPass::setVariablePointersCapabilities(
    unsigned address_space) {
  if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
    setVariablePointersStorageBuffer(true);
  } else {
    setVariablePointers(true);
  }
}

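// Strips GEPs to find the base pointer that |v| is derived from. Any value
// that is not a GEP is conservatively treated as its own base.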
Value *SPIRVProducerPass::GetBasePointer(Value *v) {
  if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
    return GetBasePointer(gep->getPointerOperand());
  }

  // Conservatively return |v|.
  return v;
}

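// Returns true if |lhs| and |rhs| can be shown to access the same resource:
// either two resource accessor calls with matching descriptor set and binding
// operands, or two workgroup accessor calls with matching spec id operands.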
bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
  if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
    if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
      if (lhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction()) &&
          rhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction())) {
        // For resource accessors, match descriptor set and binding.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
            lhs_call->getOperand(1) == rhs_call->getOperand(1))
          return true;
      } else if (lhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction()) &&
                 rhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction())) {
        // For workgroup resources, match spec id.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
          return true;
      }
    }
  }

  return false;
}

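// Returns true if the pointer-valued select or phi |inst| provably picks from
// a single base object: every incoming base must be the same value or the
// same resource, where null bases (and undef bases when Option::HackUndef()
// is set) are ignored.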
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is null, then we satisfy the same
    // object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}

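// Returns true if some call to |Arg|'s function can bind a coherent SSBO to
// |Arg|. Starting from the matching call operands, the search walks backwards
// through global-address-space pointer operands and function arguments until
// it reaches a resource accessor call, then checks that call's coherent
// operand (operand 5).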
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        resource_call->getCalledFunction()->getName().startswith(
            clspv::ResourceAccessorFunction())) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables.
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}

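// Precomputes structured control-flow information for every defined function:
// for each loop header, the merge (loop exit) and continue (back-edge) blocks
// are recorded in MergeBlocks and ContinueBlocks; for each remaining
// conditional branch that is not a loop back-edge and does not target a
// recorded merge or continue block, the false successor is recorded as its
// merge block. This assumes the CFG has already been structurized.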
void SPIRVProducerPass::PopulateStructuredCFGMaps() {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : *module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // The StructurizeCFG pass converts the CFG into a triangle shape
          // where each region has a single entry and a single exit. As a
          // result, a loop should not have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, the Continue Target must dominate the
          // back-edge block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether this block dominates the block with the
            // back-edge. The loop latch is the single block with a back-edge.
            // If possible, StructurizeCFG made the loop conform to this
            // requirement; otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // The StructurizeCFG pass has already manipulated the CFG, so
            // just use the false block of the branch instruction as the
            // merge block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}