blob: d3ed1e2081d63fc0290c7bb7d21ad094d922435f [file] [log] [blame]
David Neto22f144c2017-06-12 14:26:21 -04001// Copyright 2017 The Clspv Authors. All rights reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15#ifdef _MSC_VER
16#pragma warning(push, 0)
17#endif
18
David Neto156783e2017-07-05 15:39:41 -040019#include <cassert>
David Neto257c3892018-04-11 13:19:45 -040020#include <cstring>
David Neto118188e2018-08-24 11:27:54 -040021#include <iomanip>
22#include <list>
David Neto862b7d82018-06-14 18:48:37 -040023#include <memory>
David Neto118188e2018-08-24 11:27:54 -040024#include <set>
25#include <sstream>
26#include <string>
27#include <tuple>
28#include <unordered_set>
29#include <utility>
David Neto862b7d82018-06-14 18:48:37 -040030
David Neto118188e2018-08-24 11:27:54 -040031#include "llvm/ADT/StringSwitch.h"
32#include "llvm/ADT/UniqueVector.h"
33#include "llvm/Analysis/LoopInfo.h"
34#include "llvm/IR/Constants.h"
35#include "llvm/IR/Dominators.h"
36#include "llvm/IR/Instructions.h"
37#include "llvm/IR/Metadata.h"
38#include "llvm/IR/Module.h"
alan-bakerf67468c2019-11-25 15:51:49 -050039#include "llvm/IR/ValueSymbolTable.h"
David Neto118188e2018-08-24 11:27:54 -040040#include "llvm/Pass.h"
41#include "llvm/Support/CommandLine.h"
Kévin Petitbbbda972020-03-03 19:16:31 +000042#include "llvm/Support/MathExtras.h"
David Neto118188e2018-08-24 11:27:54 -040043#include "llvm/Support/raw_ostream.h"
44#include "llvm/Transforms/Utils/Cloning.h"
David Neto22f144c2017-06-12 14:26:21 -040045
alan-bakere0902602020-03-23 08:43:40 -040046#include "spirv/unified1/spirv.hpp"
David Neto118188e2018-08-24 11:27:54 -040047
David Neto85082642018-03-24 06:55:20 -070048#include "clspv/AddressSpace.h"
alan-bakerf5e5f692018-11-27 08:33:24 -050049#include "clspv/DescriptorMap.h"
David Neto118188e2018-08-24 11:27:54 -040050#include "clspv/Option.h"
David Neto85082642018-03-24 06:55:20 -070051#include "clspv/spirv_c_strings.hpp"
52#include "clspv/spirv_glsl.hpp"
David Neto22f144c2017-06-12 14:26:21 -040053
David Neto4feb7a42017-10-06 17:29:42 -040054#include "ArgKind.h"
alan-bakerf67468c2019-11-25 15:51:49 -050055#include "Builtins.h"
alan-baker06cad652019-12-03 17:56:47 -050056#include "ComputeStructuredOrder.h"
David Neto85082642018-03-24 06:55:20 -070057#include "ConstantEmitter.h"
Alan Baker202c8c72018-08-13 13:47:44 -040058#include "Constants.h"
David Neto78383442018-06-15 20:31:56 -040059#include "DescriptorCounter.h"
alan-baker56f7aff2019-05-22 08:06:42 -040060#include "NormalizeGlobalVariable.h"
Diego Novilloa4c44fa2019-04-11 10:56:15 -040061#include "Passes.h"
alan-bakera1be3322020-04-20 12:48:18 -040062#include "SpecConstant.h"
alan-bakerce179f12019-12-06 19:02:22 -050063#include "Types.h"
David Neto48f56a42017-10-06 16:44:25 -040064
David Neto22f144c2017-06-12 14:26:21 -040065#if defined(_MSC_VER)
66#pragma warning(pop)
67#endif
68
69using namespace llvm;
70using namespace clspv;
SJW173c7e92020-03-16 08:44:47 -050071using namespace clspv::Builtins;
David Neto156783e2017-07-05 15:39:41 -040072using namespace mdconst;
David Neto22f144c2017-06-12 14:26:21 -040073
74namespace {
David Netocd8ca5f2017-10-02 23:34:11 -040075
David Neto862b7d82018-06-14 18:48:37 -040076cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
77 cl::desc("Show resource variable creation"));
78
alan-baker5ed87542020-03-23 11:05:22 -040079cl::opt<bool>
80 ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
81 cl::desc("Dump the IR at the start of SPIRVProducer"));
82
David Neto862b7d82018-06-14 18:48:37 -040083// These hacks exist to help transition code generation algorithms
84// without making huge noise in detailed test output.
85const bool Hack_generate_runtime_array_stride_early = true;
86
David Neto3fbb4072017-10-16 11:28:14 -040087// The value of 1/pi. This value is from MSDN
88// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
89const double kOneOverPi = 0.318309886183790671538;
90const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);
91
alan-bakerb6b09dc2018-11-08 16:59:28 -050092const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";
David Netoab03f432017-11-03 17:00:44 -040093
SJW69939d52020-04-16 07:29:07 -050094// SPIRV Module Sections (per 2.4 of the SPIRV spec)
95// These are used to collect SPIRVInstructions by type on-the-fly.
96enum SPIRVSection {
97 kCapabilities,
98 kExtensions,
99 kImports,
100 kMemoryModel,
101 kEntryPoints,
102 kExecutionModes,
103
104 kDebug,
105 kAnnotations,
106
107 kTypes,
108 kConstants = kTypes,
109 kGlobalVariables,
110
111 kFunctions,
112
113 kSectionCount
114};
115
David Neto22f144c2017-06-12 14:26:21 -0400116enum SPIRVOperandType {
117 NUMBERID,
118 LITERAL_INTEGER,
119 LITERAL_STRING,
120 LITERAL_FLOAT
121};
122
123struct SPIRVOperand {
124 explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
125 : Type(Ty), LiteralNum(1, Num) {}
126 explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
127 : Type(Ty), LiteralStr(Str) {}
128 explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
129 : Type(Ty), LiteralStr(Str) {}
130 explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
131 : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}
132
James Price11010dc2019-12-19 13:53:09 -0500133 SPIRVOperandType getType() const { return Type; };
134 uint32_t getNumID() const { return LiteralNum[0]; };
135 std::string getLiteralStr() const { return LiteralStr; };
136 ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };
David Neto22f144c2017-06-12 14:26:21 -0400137
David Neto87846742018-04-11 17:36:22 -0400138 uint32_t GetNumWords() const {
139 switch (Type) {
140 case NUMBERID:
141 return 1;
142 case LITERAL_INTEGER:
143 case LITERAL_FLOAT:
David Netoee2660d2018-06-28 16:31:29 -0400144 return uint32_t(LiteralNum.size());
David Neto87846742018-04-11 17:36:22 -0400145 case LITERAL_STRING:
146 // Account for the terminating null character.
David Netoee2660d2018-06-28 16:31:29 -0400147 return uint32_t((LiteralStr.size() + 4) / 4);
David Neto87846742018-04-11 17:36:22 -0400148 }
149 llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
150 }
151
David Neto22f144c2017-06-12 14:26:21 -0400152private:
153 SPIRVOperandType Type;
154 std::string LiteralStr;
155 SmallVector<uint32_t, 4> LiteralNum;
156};
157
David Netoc6f3ab22018-04-06 18:02:31 -0400158class SPIRVOperandList {
159public:
David Netoef5ba2b2019-12-20 08:35:54 -0500160 typedef std::unique_ptr<SPIRVOperand> element_type;
161 typedef SmallVector<element_type, 8> container_type;
162 typedef container_type::iterator iterator;
David Netoc6f3ab22018-04-06 18:02:31 -0400163 SPIRVOperandList() {}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500164 SPIRVOperandList(const SPIRVOperandList &other) = delete;
165 SPIRVOperandList(SPIRVOperandList &&other) {
David Netoc6f3ab22018-04-06 18:02:31 -0400166 contents_ = std::move(other.contents_);
167 other.contents_.clear();
168 }
David Netoef5ba2b2019-12-20 08:35:54 -0500169 iterator begin() { return contents_.begin(); }
170 iterator end() { return contents_.end(); }
171 operator ArrayRef<element_type>() { return contents_; }
172 void push_back(element_type op) { contents_.push_back(std::move(op)); }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500173 void clear() { contents_.clear(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400174 size_t size() const { return contents_.size(); }
James Price11010dc2019-12-19 13:53:09 -0500175 const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400176
David Netoef5ba2b2019-12-20 08:35:54 -0500177 const container_type &getOperands() const { return contents_; }
David Neto87846742018-04-11 17:36:22 -0400178
David Netoc6f3ab22018-04-06 18:02:31 -0400179private:
David Netoef5ba2b2019-12-20 08:35:54 -0500180 container_type contents_;
David Netoc6f3ab22018-04-06 18:02:31 -0400181};
182
James Price11010dc2019-12-19 13:53:09 -0500183SPIRVOperandList &operator<<(SPIRVOperandList &list,
David Netoef5ba2b2019-12-20 08:35:54 -0500184 std::unique_ptr<SPIRVOperand> elem) {
185 list.push_back(std::move(elem));
David Netoc6f3ab22018-04-06 18:02:31 -0400186 return list;
187}
188
David Netoef5ba2b2019-12-20 08:35:54 -0500189std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
190 return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
David Netoc6f3ab22018-04-06 18:02:31 -0400191}
David Netoef5ba2b2019-12-20 08:35:54 -0500192std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
193 return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
David Neto257c3892018-04-11 13:19:45 -0400194}
David Netoef5ba2b2019-12-20 08:35:54 -0500195std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
196 return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
David Neto257c3892018-04-11 13:19:45 -0400197}
David Netoef5ba2b2019-12-20 08:35:54 -0500198std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
199 return std::make_unique<SPIRVOperand>(NUMBERID, id);
James Price11010dc2019-12-19 13:53:09 -0500200}
David Netoef5ba2b2019-12-20 08:35:54 -0500201std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
202 return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
David Neto257c3892018-04-11 13:19:45 -0400203}
David Netoc6f3ab22018-04-06 18:02:31 -0400204
David Neto22f144c2017-06-12 14:26:21 -0400205struct SPIRVInstruction {
David Netoef5ba2b2019-12-20 08:35:54 -0500206 // Creates an instruction with an opcode and no result ID, and with the given
207 // operands. This computes its own word count. Takes ownership of the
208 // operands and clears |Ops|.
209 SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
210 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
James Price11010dc2019-12-19 13:53:09 -0500211 for (auto &operand : Ops) {
David Netoee2660d2018-06-28 16:31:29 -0400212 WordCount += uint16_t(operand->GetNumWords());
David Neto87846742018-04-11 17:36:22 -0400213 }
David Netoef5ba2b2019-12-20 08:35:54 -0500214 Operands.reserve(Ops.size());
215 for (auto &ptr : Ops) {
216 Operands.emplace_back(std::move(ptr));
217 ptr.reset(nullptr);
David Neto87846742018-04-11 17:36:22 -0400218 }
David Netoef5ba2b2019-12-20 08:35:54 -0500219 Ops.clear();
220 }
221 // Creates an instruction with an opcode and a no-zero result ID, and
222 // with the given operands. This computes its own word count. Takes ownership
223 // of the operands and clears |Ops|.
224 SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
225 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
James Price11010dc2019-12-19 13:53:09 -0500226 for (auto &operand : Ops) {
David Neto87846742018-04-11 17:36:22 -0400227 WordCount += operand->GetNumWords();
228 }
David Netoef5ba2b2019-12-20 08:35:54 -0500229 Operands.reserve(Ops.size());
230 for (auto &ptr : Ops) {
231 Operands.emplace_back(std::move(ptr));
232 ptr.reset(nullptr);
233 }
234 if (ResID == 0) {
235 llvm_unreachable("Result ID of 0 was provided");
236 }
237 Ops.clear();
David Neto87846742018-04-11 17:36:22 -0400238 }
David Neto22f144c2017-06-12 14:26:21 -0400239
David Netoef5ba2b2019-12-20 08:35:54 -0500240 // Creates an instruction with an opcode and no result ID, and with the single
241 // operand. This computes its own word count.
242 SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
243 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
244 WordCount += operand->GetNumWords();
245 Operands.emplace_back(std::move(operand));
246 operand.reset(nullptr);
247 }
248 // Creates an instruction with an opcode and a non-zero result ID, and
249 // with the single operand. This computes its own word count.
250 SPIRVInstruction(spv::Op Opc, uint32_t ResID,
251 SPIRVOperandList::element_type operand)
252 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
253 WordCount += operand->GetNumWords();
254 if (ResID == 0) {
255 llvm_unreachable("Result ID of 0 was provided");
256 }
257 Operands.emplace_back(std::move(operand));
258 operand.reset(nullptr);
259 }
260 // Creates an instruction with an opcode and a no-zero result ID, and no
261 // operands.
262 SPIRVInstruction(spv::Op Opc, uint32_t ResID)
263 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
264 if (ResID == 0) {
265 llvm_unreachable("Result ID of 0 was provided");
266 }
267 }
268 // Creates an instruction with an opcode, no result ID, no type ID, and no
269 // operands.
270 SPIRVInstruction(spv::Op Opc)
271 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}
272
David Netoee2660d2018-06-28 16:31:29 -0400273 uint32_t getWordCount() const { return WordCount; }
David Neto22f144c2017-06-12 14:26:21 -0400274 uint16_t getOpcode() const { return Opcode; }
275 uint32_t getResultID() const { return ResultID; }
David Netoef5ba2b2019-12-20 08:35:54 -0500276 ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
James Price11010dc2019-12-19 13:53:09 -0500277 return Operands;
278 }
David Neto22f144c2017-06-12 14:26:21 -0400279
280private:
David Netoee2660d2018-06-28 16:31:29 -0400281 uint32_t WordCount; // Check the 16-bit bound at code generation time.
David Neto22f144c2017-06-12 14:26:21 -0400282 uint16_t Opcode;
283 uint32_t ResultID;
David Netoef5ba2b2019-12-20 08:35:54 -0500284 SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
David Neto22f144c2017-06-12 14:26:21 -0400285};
286
287struct SPIRVProducerPass final : public ModulePass {
David Neto22f144c2017-06-12 14:26:21 -0400288 typedef DenseMap<Type *, uint32_t> TypeMapType;
289 typedef UniqueVector<Type *> TypeList;
290 typedef DenseMap<Value *, uint32_t> ValueMapType;
David Netofb9a7972017-08-25 17:08:24 -0400291 typedef UniqueVector<Value *> ValueList;
David Neto22f144c2017-06-12 14:26:21 -0400292 typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
293 typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
David Neto87846742018-04-11 17:36:22 -0400294 // A vector of tuples, each of which is:
295 // - the LLVM instruction that we will later generate SPIR-V code for
296 // - where the SPIR-V instruction should be inserted
297 // - the result ID of the SPIR-V instruction
David Neto22f144c2017-06-12 14:26:21 -0400298 typedef std::vector<
299 std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
300 DeferredInstVecType;
301 typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
302 GlobalConstFuncMapType;
303
David Neto44795152017-07-13 15:45:28 -0400304 explicit SPIRVProducerPass(
alan-bakerf5e5f692018-11-27 08:33:24 -0500305 raw_pwrite_stream &out,
306 std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
alan-baker00e7a582019-06-07 12:54:21 -0400307 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
David Neto44795152017-07-13 15:45:28 -0400308 bool outputCInitList)
David Netoc2c368d2017-06-30 16:50:17 -0400309 : ModulePass(ID), samplerMap(samplerMap), out(out),
David Neto0676e6f2017-07-11 18:47:44 -0400310 binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
alan-baker00e7a582019-06-07 12:54:21 -0400311 descriptorMapEntries(descriptor_map_entries),
David Neto0676e6f2017-07-11 18:47:44 -0400312 outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
alan-baker5b86ed72019-02-15 08:26:50 -0500313 OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
314 HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
alan-bakera1be3322020-04-20 12:48:18 -0400315 WorkgroupSizeVarID(0) {}
David Neto22f144c2017-06-12 14:26:21 -0400316
James Price11010dc2019-12-19 13:53:09 -0500317 virtual ~SPIRVProducerPass() {
SJW69939d52020-04-16 07:29:07 -0500318 for (int i = 0; i < kSectionCount; ++i) {
319 for (auto *Inst : SPIRVSections[i]) {
320 delete Inst;
321 }
James Price11010dc2019-12-19 13:53:09 -0500322 }
323 }
324
David Neto22f144c2017-06-12 14:26:21 -0400325 void getAnalysisUsage(AnalysisUsage &AU) const override {
326 AU.addRequired<DominatorTreeWrapperPass>();
327 AU.addRequired<LoopInfoWrapperPass>();
328 }
329
330 virtual bool runOnModule(Module &module) override;
331
332 // output the SPIR-V header block
333 void outputHeader();
334
335 // patch the SPIR-V header block
336 void patchHeader();
337
338 uint32_t lookupType(Type *Ty) {
339 if (Ty->isPointerTy() &&
340 (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
341 auto PointeeTy = Ty->getPointerElementType();
342 if (PointeeTy->isStructTy() &&
343 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
344 Ty = PointeeTy;
345 }
346 }
347
David Neto862b7d82018-06-14 18:48:37 -0400348 auto where = TypeMap.find(Ty);
349 if (where == TypeMap.end()) {
350 if (Ty) {
351 errs() << "Unhandled type " << *Ty << "\n";
352 } else {
353 errs() << "Unhandled type (null)\n";
354 }
David Netoe439d702018-03-23 13:14:08 -0700355 llvm_unreachable("\nUnhandled type!");
David Neto22f144c2017-06-12 14:26:21 -0400356 }
357
David Neto862b7d82018-06-14 18:48:37 -0400358 return where->second;
David Neto22f144c2017-06-12 14:26:21 -0400359 }
360 TypeMapType &getImageTypeMap() { return ImageTypeMap; }
alan-bakerabd82722019-12-03 17:14:51 -0500361 TypeList &getImageTypeList() { return ImageTypeList; }
David Neto22f144c2017-06-12 14:26:21 -0400362 TypeList &getTypeList() { return Types; };
363 ValueList &getConstantList() { return Constants; };
364 ValueMapType &getValueMap() { return ValueMap; }
365 ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
SJW69939d52020-04-16 07:29:07 -0500366 SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
367 return SPIRVSections[Section];
368 };
David Neto22f144c2017-06-12 14:26:21 -0400369 EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
370 DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
371 ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
372 uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
373 std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
SJW2c317da2020-03-23 07:39:13 -0500374
alan-baker5b86ed72019-02-15 08:26:50 -0500375 bool hasVariablePointersStorageBuffer() {
376 return HasVariablePointersStorageBuffer;
377 }
378 void setVariablePointersStorageBuffer(bool Val) {
379 HasVariablePointersStorageBuffer = Val;
380 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400381 bool hasVariablePointers() { return HasVariablePointers; };
David Neto22f144c2017-06-12 14:26:21 -0400382 void setVariablePointers(bool Val) { HasVariablePointers = Val; };
alan-bakerb6b09dc2018-11-08 16:59:28 -0500383 ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
384 return samplerMap;
385 }
David Neto22f144c2017-06-12 14:26:21 -0400386 GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
387 return GlobalConstFuncTypeMap;
388 }
389 SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
390 return GlobalConstArgumentSet;
391 }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500392 TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }
David Neto22f144c2017-06-12 14:26:21 -0400393
David Netoc6f3ab22018-04-06 18:02:31 -0400394 void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
alan-bakerb6b09dc2018-11-08 16:59:28 -0500395 // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
396 // *not* be converted to a storage buffer, replace each such global variable
397 // with one in the storage class expecgted by SPIR-V.
David Neto862b7d82018-06-14 18:48:37 -0400398 void FindGlobalConstVars(Module &M, const DataLayout &DL);
399 // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
400 // ModuleOrderedResourceVars.
401 void FindResourceVars(Module &M, const DataLayout &DL);
Alan Baker202c8c72018-08-13 13:47:44 -0400402 void FindWorkgroupVars(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400403 bool FindExtInst(Module &M);
404 void FindTypePerGlobalVar(GlobalVariable &GV);
405 void FindTypePerFunc(Function &F);
David Neto862b7d82018-06-14 18:48:37 -0400406 void FindTypesForSamplerMap(Module &M);
407 void FindTypesForResourceVars(Module &M);
alan-bakerb6b09dc2018-11-08 16:59:28 -0500408 // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
409 // that |Ty| and its subtypes will need a corresponding SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400410 void FindType(Type *Ty);
411 void FindConstantPerGlobalVar(GlobalVariable &GV);
412 void FindConstantPerFunc(Function &F);
413 void FindConstant(Value *V);
414 void GenerateExtInstImport();
David Neto19a1bad2017-08-25 15:01:41 -0400415 // Generates instructions for SPIR-V types corresponding to the LLVM types
416 // saved in the |Types| member. A type follows its subtypes. IDs are
417 // allocated sequentially starting with the current value of nextID, and
418 // with a type following its subtypes. Also updates nextID to just beyond
419 // the last generated ID.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500420 void GenerateSPIRVTypes(LLVMContext &context, Module &module);
David Neto22f144c2017-06-12 14:26:21 -0400421 void GenerateSPIRVConstants();
David Neto5c22a252018-03-15 16:07:41 -0400422 void GenerateModuleInfo(Module &M);
alan-bakera1be3322020-04-20 12:48:18 -0400423 void GeneratePushConstantDescriptorMapEntries(Module &M);
424 void GenerateSpecConstantDescriptorMapEntries(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400425 void GenerateGlobalVar(GlobalVariable &GV);
alan-bakera1be3322020-04-20 12:48:18 -0400426 void GenerateWorkgroupVars(Module &M);
David Neto862b7d82018-06-14 18:48:37 -0400427 // Generate descriptor map entries for resource variables associated with
428 // arguments to F.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500429 void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
David Neto22f144c2017-06-12 14:26:21 -0400430 void GenerateSamplers(Module &M);
David Neto862b7d82018-06-14 18:48:37 -0400431 // Generate OpVariables for %clspv.resource.var.* calls.
432 void GenerateResourceVars(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400433 void GenerateFuncPrologue(Function &F);
434 void GenerateFuncBody(Function &F);
David Netob6e2e062018-04-25 10:32:06 -0400435 void GenerateEntryPointInitialStores();
David Neto22f144c2017-06-12 14:26:21 -0400436 spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
437 spv::Op GetSPIRVCastOpcode(Instruction &I);
438 spv::Op GetSPIRVBinaryOpcode(Instruction &I);
439 void GenerateInstruction(Instruction &I);
440 void GenerateFuncEpilogue();
441 void HandleDeferredInstruction();
alan-bakera1be3322020-04-20 12:48:18 -0400442 void HandleDeferredDecorations(Module &module);
David Neto22f144c2017-06-12 14:26:21 -0400443 bool is4xi8vec(Type *Ty) const;
444 spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
David Neto862b7d82018-06-14 18:48:37 -0400445 spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
David Neto22f144c2017-06-12 14:26:21 -0400446 spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
David Neto3fbb4072017-10-16 11:28:14 -0400447 // Returns the GLSL extended instruction enum that the given function
448 // call maps to. If none, then returns the 0 value, i.e. GLSLstd4580Bad.
David Neto22f144c2017-06-12 14:26:21 -0400449 glsl::ExtInst getExtInstEnum(StringRef Name);
David Neto3fbb4072017-10-16 11:28:14 -0400450 // Returns the GLSL extended instruction enum indirectly used by the given
451 // function. That is, to implement the given function, we use an extended
452 // instruction plus one more instruction. If none, then returns the 0 value,
453 // i.e. GLSLstd4580Bad.
454 glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
455 // Returns the single GLSL extended instruction used directly or
456 // indirectly by the given function call.
457 glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
David Neto22f144c2017-06-12 14:26:21 -0400458 void WriteOneWord(uint32_t Word);
459 void WriteResultID(SPIRVInstruction *Inst);
460 void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
David Netoef5ba2b2019-12-20 08:35:54 -0500461 void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
David Neto22f144c2017-06-12 14:26:21 -0400462 void WriteSPIRVBinary();
SJW69939d52020-04-16 07:29:07 -0500463 void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);
David Neto22f144c2017-06-12 14:26:21 -0400464
Alan Baker9bf93fb2018-08-28 16:59:26 -0400465 // Returns true if |type| is compatible with OpConstantNull.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500466 bool IsTypeNullable(const Type *type) const;
Alan Baker9bf93fb2018-08-28 16:59:26 -0400467
Alan Bakerfcda9482018-10-02 17:09:59 -0400468 // Populate UBO remapped type maps.
469 void PopulateUBOTypeMaps(Module &module);
470
alan-baker06cad652019-12-03 17:56:47 -0500471 // Populate the merge and continue block maps.
472 void PopulateStructuredCFGMaps(Module &module);
473
Alan Bakerfcda9482018-10-02 17:09:59 -0400474 // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
475 // uses the internal map, otherwise it falls back on the data layout.
476 uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
477 uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
478 uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
Kévin Petitbbbda972020-03-03 19:16:31 +0000479 uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
480 unsigned member,
481 const DataLayout &DL);
Alan Bakerfcda9482018-10-02 17:09:59 -0400482
alan-baker5b86ed72019-02-15 08:26:50 -0500483 // Returns the base pointer of |v|.
484 Value *GetBasePointer(Value *v);
485
486 // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| base on
487 // |address_space|.
488 void setVariablePointersCapabilities(unsigned address_space);
489
490 // Returns true if |lhs| and |rhs| represent the same resource or workgroup
491 // variable.
492 bool sameResource(Value *lhs, Value *rhs) const;
493
494 // Returns true if |inst| is phi or select that selects from the same
495 // structure (or null).
496 bool selectFromSameObject(Instruction *inst);
497
alan-bakere9308012019-03-15 10:25:13 -0400498 // Returns true if |Arg| is called with a coherent resource.
499 bool CalledWithCoherentResource(Argument &Arg);
500
David Neto22f144c2017-06-12 14:26:21 -0400501private:
502 static char ID;
David Neto44795152017-07-13 15:45:28 -0400503 ArrayRef<std::pair<unsigned, std::string>> samplerMap;
David Neto22f144c2017-06-12 14:26:21 -0400504 raw_pwrite_stream &out;
David Neto0676e6f2017-07-11 18:47:44 -0400505
506 // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
507 // convert to other formats on demand?
508
509 // When emitting a C initialization list, the WriteSPIRVBinary method
510 // will actually write its words to this vector via binaryTempOut.
511 SmallVector<char, 100> binaryTempUnderlyingVector;
512 raw_svector_ostream binaryTempOut;
513
514 // Binary output writes to this stream, which might be |out| or
515 // |binaryTempOut|. It's the latter when we really want to write a C
516 // initializer list.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400517 raw_pwrite_stream *binaryOut;
alan-bakerf5e5f692018-11-27 08:33:24 -0500518 std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
David Neto0676e6f2017-07-11 18:47:44 -0400519 const bool outputCInitList; // If true, output look like {0x7023, ... , 5}
David Neto22f144c2017-06-12 14:26:21 -0400520 uint64_t patchBoundOffset;
521 uint32_t nextID;
522
alan-bakerf67468c2019-11-25 15:51:49 -0500523 // ID for OpTypeInt 32 1.
524 uint32_t int32ID = 0;
525 // ID for OpTypeVector %int 4.
526 uint32_t v4int32ID = 0;
527
David Neto19a1bad2017-08-25 15:01:41 -0400528 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400529 TypeMapType TypeMap;
David Neto19a1bad2017-08-25 15:01:41 -0400530 // Maps an LLVM image type to its SPIR-V ID.
David Neto22f144c2017-06-12 14:26:21 -0400531 TypeMapType ImageTypeMap;
alan-bakerabd82722019-12-03 17:14:51 -0500532 // A unique-vector of LLVM image types. This list is used to provide
533 // deterministic traversal of image types.
534 TypeList ImageTypeList;
David Neto19a1bad2017-08-25 15:01:41 -0400535 // A unique-vector of LLVM types that map to a SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400536 TypeList Types;
537 ValueList Constants;
David Neto19a1bad2017-08-25 15:01:41 -0400538 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400539 ValueMapType ValueMap;
540 ValueMapType AllocatedValueMap;
SJW69939d52020-04-16 07:29:07 -0500541 SPIRVInstructionList SPIRVSections[kSectionCount];
David Neto862b7d82018-06-14 18:48:37 -0400542
David Neto22f144c2017-06-12 14:26:21 -0400543 EntryPointVecType EntryPointVec;
544 DeferredInstVecType DeferredInstVec;
545 ValueList EntryPointInterfacesVec;
546 uint32_t OpExtInstImportID;
547 std::vector<uint32_t> BuiltinDimensionVec;
alan-baker5b86ed72019-02-15 08:26:50 -0500548 bool HasVariablePointersStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -0400549 bool HasVariablePointers;
550 Type *SamplerTy;
alan-baker09cb9802019-12-10 13:16:27 -0500551 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Netoc77d9e22018-03-24 06:30:28 -0700552
553 // If a function F has a pointer-to-__constant parameter, then this variable
David Neto9ed8e2f2018-03-24 06:47:24 -0700554 // will map F's type to (G, index of the parameter), where in a first phase
555 // G is F's type. During FindTypePerFunc, G will be changed to F's type
556 // but replacing the pointer-to-constant parameter with
557 // pointer-to-ModuleScopePrivate.
David Netoc77d9e22018-03-24 06:30:28 -0700558 // TODO(dneto): This doesn't seem general enough? A function might have
559 // more than one such parameter.
David Neto22f144c2017-06-12 14:26:21 -0400560 GlobalConstFuncMapType GlobalConstFuncTypeMap;
561 SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
David Neto1a1a0582017-07-07 12:01:44 -0400562 // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
David Neto85082642018-03-24 06:55:20 -0700563 // or array types, and which point into transparent memory (StorageBuffer
564 // storage class). These will require an ArrayStride decoration.
David Neto1a1a0582017-07-07 12:01:44 -0400565 // See SPV_KHR_variable_pointers rev 13.
David Neto85082642018-03-24 06:55:20 -0700566 TypeList TypesNeedingArrayStride;
David Netoa60b00b2017-09-15 16:34:09 -0400567
568 // This is truly ugly, but works around what look like driver bugs.
569 // For get_local_size, an earlier part of the flow has created a module-scope
570 // variable in Private address space to hold the value for the workgroup
571 // size. Its intializer is a uint3 value marked as builtin WorkgroupSize.
572 // When this is present, save the IDs of the initializer value and variable
573 // in these two variables. We only ever do a vector load from it, and
574 // when we see one of those, substitute just the value of the intializer.
575 // This mimics what Glslang does, and that's what drivers are used to.
David Neto66cfe642018-03-24 06:13:56 -0700576 // TODO(dneto): Remove this once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -0400577 uint32_t WorkgroupSizeValueID;
578 uint32_t WorkgroupSizeVarID;
David Neto26aaf622017-10-23 18:11:53 -0400579
David Neto862b7d82018-06-14 18:48:37 -0400580 // Bookkeeping for mapping kernel arguments to resource variables.
581 struct ResourceVarInfo {
582 ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
alan-bakere9308012019-03-15 10:25:13 -0400583 Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
David Neto862b7d82018-06-14 18:48:37 -0400584 : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
alan-bakere9308012019-03-15 10:25:13 -0400585 var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
David Neto862b7d82018-06-14 18:48:37 -0400586 addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
587 const int index; // Index into ResourceVarInfoList
588 const unsigned descriptor_set;
589 const unsigned binding;
590 Function *const var_fn; // The @clspv.resource.var.* function.
591 const clspv::ArgKind arg_kind;
alan-bakere9308012019-03-15 10:25:13 -0400592 const int coherent;
David Neto862b7d82018-06-14 18:48:37 -0400593 const unsigned addr_space; // The LLVM address space
594 // The SPIR-V ID of the OpVariable. Not populated at construction time.
595 uint32_t var_id = 0;
596 };
597 // A list of resource var info. Each one correponds to a module-scope
598 // resource variable we will have to create. Resource var indices are
599 // indices into this vector.
600 SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
601 // This is a vector of pointers of all the resource vars, but ordered by
602 // kernel function, and then by argument.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500603 UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
David Neto862b7d82018-06-14 18:48:37 -0400604 // Map a function to the ordered list of resource variables it uses, one for
605 // each argument. If an argument does not use a resource variable, it
606 // will have a null pointer entry.
607 using FunctionToResourceVarsMapType =
608 DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
609 FunctionToResourceVarsMapType FunctionToResourceVarsMap;
610
611 // What LLVM types map to SPIR-V types needing layout? These are the
612 // arrays and structures supporting storage buffers and uniform buffers.
613 TypeList TypesNeedingLayout;
614 // What LLVM struct types map to a SPIR-V struct type with Block decoration?
615 UniqueVector<StructType *> StructTypesNeedingBlock;
616 // For a call that represents a load from an opaque type (samplers, images),
617 // map it to the variable id it should load from.
618 DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;
David Neto85082642018-03-24 06:55:20 -0700619
David Netoc6f3ab22018-04-06 18:02:31 -0400620 // An ordered list of the kernel arguments of type pointer-to-local.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500621 using LocalArgList = SmallVector<Argument *, 8>;
David Netoc6f3ab22018-04-06 18:02:31 -0400622 LocalArgList LocalArgs;
623 // Information about a pointer-to-local argument.
624 struct LocalArgInfo {
625 // The SPIR-V ID of the array variable.
626 uint32_t variable_id;
627 // The element type of the
alan-bakerb6b09dc2018-11-08 16:59:28 -0500628 Type *elem_type;
David Netoc6f3ab22018-04-06 18:02:31 -0400629 // The ID of the array type.
630 uint32_t array_size_id;
631 // The ID of the array type.
632 uint32_t array_type_id;
633 // The ID of the pointer to the array type.
634 uint32_t ptr_array_type_id;
David Netoc6f3ab22018-04-06 18:02:31 -0400635 // The specialization constant ID of the array size.
636 int spec_id;
637 };
Alan Baker202c8c72018-08-13 13:47:44 -0400638 // A mapping from Argument to its assigned SpecId.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500639 DenseMap<const Argument *, int> LocalArgSpecIds;
Alan Baker202c8c72018-08-13 13:47:44 -0400640 // A mapping from SpecId to its LocalArgInfo.
641 DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
Alan Bakerfcda9482018-10-02 17:09:59 -0400642 // A mapping from a remapped type to its real offsets.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500643 DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
Alan Bakerfcda9482018-10-02 17:09:59 -0400644 // A mapping from a remapped type to its real sizes.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500645 DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
646 RemappedUBOTypeSizes;
alan-baker06cad652019-12-03 17:56:47 -0500647
648 // Maps basic block to its merge block.
649 DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
650 // Maps basic block to its continue block.
651 DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
David Neto22f144c2017-06-12 14:26:21 -0400652};
653
654char SPIRVProducerPass::ID;
David Netoc6f3ab22018-04-06 18:02:31 -0400655
alan-bakerb6b09dc2018-11-08 16:59:28 -0500656} // namespace
David Neto22f144c2017-06-12 14:26:21 -0400657
658namespace clspv {
alan-bakerf5e5f692018-11-27 08:33:24 -0500659ModulePass *createSPIRVProducerPass(
660 raw_pwrite_stream &out,
661 std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
alan-baker00e7a582019-06-07 12:54:21 -0400662 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
alan-bakerf5e5f692018-11-27 08:33:24 -0500663 bool outputCInitList) {
664 return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
alan-baker00e7a582019-06-07 12:54:21 -0400665 outputCInitList);
David Neto22f144c2017-06-12 14:26:21 -0400666}
David Netoc2c368d2017-06-30 16:50:17 -0400667} // namespace clspv
David Neto22f144c2017-06-12 14:26:21 -0400668
669bool SPIRVProducerPass::runOnModule(Module &module) {
alan-baker5ed87542020-03-23 11:05:22 -0400670 if (ShowProducerIR) {
671 llvm::outs() << module << "\n";
672 }
David Neto0676e6f2017-07-11 18:47:44 -0400673 binaryOut = outputCInitList ? &binaryTempOut : &out;
674
Alan Bakerfcda9482018-10-02 17:09:59 -0400675 PopulateUBOTypeMaps(module);
alan-baker06cad652019-12-03 17:56:47 -0500676 PopulateStructuredCFGMaps(module);
Alan Bakerfcda9482018-10-02 17:09:59 -0400677
David Neto22f144c2017-06-12 14:26:21 -0400678 // SPIR-V always begins with its header information
679 outputHeader();
680
David Netoc6f3ab22018-04-06 18:02:31 -0400681 const DataLayout &DL = module.getDataLayout();
682
David Neto22f144c2017-06-12 14:26:21 -0400683 // Gather information from the LLVM IR that we require.
David Netoc6f3ab22018-04-06 18:02:31 -0400684 GenerateLLVMIRInfo(module, DL);
David Neto22f144c2017-06-12 14:26:21 -0400685
David Neto22f144c2017-06-12 14:26:21 -0400686 // Collect information on global variables too.
687 for (GlobalVariable &GV : module.globals()) {
688 // If the GV is one of our special __spirv_* variables, remove the
689 // initializer as it was only placed there to force LLVM to not throw the
690 // value away.
Kévin Petitbbbda972020-03-03 19:16:31 +0000691 if (GV.getName().startswith("__spirv_") ||
692 GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
David Neto22f144c2017-06-12 14:26:21 -0400693 GV.setInitializer(nullptr);
694 }
695
696 // Collect types' information from global variable.
697 FindTypePerGlobalVar(GV);
698
699 // Collect constant information from global variable.
700 FindConstantPerGlobalVar(GV);
701
702 // If the variable is an input, entry points need to know about it.
703 if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
David Netofb9a7972017-08-25 17:08:24 -0400704 getEntryPointInterfacesVec().insert(&GV);
David Neto22f144c2017-06-12 14:26:21 -0400705 }
706 }
707
708 // If there are extended instructions, generate OpExtInstImport.
709 if (FindExtInst(module)) {
710 GenerateExtInstImport();
711 }
712
713 // Generate SPIRV instructions for types.
Alan Bakerfcda9482018-10-02 17:09:59 -0400714 GenerateSPIRVTypes(module.getContext(), module);
David Neto22f144c2017-06-12 14:26:21 -0400715
716 // Generate SPIRV constants.
717 GenerateSPIRVConstants();
718
alan-baker09cb9802019-12-10 13:16:27 -0500719 // Generate literal samplers if necessary.
720 GenerateSamplers(module);
David Neto22f144c2017-06-12 14:26:21 -0400721
Kévin Petitbbbda972020-03-03 19:16:31 +0000722 // Generate descriptor map entries for all push constants
alan-bakera1be3322020-04-20 12:48:18 -0400723 GeneratePushConstantDescriptorMapEntries(module);
Kévin Petitbbbda972020-03-03 19:16:31 +0000724
David Neto22f144c2017-06-12 14:26:21 -0400725 // Generate SPIRV variables.
726 for (GlobalVariable &GV : module.globals()) {
727 GenerateGlobalVar(GV);
728 }
David Neto862b7d82018-06-14 18:48:37 -0400729 GenerateResourceVars(module);
alan-bakera1be3322020-04-20 12:48:18 -0400730 GenerateWorkgroupVars(module);
David Neto22f144c2017-06-12 14:26:21 -0400731
732 // Generate SPIRV instructions for each function.
733 for (Function &F : module) {
734 if (F.isDeclaration()) {
735 continue;
736 }
737
David Neto862b7d82018-06-14 18:48:37 -0400738 GenerateDescriptorMapInfo(DL, F);
739
David Neto22f144c2017-06-12 14:26:21 -0400740 // Generate Function Prologue.
741 GenerateFuncPrologue(F);
742
743 // Generate SPIRV instructions for function body.
744 GenerateFuncBody(F);
745
746 // Generate Function Epilogue.
747 GenerateFuncEpilogue();
748 }
749
750 HandleDeferredInstruction();
alan-bakera1be3322020-04-20 12:48:18 -0400751 HandleDeferredDecorations(module);
752
753 // Generate descriptor map entries for module scope specialization constants.
754 GenerateSpecConstantDescriptorMapEntries(module);
David Neto22f144c2017-06-12 14:26:21 -0400755
756 // Generate SPIRV module information.
David Neto5c22a252018-03-15 16:07:41 -0400757 GenerateModuleInfo(module);
David Neto22f144c2017-06-12 14:26:21 -0400758
alan-baker00e7a582019-06-07 12:54:21 -0400759 WriteSPIRVBinary();
David Neto22f144c2017-06-12 14:26:21 -0400760
761 // We need to patch the SPIR-V header to set bound correctly.
762 patchHeader();
David Neto0676e6f2017-07-11 18:47:44 -0400763
764 if (outputCInitList) {
765 bool first = true;
David Neto0676e6f2017-07-11 18:47:44 -0400766 std::ostringstream os;
767
David Neto57fb0b92017-08-04 15:35:09 -0400768 auto emit_word = [&os, &first](uint32_t word) {
David Neto0676e6f2017-07-11 18:47:44 -0400769 if (!first)
David Neto57fb0b92017-08-04 15:35:09 -0400770 os << ",\n";
771 os << word;
David Neto0676e6f2017-07-11 18:47:44 -0400772 first = false;
773 };
774
775 os << "{";
David Neto57fb0b92017-08-04 15:35:09 -0400776 const std::string str(binaryTempOut.str());
777 for (unsigned i = 0; i < str.size(); i += 4) {
778 const uint32_t a = static_cast<unsigned char>(str[i]);
779 const uint32_t b = static_cast<unsigned char>(str[i + 1]);
780 const uint32_t c = static_cast<unsigned char>(str[i + 2]);
781 const uint32_t d = static_cast<unsigned char>(str[i + 3]);
782 emit_word(a | (b << 8) | (c << 16) | (d << 24));
David Neto0676e6f2017-07-11 18:47:44 -0400783 }
784 os << "}\n";
785 out << os.str();
786 }
787
David Neto22f144c2017-06-12 14:26:21 -0400788 return false;
789}
790
791void SPIRVProducerPass::outputHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400792 binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
793 sizeof(spv::MagicNumber));
alan-bakere0902602020-03-23 08:43:40 -0400794 const uint32_t spv_version = 0x10000; // SPIR-V 1.0
795 binaryOut->write(reinterpret_cast<const char *>(&spv_version),
796 sizeof(spv_version));
David Neto22f144c2017-06-12 14:26:21 -0400797
alan-baker0c18ab02019-06-12 10:23:21 -0400798 // use Google's vendor ID
799 const uint32_t vendor = 21 << 16;
alan-baker00e7a582019-06-07 12:54:21 -0400800 binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));
David Neto22f144c2017-06-12 14:26:21 -0400801
alan-baker00e7a582019-06-07 12:54:21 -0400802 // we record where we need to come back to and patch in the bound value
803 patchBoundOffset = binaryOut->tell();
David Neto22f144c2017-06-12 14:26:21 -0400804
alan-baker00e7a582019-06-07 12:54:21 -0400805 // output a bad bound for now
806 binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));
David Neto22f144c2017-06-12 14:26:21 -0400807
alan-baker00e7a582019-06-07 12:54:21 -0400808 // output the schema (reserved for use and must be 0)
809 const uint32_t schema = 0;
810 binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
David Neto22f144c2017-06-12 14:26:21 -0400811}
812
813void SPIRVProducerPass::patchHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400814 // for a binary we just write the value of nextID over bound
815 binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
816 patchBoundOffset);
David Neto22f144c2017-06-12 14:26:21 -0400817}
818
David Netoc6f3ab22018-04-06 18:02:31 -0400819void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
David Neto22f144c2017-06-12 14:26:21 -0400820 // This function generates LLVM IR for function such as global variable for
821 // argument, constant and pointer type for argument access. These information
822 // is artificial one because we need Vulkan SPIR-V output. This function is
823 // executed ahead of FindType and FindConstant.
David Neto22f144c2017-06-12 14:26:21 -0400824 LLVMContext &Context = M.getContext();
825
David Neto862b7d82018-06-14 18:48:37 -0400826 FindGlobalConstVars(M, DL);
David Neto5c22a252018-03-15 16:07:41 -0400827
David Neto862b7d82018-06-14 18:48:37 -0400828 FindResourceVars(M, DL);
David Neto22f144c2017-06-12 14:26:21 -0400829
830 bool HasWorkGroupBuiltin = false;
831 for (GlobalVariable &GV : M.globals()) {
832 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
833 if (spv::BuiltInWorkgroupSize == BuiltinType) {
834 HasWorkGroupBuiltin = true;
835 }
836 }
837
David Neto862b7d82018-06-14 18:48:37 -0400838 FindTypesForSamplerMap(M);
839 FindTypesForResourceVars(M);
Alan Baker202c8c72018-08-13 13:47:44 -0400840 FindWorkgroupVars(M);
David Neto22f144c2017-06-12 14:26:21 -0400841
842 for (Function &F : M) {
Kévin Petitabef4522019-03-27 13:08:01 +0000843 if (F.isDeclaration()) {
David Neto22f144c2017-06-12 14:26:21 -0400844 continue;
845 }
846
847 for (BasicBlock &BB : F) {
848 for (Instruction &I : BB) {
849 if (I.getOpcode() == Instruction::ZExt ||
850 I.getOpcode() == Instruction::SExt ||
851 I.getOpcode() == Instruction::UIToFP) {
852 // If there is zext with i1 type, it will be changed to OpSelect. The
853 // OpSelect needs constant 0 and 1 so the constants are added here.
854
855 auto OpTy = I.getOperand(0)->getType();
856
Kévin Petit24272b62018-10-18 19:16:12 +0000857 if (OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -0400858 if (I.getOpcode() == Instruction::ZExt) {
David Neto22f144c2017-06-12 14:26:21 -0400859 FindConstant(Constant::getNullValue(I.getType()));
Kévin Petit7bfb8992019-02-26 13:45:08 +0000860 FindConstant(ConstantInt::get(I.getType(), 1));
David Neto22f144c2017-06-12 14:26:21 -0400861 } else if (I.getOpcode() == Instruction::SExt) {
David Neto22f144c2017-06-12 14:26:21 -0400862 FindConstant(Constant::getNullValue(I.getType()));
Kévin Petit7bfb8992019-02-26 13:45:08 +0000863 FindConstant(ConstantInt::getSigned(I.getType(), -1));
David Neto22f144c2017-06-12 14:26:21 -0400864 } else {
865 FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
866 FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
867 }
868 }
869 } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
David Neto862b7d82018-06-14 18:48:37 -0400870 StringRef callee_name = Call->getCalledFunction()->getName();
David Neto22f144c2017-06-12 14:26:21 -0400871
872 // Handle image type specially.
SJW173c7e92020-03-16 08:44:47 -0500873 if (IsImageBuiltin(callee_name)) {
David Neto22f144c2017-06-12 14:26:21 -0400874 TypeMapType &OpImageTypeMap = getImageTypeMap();
875 Type *ImageTy =
876 Call->getArgOperand(0)->getType()->getPointerElementType();
877 OpImageTypeMap[ImageTy] = 0;
alan-bakerabd82722019-12-03 17:14:51 -0500878 getImageTypeList().insert(ImageTy);
alan-baker75090e42020-02-20 11:21:04 -0500879 }
David Neto22f144c2017-06-12 14:26:21 -0400880
SJW173c7e92020-03-16 08:44:47 -0500881 if (IsSampledImageRead(callee_name)) {
alan-bakerf67468c2019-11-25 15:51:49 -0500882 // All sampled reads need a floating point 0 for the Lod operand.
David Neto22f144c2017-06-12 14:26:21 -0400883 FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
SJW2c317da2020-03-23 07:39:13 -0500884 } else if (IsUnsampledImageRead(callee_name)) {
alan-baker75090e42020-02-20 11:21:04 -0500885 // All unsampled reads need an integer 0 for the Lod operand.
886 FindConstant(ConstantInt::get(Context, APInt(32, 0)));
SJW2c317da2020-03-23 07:39:13 -0500887 } else if (IsImageQuery(callee_name)) {
alan-bakerce179f12019-12-06 19:02:22 -0500888 Type *ImageTy = Call->getOperand(0)->getType();
889 const uint32_t dim = ImageDimensionality(ImageTy);
alan-baker7150a1d2020-02-25 08:31:06 -0500890 uint32_t components =
891 dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -0500892 if (components > 1) {
893 // OpImageQuerySize* return |components| components.
894 FindType(VectorType::get(Type::getInt32Ty(Context), components));
895 if (dim == 3 && IsGetImageDim(callee_name)) {
896 // get_image_dim for 3D images returns an int4.
897 FindType(
898 VectorType::get(Type::getInt32Ty(Context), components + 1));
899 }
900 }
901
SJW173c7e92020-03-16 08:44:47 -0500902 if (IsSampledImageType(ImageTy)) {
alan-bakerce179f12019-12-06 19:02:22 -0500903 // All sampled image queries need a integer 0 for the Lod
904 // operand.
905 FindConstant(ConstantInt::get(Context, APInt(32, 0)));
906 }
David Neto5c22a252018-03-15 16:07:41 -0400907 }
David Neto22f144c2017-06-12 14:26:21 -0400908 }
909 }
910 }
911
Kévin Petitabef4522019-03-27 13:08:01 +0000912 // More things to do on kernel functions
913 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
914 if (const MDNode *MD =
915 dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
916 // We generate constants if the WorkgroupSize builtin is being used.
917 if (HasWorkGroupBuiltin) {
918 // Collect constant information for work group size.
919 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
920 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
921 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -0400922 }
923 }
924 }
925
alan-bakerf67468c2019-11-25 15:51:49 -0500926 // TODO(alan-baker): make this better.
alan-bakerf906d2b2019-12-10 11:26:23 -0500927 if (M.getTypeByName("opencl.image1d_ro_t.float") ||
928 M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
929 M.getTypeByName("opencl.image1d_wo_t.float") ||
930 M.getTypeByName("opencl.image2d_ro_t.float") ||
alan-bakerf67468c2019-11-25 15:51:49 -0500931 M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
932 M.getTypeByName("opencl.image2d_wo_t.float") ||
933 M.getTypeByName("opencl.image3d_ro_t.float") ||
934 M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
alan-baker7150a1d2020-02-25 08:31:06 -0500935 M.getTypeByName("opencl.image3d_wo_t.float") ||
936 M.getTypeByName("opencl.image1d_array_ro_t.float") ||
937 M.getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
938 M.getTypeByName("opencl.image1d_array_wo_t.float") ||
939 M.getTypeByName("opencl.image2d_array_ro_t.float") ||
940 M.getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
941 M.getTypeByName("opencl.image2d_array_wo_t.float")) {
alan-bakerf67468c2019-11-25 15:51:49 -0500942 FindType(Type::getFloatTy(Context));
alan-bakerf906d2b2019-12-10 11:26:23 -0500943 } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
944 M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
945 M.getTypeByName("opencl.image1d_wo_t.uint") ||
946 M.getTypeByName("opencl.image2d_ro_t.uint") ||
alan-bakerf67468c2019-11-25 15:51:49 -0500947 M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
948 M.getTypeByName("opencl.image2d_wo_t.uint") ||
949 M.getTypeByName("opencl.image3d_ro_t.uint") ||
950 M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
alan-baker7150a1d2020-02-25 08:31:06 -0500951 M.getTypeByName("opencl.image3d_wo_t.uint") ||
952 M.getTypeByName("opencl.image1d_array_ro_t.uint") ||
953 M.getTypeByName("opencl.image1d_array_ro_t.uint.sampled") ||
954 M.getTypeByName("opencl.image1d_array_wo_t.uint") ||
955 M.getTypeByName("opencl.image2d_array_ro_t.uint") ||
956 M.getTypeByName("opencl.image2d_array_ro_t.uint.sampled") ||
957 M.getTypeByName("opencl.image2d_array_wo_t.uint")) {
alan-bakerf67468c2019-11-25 15:51:49 -0500958 FindType(Type::getInt32Ty(Context));
alan-bakerf906d2b2019-12-10 11:26:23 -0500959 } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
960 M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
961 M.getTypeByName("opencl.image1d_wo_t.int") ||
962 M.getTypeByName("opencl.image2d_ro_t.int") ||
alan-bakerf67468c2019-11-25 15:51:49 -0500963 M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
964 M.getTypeByName("opencl.image2d_wo_t.int") ||
965 M.getTypeByName("opencl.image3d_ro_t.int") ||
966 M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
alan-baker7150a1d2020-02-25 08:31:06 -0500967 M.getTypeByName("opencl.image3d_wo_t.int") ||
968 M.getTypeByName("opencl.image1d_array_ro_t.int") ||
969 M.getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
970 M.getTypeByName("opencl.image1d_array_wo_t.int") ||
971 M.getTypeByName("opencl.image2d_array_ro_t.int") ||
972 M.getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
973 M.getTypeByName("opencl.image2d_array_wo_t.int")) {
alan-bakerf67468c2019-11-25 15:51:49 -0500974 // Nothing for now...
975 } else {
976 // This was likely an UndefValue.
David Neto22f144c2017-06-12 14:26:21 -0400977 FindType(Type::getFloatTy(Context));
978 }
979
980 // Collect types' information from function.
981 FindTypePerFunc(F);
982
983 // Collect constant information from function.
984 FindConstantPerFunc(F);
985 }
986}
987
David Neto862b7d82018-06-14 18:48:37 -0400988void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
alan-baker56f7aff2019-05-22 08:06:42 -0400989 clspv::NormalizeGlobalVariables(M);
990
David Neto862b7d82018-06-14 18:48:37 -0400991 SmallVector<GlobalVariable *, 8> GVList;
992 SmallVector<GlobalVariable *, 8> DeadGVList;
993 for (GlobalVariable &GV : M.globals()) {
994 if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
995 if (GV.use_empty()) {
996 DeadGVList.push_back(&GV);
997 } else {
998 GVList.push_back(&GV);
999 }
1000 }
1001 }
1002
1003 // Remove dead global __constant variables.
1004 for (auto GV : DeadGVList) {
1005 GV->eraseFromParent();
1006 }
1007 DeadGVList.clear();
1008
1009 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1010 // For now, we only support a single storage buffer.
1011 if (GVList.size() > 0) {
1012 assert(GVList.size() == 1);
1013 const auto *GV = GVList[0];
1014 const auto constants_byte_size =
Alan Bakerfcda9482018-10-02 17:09:59 -04001015 (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
David Neto862b7d82018-06-14 18:48:37 -04001016 const size_t kConstantMaxSize = 65536;
1017 if (constants_byte_size > kConstantMaxSize) {
1018 outs() << "Max __constant capacity of " << kConstantMaxSize
1019 << " bytes exceeded: " << constants_byte_size << " bytes used\n";
1020 llvm_unreachable("Max __constant capacity exceeded");
1021 }
1022 }
1023 } else {
1024 // Change global constant variable's address space to ModuleScopePrivate.
1025 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
1026 for (auto GV : GVList) {
1027 // Create new gv with ModuleScopePrivate address space.
1028 Type *NewGVTy = GV->getType()->getPointerElementType();
1029 GlobalVariable *NewGV = new GlobalVariable(
1030 M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
1031 nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
1032 NewGV->takeName(GV);
1033
1034 const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
1035 SmallVector<User *, 8> CandidateUsers;
1036
1037 auto record_called_function_type_as_user =
1038 [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
1039 // Find argument index.
1040 unsigned index = 0;
1041 for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
1042 if (gv == call->getOperand(i)) {
1043 // TODO(dneto): Should we break here?
1044 index = i;
1045 }
1046 }
1047
1048 // Record function type with global constant.
1049 GlobalConstFuncTyMap[call->getFunctionType()] =
1050 std::make_pair(call->getFunctionType(), index);
1051 };
1052
1053 for (User *GVU : GVUsers) {
1054 if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
1055 record_called_function_type_as_user(GV, Call);
1056 } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
1057 // Check GEP users.
1058 for (User *GEPU : GEP->users()) {
1059 if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
1060 record_called_function_type_as_user(GEP, GEPCall);
1061 }
1062 }
1063 }
1064
1065 CandidateUsers.push_back(GVU);
1066 }
1067
1068 for (User *U : CandidateUsers) {
1069 // Update users of gv with new gv.
alan-bakered80f572019-02-11 17:28:26 -05001070 if (!isa<Constant>(U)) {
1071 // #254: Can't change operands of a constant, but this shouldn't be
1072 // something that sticks around in the module.
1073 U->replaceUsesOfWith(GV, NewGV);
1074 }
David Neto862b7d82018-06-14 18:48:37 -04001075 }
1076
1077 // Delete original gv.
1078 GV->eraseFromParent();
1079 }
1080 }
1081}
1082
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001083void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
David Neto862b7d82018-06-14 18:48:37 -04001084 ResourceVarInfoList.clear();
1085 FunctionToResourceVarsMap.clear();
1086 ModuleOrderedResourceVars.reset();
1087 // Normally, there is one resource variable per clspv.resource.var.*
1088 // function, since that is unique'd by arg type and index. By design,
1089 // we can share these resource variables across kernels because all
1090 // kernels use the same descriptor set.
1091 //
1092 // But if the user requested distinct descriptor sets per kernel, then
1093 // the descriptor allocator has made different (set,binding) pairs for
1094 // the same (type,arg_index) pair. Since we can decorate a resource
1095 // variable with only exactly one DescriptorSet and Binding, we are
1096 // forced in this case to make distinct resource variables whenever
Kévin Petitbbbda972020-03-03 19:16:31 +00001097 // the same clspv.resource.var.X function is seen with distinct
David Neto862b7d82018-06-14 18:48:37 -04001098 // (set,binding) values.
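  // For illustration only (hypothetical IR): if kernels A and B both call the
  // same @clspv.resource.var.N function with (set=0, binding=0), they share
  // one ResourceVarInfo; with distinct descriptor sets per kernel the calls
  // carry different (set,binding) operands and each call site gets its own
  // ResourceVarInfo in the loop below.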
1099 const bool always_distinct_sets =
1100 clspv::Option::DistinctKernelDescriptorSets();
1101 for (Function &F : M) {
1102 // Rely on the fact that the resource var functions have a stable
1103 // ordering in the module.
Alan Baker202c8c72018-08-13 13:47:44 -04001104 if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001105 // Find all calls to this function with distinct set and binding pairs.
1106 // Save them in ResourceVarInfoList.
1107
1108 // Determine uniqueness of the (set,binding) pairs only within this
1109 // one resource-var builtin function.
1110 using SetAndBinding = std::pair<unsigned, unsigned>;
1111 // Maps set and binding to the resource var info.
1112 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1113 bool first_use = true;
1114 for (auto &U : F.uses()) {
1115 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1116 const auto set = unsigned(
1117 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1118 const auto binding = unsigned(
1119 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1120 const auto arg_kind = clspv::ArgKind(
1121 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1122 const auto arg_index = unsigned(
1123 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001124 const auto coherent = unsigned(
1125 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001126
1127 // Find or make the resource var info for this combination.
1128 ResourceVarInfo *rv = nullptr;
1129 if (always_distinct_sets) {
1130 // Make a new resource var any time we see a different
1131 // (set,binding) pair.
1132 SetAndBinding key{set, binding};
1133 auto where = set_and_binding_map.find(key);
1134 if (where == set_and_binding_map.end()) {
1135 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001136 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001137 ResourceVarInfoList.emplace_back(rv);
1138 set_and_binding_map[key] = rv;
1139 } else {
1140 rv = where->second;
1141 }
1142 } else {
1143 // The default is to make exactly one resource for each
1144 // clspv.resource.var.* function.
1145 if (first_use) {
1146 first_use = false;
1147 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001148 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001149 ResourceVarInfoList.emplace_back(rv);
1150 } else {
1151 rv = ResourceVarInfoList.back().get();
1152 }
1153 }
1154
1155 // Now populate FunctionToResourceVarsMap.
1156 auto &mapping =
1157 FunctionToResourceVarsMap[call->getParent()->getParent()];
1158 while (mapping.size() <= arg_index) {
1159 mapping.push_back(nullptr);
1160 }
1161 mapping[arg_index] = rv;
1162 }
1163 }
1164 }
1165 }
1166
1167 // Populate ModuleOrderedResourceVars.
1168 for (Function &F : M) {
1169 auto where = FunctionToResourceVarsMap.find(&F);
1170 if (where != FunctionToResourceVarsMap.end()) {
1171 for (auto &rv : where->second) {
1172 if (rv != nullptr) {
1173 ModuleOrderedResourceVars.insert(rv);
1174 }
1175 }
1176 }
1177 }
1178 if (ShowResourceVars) {
1179 for (auto *info : ModuleOrderedResourceVars) {
1180 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1181 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1182 << "\n";
1183 }
1184 }
1185}
1186
David Neto22f144c2017-06-12 14:26:21 -04001187bool SPIRVProducerPass::FindExtInst(Module &M) {
1188 LLVMContext &Context = M.getContext();
1189 bool HasExtInst = false;
1190
1191 for (Function &F : M) {
1192 for (BasicBlock &BB : F) {
1193 for (Instruction &I : BB) {
1194 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1195 Function *Callee = Call->getCalledFunction();
1196 // Check whether this call is for extend instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001197 auto callee_name = Callee->getName();
1198 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1199 const glsl::ExtInst IndirectEInst =
1200 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001201
David Neto3fbb4072017-10-16 11:28:14 -04001202 HasExtInst |=
1203 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1204
1205 if (IndirectEInst) {
1206 // Register extra constants if needed.
1207
1208 // Registers a type and constant for computing the result of the
1209 // given instruction. If the result of the instruction is a vector,
1210 // then make a splat vector constant with the same number of
1211 // elements.
1212 auto register_constant = [this, &I](Constant *constant) {
1213 FindType(constant->getType());
1214 FindConstant(constant);
1215 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1216 // Register the splat vector of the value with the same
1217 // width as the result of the instruction.
1218 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001219 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001220 constant);
1221 FindConstant(vec_constant);
1222 FindType(vec_constant->getType());
1223 }
1224 };
1225 switch (IndirectEInst) {
1226 case glsl::ExtInstFindUMsb:
1227 // clz needs OpExtInst and OpISub with constant 31, or splat
1228 // vector of 31. Add it to the constant list here.
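                  // Rough sketch of the eventual lowering (ids invented):
                  //   %msb = OpExtInst %uint %glsl_import FindUMsb %x
                  //   %clz = OpISub %uint %uint_31 %msb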
1229 register_constant(
1230 ConstantInt::get(Type::getInt32Ty(Context), 31));
1231 break;
1232 case glsl::ExtInstAcos:
1233 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001234 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001235 case glsl::ExtInstAtan2:
1236 // We need 1/pi for acospi, asinpi, atanpi and atan2pi.
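                  // E.g. acospi(x) is assumed to lower to acos(x) scaled by
                  // this 1/pi constant (illustrative sketch only).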
1237 register_constant(
1238 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1239 break;
1240 default:
1241 assert(false && "internally inconsistent");
1242 }
David Neto22f144c2017-06-12 14:26:21 -04001243 }
1244 }
1245 }
1246 }
1247 }
1248
1249 return HasExtInst;
1250}
1251
1252void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1253 // Investigate global variable's type.
1254 FindType(GV.getType());
1255}
1256
1257void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1258 // Investigate function's type.
1259 FunctionType *FTy = F.getFunctionType();
1260
1261 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1262 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001263 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001264 if (GlobalConstFuncTyMap.count(FTy)) {
1265 uint32_t GVCstArgIdx = GlobalConstFuncTyMap[FTy].second;
1266 SmallVector<Type *, 4> NewFuncParamTys;
1267 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1268 Type *ParamTy = FTy->getParamType(i);
1269 if (i == GVCstArgIdx) {
1270 Type *EleTy = ParamTy->getPointerElementType();
1271 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1272 }
1273
1274 NewFuncParamTys.push_back(ParamTy);
1275 }
1276
1277 FunctionType *NewFTy =
1278 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1279 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1280 FTy = NewFTy;
1281 }
1282
1283 FindType(FTy);
1284 } else {
1285 // Kernel functions have no parameters at this point, so create a new,
1286 // parameterless function type and add it to the type map.
1287 SmallVector<Type *, 4> NewFuncParamTys;
1288 FunctionType *NewFTy =
1289 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1290 FindType(NewFTy);
1291 }
1292
1293 // Investigate instructions' type in function body.
1294 for (BasicBlock &BB : F) {
1295 for (Instruction &I : BB) {
1296 if (isa<ShuffleVectorInst>(I)) {
1297 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1298 // Ignore type for mask of shuffle vector instruction.
1299 if (i == 2) {
1300 continue;
1301 }
1302
1303 Value *Op = I.getOperand(i);
1304 if (!isa<MetadataAsValue>(Op)) {
1305 FindType(Op->getType());
1306 }
1307 }
1308
1309 FindType(I.getType());
1310 continue;
1311 }
1312
David Neto862b7d82018-06-14 18:48:37 -04001313 CallInst *Call = dyn_cast<CallInst>(&I);
1314
1315 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001316 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001317 // This is a fake call representing access to a resource variable.
1318 // We handle that elsewhere.
1319 continue;
1320 }
1321
Alan Baker202c8c72018-08-13 13:47:44 -04001322 if (Call && Call->getCalledFunction()->getName().startswith(
1323 clspv::WorkgroupAccessorFunction())) {
1324 // This is a fake call representing access to a workgroup variable.
1325 // We handle that elsewhere.
1326 continue;
1327 }
1328
alan-bakerf083bed2020-01-29 08:15:42 -05001329 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1330 // OpCompositeExtract, which take literal values for indices. As a result,
1331 // don't map the types of the indices.
1332 if (I.getOpcode() == Instruction::ExtractValue) {
1333 FindType(I.getOperand(0)->getType());
1334 continue;
1335 }
1336 if (I.getOpcode() == Instruction::InsertValue) {
1337 FindType(I.getOperand(0)->getType());
1338 FindType(I.getOperand(1)->getType());
1339 continue;
1340 }
1341
1342 // #497: InsertElement and ExtractElement map to OpCompositeInsert and
1343 // OpCompositeExtract if the index is a constant. In such a case don't map the index type.
1344 if (I.getOpcode() == Instruction::ExtractElement) {
1345 FindType(I.getOperand(0)->getType());
1346 Value *op1 = I.getOperand(1);
1347 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1348 FindType(op1->getType());
1349 }
1350 continue;
1351 }
1352 if (I.getOpcode() == Instruction::InsertElement) {
1353 FindType(I.getOperand(0)->getType());
1354 FindType(I.getOperand(1)->getType());
1355 Value *op2 = I.getOperand(2);
1356 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1357 FindType(op2->getType());
1358 }
1359 continue;
1360 }
1361
David Neto22f144c2017-06-12 14:26:21 -04001362 // Work through the operands of the instruction.
1363 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1364 Value *const Op = I.getOperand(i);
1365 // If any of the operands is a constant, find the type!
1366 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1367 FindType(Op->getType());
1368 }
1369 }
1370
1371 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001372 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001373 // Avoid checking a call instruction's operand types.
1374 break;
1375 }
Alan Baker202c8c72018-08-13 13:47:44 -04001376 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1377 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1378 clspv::WorkgroupAccessorFunction())) {
1379 // This is a fake call representing access to a workgroup variable.
1380 // We handle that elsewhere.
1381 continue;
1382 }
1383 }
David Neto22f144c2017-06-12 14:26:21 -04001384 if (!isa<MetadataAsValue>(&Op)) {
1385 FindType(Op->getType());
1386 continue;
1387 }
1388 }
1389
David Neto22f144c2017-06-12 14:26:21 -04001390 // We don't want to track the type of this call as we are going to replace
1391 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001392 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001393 Call->getCalledFunction()->getName())) {
1394 continue;
1395 }
1396
1397 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1398 // If the GEP's base operand is in the ModuleScopePrivate address space,
1399 // then the GEP result is also in the ModuleScopePrivate address space.
1400 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1401 // Add pointer type with private address space for global constant to
1402 // type list.
1403 Type *EleTy = I.getType()->getPointerElementType();
1404 Type *NewPTy =
1405 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1406
1407 FindType(NewPTy);
1408 continue;
1409 }
1410 }
1411
1412 FindType(I.getType());
1413 }
1414 }
1415}
1416
David Neto862b7d82018-06-14 18:48:37 -04001417void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1418 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001419 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001420 0 < getSamplerMap().size()) {
1421 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1422 if (!SamplerStructTy) {
1423 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1424 }
1425
1426 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1427
1428 FindType(SamplerTy);
1429 }
1430}
1431
1432void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1433 // Record types so they are generated.
1434 TypesNeedingLayout.reset();
1435 StructTypesNeedingBlock.reset();
1436
1437 // To match older clspv codegen, generate the float type first if required
1438 // for images.
1439 for (const auto *info : ModuleOrderedResourceVars) {
1440 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1441 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001442 if (IsIntImageType(info->var_fn->getReturnType())) {
1443 // Nothing for now...
1444 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1445 FindType(Type::getInt32Ty(M.getContext()));
1446 }
1447
1448 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001449 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001450 }
1451 }
1452
1453 for (const auto *info : ModuleOrderedResourceVars) {
1454 Type *type = info->var_fn->getReturnType();
1455
1456 switch (info->arg_kind) {
1457 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001458 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001459 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1460 StructTypesNeedingBlock.insert(sty);
1461 } else {
1462 errs() << *type << "\n";
1463 llvm_unreachable("Buffer arguments must map to structures!");
1464 }
1465 break;
1466 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001467 case clspv::ArgKind::PodUBO:
1468 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001469 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1470 StructTypesNeedingBlock.insert(sty);
1471 } else {
1472 errs() << *type << "\n";
1473 llvm_unreachable("POD arguments must map to structures!");
1474 }
1475 break;
1476 case clspv::ArgKind::ReadOnlyImage:
1477 case clspv::ArgKind::WriteOnlyImage:
1478 case clspv::ArgKind::Sampler:
1479 // Sampler and image types map to the pointee type but
1480 // in the uniform constant address space.
1481 type = PointerType::get(type->getPointerElementType(),
1482 clspv::AddressSpace::UniformConstant);
1483 break;
1484 default:
1485 break;
1486 }
1487
1488 // The converted type is the type of the OpVariable we will generate.
1489 // If the pointee type is an array of size zero, FindType will convert it
1490 // to a runtime array.
1491 FindType(type);
1492 }
1493
alan-bakerdcd97412019-09-16 15:32:30 -04001494 // If module constants are clustered in a storage buffer then that struct
1495 // needs layout decorations.
1496 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1497 for (GlobalVariable &GV : M.globals()) {
1498 PointerType *PTy = cast<PointerType>(GV.getType());
1499 const auto AS = PTy->getAddressSpace();
1500 const bool module_scope_constant_external_init =
1501 (AS == AddressSpace::Constant) && GV.hasInitializer();
1502 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1503 if (module_scope_constant_external_init &&
1504 spv::BuiltInMax == BuiltinType) {
1505 StructTypesNeedingBlock.insert(
1506 cast<StructType>(PTy->getPointerElementType()));
1507 }
1508 }
1509 }
1510
Kévin Petitbbbda972020-03-03 19:16:31 +00001511 for (const GlobalVariable &GV : M.globals()) {
1512 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1513 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1514 assert(Ty->isStructTy() && "Push constants have to be structures.");
1515 auto STy = cast<StructType>(Ty);
1516 StructTypesNeedingBlock.insert(STy);
1517 }
1518 }
1519
David Neto862b7d82018-06-14 18:48:37 -04001520 // Traverse the arrays and structures underneath each Block, and
1521 // mark them as needing layout.
1522 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1523 StructTypesNeedingBlock.end());
1524 while (!work_list.empty()) {
1525 Type *type = work_list.back();
1526 work_list.pop_back();
1527 TypesNeedingLayout.insert(type);
1528 switch (type->getTypeID()) {
1529 case Type::ArrayTyID:
1530 work_list.push_back(type->getArrayElementType());
1531 if (!Hack_generate_runtime_array_stride_early) {
1532 // Remember this array type for deferred decoration.
1533 TypesNeedingArrayStride.insert(type);
1534 }
1535 break;
1536 case Type::StructTyID:
1537 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1538 work_list.push_back(elem_ty);
1539 }
1540 default:
1541 // This type and its contained types don't get layout.
1542 break;
1543 }
1544 }
1545}
1546
Alan Baker202c8c72018-08-13 13:47:44 -04001547void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1548 // The SpecId assignment for pointer-to-local arguments is recorded in
1549 // module-level metadata. Translate that information into local argument
1550 // information.
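  // Each operand is expected to be a tuple of
  // (kernel function, argument index, SpecId), e.g. (names invented):
  //   !{void (i32 addrspace(3)*)* @foo, i32 2, i32 3}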
1551 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001552 if (!nmd)
1553 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001554 for (auto operand : nmd->operands()) {
1555 MDTuple *tuple = cast<MDTuple>(operand);
1556 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1557 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001558 ConstantAsMetadata *arg_index_md =
1559 cast<ConstantAsMetadata>(tuple->getOperand(1));
1560 int arg_index = static_cast<int>(
1561 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1562 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001563
1564 ConstantAsMetadata *spec_id_md =
1565 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001566 int spec_id = static_cast<int>(
1567 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001568
Alan Baker202c8c72018-08-13 13:47:44 -04001569 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001570 if (LocalSpecIdInfoMap.count(spec_id))
1571 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001572
1573 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1574 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1575 nextID + 1, nextID + 2,
1576 nextID + 3, spec_id};
1577 LocalSpecIdInfoMap[spec_id] = info;
1578 nextID += 4;
1579
1580 // Ensure the types necessary for this argument get generated.
1581 Type *IdxTy = Type::getInt32Ty(M.getContext());
1582 FindConstant(ConstantInt::get(IdxTy, 0));
1583 FindType(IdxTy);
1584 FindType(arg->getType());
1585 }
1586}
1587
David Neto22f144c2017-06-12 14:26:21 -04001588void SPIRVProducerPass::FindType(Type *Ty) {
1589 TypeList &TyList = getTypeList();
1590
1591 if (0 != TyList.idFor(Ty)) {
1592 return;
1593 }
1594
1595 if (Ty->isPointerTy()) {
1596 auto AddrSpace = Ty->getPointerAddressSpace();
1597 if ((AddressSpace::Constant == AddrSpace) ||
1598 (AddressSpace::Global == AddrSpace)) {
1599 auto PointeeTy = Ty->getPointerElementType();
1600
1601 if (PointeeTy->isStructTy() &&
1602 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1603 FindType(PointeeTy);
1604 auto ActualPointerTy =
1605 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1606 FindType(ActualPointerTy);
1607 return;
1608 }
1609 }
1610 }
1611
David Neto862b7d82018-06-14 18:48:37 -04001612 // By convention, an LLVM array type with 0 elements maps to
1613 // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which has a
1614 // constant number of elements, so we also need the type of that length
1615 // constant.
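  // For example (illustrative):
  //   [0 x float] maps to OpTypeRuntimeArray %float
  //   [4 x float] maps to OpTypeArray %float %uint_4, which needs the i32
  //   constant 4.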
1616 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1617 if (arrayTy->getNumElements() > 0) {
1618 LLVMContext &Context = Ty->getContext();
1619 FindType(Type::getInt32Ty(Context));
1620 }
David Neto22f144c2017-06-12 14:26:21 -04001621 }
1622
1623 for (Type *SubTy : Ty->subtypes()) {
1624 FindType(SubTy);
1625 }
1626
1627 TyList.insert(Ty);
1628}
1629
1630void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1631 // If the global variable has a (non undef) initializer.
1632 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001633 // Generate the constant if it's not the initializer to a module scope
1634 // constant that we will expect in a storage buffer.
1635 const bool module_scope_constant_external_init =
1636 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1637 clspv::Option::ModuleConstantsInStorageBuffer();
1638 if (!module_scope_constant_external_init) {
1639 FindConstant(GV.getInitializer());
1640 }
David Neto22f144c2017-06-12 14:26:21 -04001641 }
1642}
1643
1644void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1645 // Investigate constants in function body.
1646 for (BasicBlock &BB : F) {
1647 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001648 if (auto *call = dyn_cast<CallInst>(&I)) {
1649 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001650 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001651 // We've handled these constants elsewhere, so skip it.
1652 continue;
1653 }
Alan Baker202c8c72018-08-13 13:47:44 -04001654 if (name.startswith(clspv::ResourceAccessorFunction())) {
1655 continue;
1656 }
1657 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001658 continue;
1659 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001660 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1661 // Skip the first operand that has the SPIR-V Opcode
1662 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1663 if (isa<Constant>(I.getOperand(i)) &&
1664 !isa<GlobalValue>(I.getOperand(i))) {
1665 FindConstant(I.getOperand(i));
1666 }
1667 }
1668 continue;
1669 }
David Neto22f144c2017-06-12 14:26:21 -04001670 }
1671
1672 if (isa<AllocaInst>(I)) {
1673 // An alloca instruction has a constant for the number of elements. Ignore it.
1674 continue;
1675 } else if (isa<ShuffleVectorInst>(I)) {
1676 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1677 // Ignore constant for mask of shuffle vector instruction.
1678 if (i == 2) {
1679 continue;
1680 }
1681
1682 if (isa<Constant>(I.getOperand(i)) &&
1683 !isa<GlobalValue>(I.getOperand(i))) {
1684 FindConstant(I.getOperand(i));
1685 }
1686 }
1687
1688 continue;
1689 } else if (isa<InsertElementInst>(I)) {
1690 // Handle InsertElement with <4 x i8> specially.
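        // With <4 x i8> packed into an i32, inserting byte k is roughly
        //   (v & ~(0xFF << (k*8))) | ((x & 0xFF) << (k*8))
        // (a sketch, not the emitted code), hence the 0xFF mask and k*8
        // shift constants registered below.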
1691 Type *CompositeTy = I.getOperand(0)->getType();
1692 if (is4xi8vec(CompositeTy)) {
1693 LLVMContext &Context = CompositeTy->getContext();
1694 if (isa<Constant>(I.getOperand(0))) {
1695 FindConstant(I.getOperand(0));
1696 }
1697
1698 if (isa<Constant>(I.getOperand(1))) {
1699 FindConstant(I.getOperand(1));
1700 }
1701
1702 // Add mask constant 0xFF.
1703 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1704 FindConstant(CstFF);
1705
1706 // Add shift amount constant.
1707 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1708 uint64_t Idx = CI->getZExtValue();
1709 Constant *CstShiftAmount =
1710 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1711 FindConstant(CstShiftAmount);
1712 }
1713
1714 continue;
1715 }
1716
1717 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1718 // Ignore constant for index of InsertElement instruction.
1719 if (i == 2) {
1720 continue;
1721 }
1722
1723 if (isa<Constant>(I.getOperand(i)) &&
1724 !isa<GlobalValue>(I.getOperand(i))) {
1725 FindConstant(I.getOperand(i));
1726 }
1727 }
1728
1729 continue;
1730 } else if (isa<ExtractElementInst>(I)) {
1731 // Handle ExtractElement with <4 x i8> specially.
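        // For the packed i32 form, extracting byte k is roughly
        //   (v >> (k*8)) & 0xFF
        // (sketch only), which is why the 0xFF mask and shift constants are
        // registered here.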
1732 Type *CompositeTy = I.getOperand(0)->getType();
1733 if (is4xi8vec(CompositeTy)) {
1734 LLVMContext &Context = CompositeTy->getContext();
1735 if (isa<Constant>(I.getOperand(0))) {
1736 FindConstant(I.getOperand(0));
1737 }
1738
1739 // Add mask constant 0xFF.
1740 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1741 FindConstant(CstFF);
1742
1743 // Add shift amount constant.
1744 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1745 uint64_t Idx = CI->getZExtValue();
1746 Constant *CstShiftAmount =
1747 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1748 FindConstant(CstShiftAmount);
1749 } else {
1750 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1751 FindConstant(Cst8);
1752 }
1753
1754 continue;
1755 }
1756
1757 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1758 // Ignore constant for index of ExtractElement instruction.
1759 if (i == 1) {
1760 continue;
1761 }
1762
1763 if (isa<Constant>(I.getOperand(i)) &&
1764 !isa<GlobalValue>(I.getOperand(i))) {
1765 FindConstant(I.getOperand(i));
1766 }
1767 }
1768
1769 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001770 } else if ((Instruction::Xor == I.getOpcode()) &&
1771 I.getType()->isIntegerTy(1)) {
1772 // We special-case Xor where the type is i1 and one of the arguments is a
1773 // constant 1 (true): this becomes an OpLogicalNot in SPIR-V, so we don't
1774 // need the constant.
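        // E.g. "%r = xor i1 %b, true" is expected to become OpLogicalNot %b.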
David Neto22f144c2017-06-12 14:26:21 -04001775 bool foundConstantTrue = false;
1776 for (Use &Op : I.operands()) {
1777 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1778 auto CI = cast<ConstantInt>(Op);
1779
1780 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001781 // If we already found the true constant, we might (probably only
1782 // on -O0) have an OpLogicalNot which is taking a constant
1783 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001784 FindConstant(Op);
1785 } else {
1786 foundConstantTrue = true;
1787 }
1788 }
1789 }
1790
1791 continue;
David Netod2de94a2017-08-28 17:27:47 -04001792 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001793 // Special case if i8 is not generally handled.
1794 if (!clspv::Option::Int8Support()) {
1795 // For truncation to i8 we mask against 255.
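          // E.g. "trunc i32 %x to i8" is expected to be emitted as a bitwise
          // AND of %x with 255 (sketch only).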
1796 Type *ToTy = I.getType();
1797 if (8u == ToTy->getPrimitiveSizeInBits()) {
1798 LLVMContext &Context = ToTy->getContext();
1799 Constant *Cst255 =
1800 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1801 FindConstant(Cst255);
1802 }
David Netod2de94a2017-08-28 17:27:47 -04001803 }
Neil Henning39672102017-09-29 14:33:13 +01001804 } else if (isa<AtomicRMWInst>(I)) {
1805 LLVMContext &Context = I.getContext();
1806
1807 FindConstant(
1808 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1809 FindConstant(ConstantInt::get(
1810 Type::getInt32Ty(Context),
1811 spv::MemorySemanticsUniformMemoryMask |
1812 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001813 }
1814
1815 for (Use &Op : I.operands()) {
1816 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1817 FindConstant(Op);
1818 }
1819 }
1820 }
1821 }
1822}
1823
1824void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001825 ValueList &CstList = getConstantList();
1826
David Netofb9a7972017-08-25 17:08:24 -04001827 // If V is already tracked, ignore it.
1828 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001829 return;
1830 }
1831
David Neto862b7d82018-06-14 18:48:37 -04001832 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1833 return;
1834 }
1835
David Neto22f144c2017-06-12 14:26:21 -04001836 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001837 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001838
1839 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001840 if (is4xi8vec(CstTy)) {
1841 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001842 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001843 }
1844 }
1845
1846 if (Cst->getNumOperands()) {
1847 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1848 ++I) {
1849 FindConstant(*I);
1850 }
1851
David Netofb9a7972017-08-25 17:08:24 -04001852 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001853 return;
1854 } else if (const ConstantDataSequential *CDS =
1855 dyn_cast<ConstantDataSequential>(Cst)) {
1856 // Add constants for each element to constant list.
1857 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1858 Constant *EleCst = CDS->getElementAsConstant(i);
1859 FindConstant(EleCst);
1860 }
1861 }
1862
1863 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001864 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001865 }
1866}
1867
1868spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1869 switch (AddrSpace) {
1870 default:
1871 llvm_unreachable("Unsupported OpenCL address space");
1872 case AddressSpace::Private:
1873 return spv::StorageClassFunction;
1874 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001875 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001876 case AddressSpace::Constant:
1877 return clspv::Option::ConstantArgsInUniformBuffer()
1878 ? spv::StorageClassUniform
1879 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001880 case AddressSpace::Input:
1881 return spv::StorageClassInput;
1882 case AddressSpace::Local:
1883 return spv::StorageClassWorkgroup;
1884 case AddressSpace::UniformConstant:
1885 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001886 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001887 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001888 case AddressSpace::ModuleScopePrivate:
1889 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001890 case AddressSpace::PushConstant:
1891 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001892 }
1893}
1894
David Neto862b7d82018-06-14 18:48:37 -04001895spv::StorageClass
1896SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1897 switch (arg_kind) {
1898 case clspv::ArgKind::Buffer:
1899 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001900 case clspv::ArgKind::BufferUBO:
1901 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001902 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001903 return spv::StorageClassStorageBuffer;
1904 case clspv::ArgKind::PodUBO:
1905 return spv::StorageClassUniform;
1906 case clspv::ArgKind::PodPushConstant:
1907 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001908 case clspv::ArgKind::Local:
1909 return spv::StorageClassWorkgroup;
1910 case clspv::ArgKind::ReadOnlyImage:
1911 case clspv::ArgKind::WriteOnlyImage:
1912 case clspv::ArgKind::Sampler:
1913 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001914 default:
1915 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001916 }
1917}
1918
David Neto22f144c2017-06-12 14:26:21 -04001919spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1920 return StringSwitch<spv::BuiltIn>(Name)
1921 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1922 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1923 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1924 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1925 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001926 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
David Neto22f144c2017-06-12 14:26:21 -04001927 .Default(spv::BuiltInMax);
1928}
1929
1930void SPIRVProducerPass::GenerateExtInstImport() {
SJW69939d52020-04-16 07:29:07 -05001931 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kImports);
David Neto22f144c2017-06-12 14:26:21 -04001932 uint32_t &ExtInstImportID = getOpExtInstImportID();
1933
1934 //
1935 // Generate OpExtInstImport.
1936 //
1937 // Ops[0] ... Ops[n] = Name (Literal String)
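  // e.g. %glsl = OpExtInstImport "GLSL.std.450" (id invented)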
David Neto22f144c2017-06-12 14:26:21 -04001938 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001939 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1940 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001941}
1942
alan-bakerb6b09dc2018-11-08 16:59:28 -05001943void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1944 Module &module) {
SJW69939d52020-04-16 07:29:07 -05001945 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kTypes);
David Neto22f144c2017-06-12 14:26:21 -04001946 ValueMapType &VMap = getValueMap();
1947 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001948 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001949
1950 // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
1951 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1952 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1953
1954 for (Type *Ty : getTypeList()) {
1955 // Update TypeMap with nextID for reference later.
1956 TypeMap[Ty] = nextID;
1957
1958 switch (Ty->getTypeID()) {
1959 default: {
1960 Ty->print(errs());
1961 llvm_unreachable("Unsupported type???");
1962 break;
1963 }
1964 case Type::MetadataTyID:
1965 case Type::LabelTyID: {
1966 // Ignore these types.
1967 break;
1968 }
1969 case Type::PointerTyID: {
1970 PointerType *PTy = cast<PointerType>(Ty);
1971 unsigned AddrSpace = PTy->getAddressSpace();
1972
1973 // For the purposes of our Vulkan SPIR-V type system, constant and global
1974 // are conflated.
1975 bool UseExistingOpTypePointer = false;
1976 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001977 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1978 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001979 // Check to see if we already created this type (for instance, if we
1980 // had a constant <type>* and a global <type>*, the SPIR-V type would be
1981 // created for one of them and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001982 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1983 if (0 < TypeMap.count(GlobalTy)) {
1984 TypeMap[PTy] = TypeMap[GlobalTy];
1985 UseExistingOpTypePointer = true;
1986 break;
1987 }
David Neto22f144c2017-06-12 14:26:21 -04001988 }
1989 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001990 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1991 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001992
alan-bakerb6b09dc2018-11-08 16:59:28 -05001993 // Check to see if we already created this type (for instance, if we
1994 // had a constant <type>* and a global <type>*, the SPIR-V type would be
1995 // created for one of them and shared by both).
1996 auto ConstantTy =
1997 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001998 if (0 < TypeMap.count(ConstantTy)) {
1999 TypeMap[PTy] = TypeMap[ConstantTy];
2000 UseExistingOpTypePointer = true;
2001 }
David Neto22f144c2017-06-12 14:26:21 -04002002 }
2003 }
2004
David Neto862b7d82018-06-14 18:48:37 -04002005 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04002006
David Neto862b7d82018-06-14 18:48:37 -04002007 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04002008 //
2009 // Generate OpTypePointer.
2010 //
2011
2012 // OpTypePointer
2013 // Ops[0] = Storage Class
2014 // Ops[1] = Element Type ID
2015 SPIRVOperandList Ops;
2016
David Neto257c3892018-04-11 13:19:45 -04002017 Ops << MkNum(GetStorageClass(AddrSpace))
2018 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04002019
David Neto87846742018-04-11 17:36:22 -04002020 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002021 SPIRVInstList.push_back(Inst);
2022 }
David Neto22f144c2017-06-12 14:26:21 -04002023 break;
2024 }
2025 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002026 StructType *STy = cast<StructType>(Ty);
2027
2028 // Handle sampler type.
2029 if (STy->isOpaque()) {
2030 if (STy->getName().equals("opencl.sampler_t")) {
2031 //
2032 // Generate OpTypeSampler
2033 //
2034 // Empty Ops.
2035 SPIRVOperandList Ops;
2036
David Neto87846742018-04-11 17:36:22 -04002037 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002038 SPIRVInstList.push_back(Inst);
2039 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05002040 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2041 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002042 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2043 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002044 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002045 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002046 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2047 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002048 STy->getName().startswith("opencl.image3d_ro_t") ||
2049 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002050 //
2051 // Generate OpTypeImage
2052 //
2053 // Ops[0] = Sampled Type ID
2054 // Ops[1] = Dim ID
2055 // Ops[2] = Depth (Literal Number)
2056 // Ops[3] = Arrayed (Literal Number)
2057 // Ops[4] = MS (Literal Number)
2058 // Ops[5] = Sampled (Literal Number)
2059 // Ops[6] = Image Format ID
2060 //
2061 SPIRVOperandList Ops;
2062
alan-bakerf67468c2019-11-25 15:51:49 -05002063 uint32_t ImageTyID = nextID++;
2064 uint32_t SampledTyID = 0;
2065 if (STy->getName().contains(".float")) {
2066 SampledTyID = lookupType(Type::getFloatTy(Context));
2067 } else if (STy->getName().contains(".uint")) {
2068 SampledTyID = lookupType(Type::getInt32Ty(Context));
2069 } else if (STy->getName().contains(".int")) {
2070 // Generate a signed 32-bit integer if necessary.
2071 if (int32ID == 0) {
2072 int32ID = nextID++;
2073 SPIRVOperandList intOps;
2074 intOps << MkNum(32);
2075 intOps << MkNum(1);
2076 auto signed_int =
2077 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2078 SPIRVInstList.push_back(signed_int);
2079 }
2080 SampledTyID = int32ID;
2081
2082 // Generate a vec4 of the signed int if necessary.
2083 if (v4int32ID == 0) {
2084 v4int32ID = nextID++;
2085 SPIRVOperandList vecOps;
2086 vecOps << MkId(int32ID);
2087 vecOps << MkNum(4);
2088 auto int_vec =
2089 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2090 SPIRVInstList.push_back(int_vec);
2091 }
2092 } else {
2093 // This was likely an UndefValue.
2094 SampledTyID = lookupType(Type::getFloatTy(Context));
2095 }
David Neto257c3892018-04-11 13:19:45 -04002096 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002097
2098 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002099 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002100 STy->getName().startswith("opencl.image1d_wo_t") ||
2101 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2102 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002103 DimID = spv::Dim1D;
2104 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2105 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002106 DimID = spv::Dim3D;
2107 }
David Neto257c3892018-04-11 13:19:45 -04002108 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002109
2110 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002111 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002112
alan-baker7150a1d2020-02-25 08:31:06 -05002113 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2114 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002115
2116 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002117 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002118
alan-baker7150a1d2020-02-25 08:31:06 -05002119 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002120 //
2121 // From Spec
2122 //
2123 // 0 indicates this is only known at run time, not at compile time
2124 // 1 indicates will be used with sampler
2125 // 2 indicates will be used without a sampler (a storage image)
2126 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002127 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002128 Sampled = 2;
2129 }
David Neto257c3892018-04-11 13:19:45 -04002130 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002131
2132 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002133 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002134
alan-bakerf67468c2019-11-25 15:51:49 -05002135 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002136 SPIRVInstList.push_back(Inst);
2137 break;
2138 }
2139 }
2140
2141 //
2142 // Generate OpTypeStruct
2143 //
2144 // Ops[0] ... Ops[n] = Member IDs
2145 SPIRVOperandList Ops;
2146
2147 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002148 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002149 }
2150
David Neto22f144c2017-06-12 14:26:21 -04002151 uint32_t STyID = nextID;
2152
alan-bakerb6b09dc2018-11-08 16:59:28 -05002153 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002154 SPIRVInstList.push_back(Inst);
2155
2156 // Generate OpMemberDecorate.
Kévin Petitbbbda972020-03-03 19:16:31 +00002157 if (TypesNeedingLayout.idFor(STy)) {
2158 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2159 MemberIdx++) {
2160 // Ops[0] = Structure Type ID
2161 // Ops[1] = Member Index(Literal Number)
2162 // Ops[2] = Decoration (Offset)
2163 // Ops[3] = Byte Offset (Literal Number)
2164 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002165
Kévin Petitbbbda972020-03-03 19:16:31 +00002166 Ops << MkId(STyID) << MkNum(MemberIdx)
2167 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002168
Kévin Petitbbbda972020-03-03 19:16:31 +00002169 const auto ByteOffset =
2170 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002171
Kévin Petitbbbda972020-03-03 19:16:31 +00002172 Ops << MkNum(ByteOffset);
2173
2174 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002175 getSPIRVInstList(kAnnotations).push_back(DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002176 }
David Neto22f144c2017-06-12 14:26:21 -04002177 }
2178
2179 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002180 if (StructTypesNeedingBlock.idFor(STy)) {
2181 Ops.clear();
2182 // Use Block decorations with StorageBuffer storage class.
2183 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002184
David Neto862b7d82018-06-14 18:48:37 -04002185 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002186 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002187 }
2188 break;
2189 }
2190 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002191 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002192
2193 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002194 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002195 SPIRVInstList.push_back(Inst);
2196 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002197 if (!clspv::Option::Int8Support()) {
2198 // i8 is added to TypeMap as i32.
2199 // No matter what LLVM type is requested first, always alias the
2200 // second one's SPIR-V type to be the same as the one we generated
2201 // first.
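          // E.g. with this option both i8 and i32 end up mapping to the same
          // "OpTypeInt 32 0" result id.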
2202 unsigned aliasToWidth = 0;
2203 if (BitWidth == 8) {
2204 aliasToWidth = 32;
2205 BitWidth = 32;
2206 } else if (BitWidth == 32) {
2207 aliasToWidth = 8;
2208 }
2209 if (aliasToWidth) {
2210 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2211 auto where = TypeMap.find(otherType);
2212 if (where == TypeMap.end()) {
2213 // Go ahead and make it, but also map the other type to it.
2214 TypeMap[otherType] = nextID;
2215 } else {
2216 // Alias this SPIR-V type the existing type.
2217 TypeMap[Ty] = where->second;
2218 break;
2219 }
David Neto391aeb12017-08-26 15:51:58 -04002220 }
David Neto22f144c2017-06-12 14:26:21 -04002221 }
2222
David Neto257c3892018-04-11 13:19:45 -04002223 SPIRVOperandList Ops;
2224 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002225
2226 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002227 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002228 }
2229 break;
2230 }
2231 case Type::HalfTyID:
2232 case Type::FloatTyID:
2233 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002234 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002235 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002236
2237 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002238 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002239 break;
2240 }
2241 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002242 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002243 const uint64_t Length = ArrTy->getArrayNumElements();
2244 if (Length == 0) {
2245 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002246
David Neto862b7d82018-06-14 18:48:37 -04002247 // Only generate the type once.
2248 // TODO(dneto): Can it ever be generated more than once?
2249 // Doesn't LLVM type uniqueness guarantee we'll only see this
2250 // once?
2251 Type *EleTy = ArrTy->getArrayElementType();
2252 if (OpRuntimeTyMap.count(EleTy) == 0) {
2253 uint32_t OpTypeRuntimeArrayID = nextID;
2254 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002255
David Neto862b7d82018-06-14 18:48:37 -04002256 //
2257 // Generate OpTypeRuntimeArray.
2258 //
David Neto22f144c2017-06-12 14:26:21 -04002259
David Neto862b7d82018-06-14 18:48:37 -04002260 // OpTypeRuntimeArray
2261 // Ops[0] = Element Type ID
2262 SPIRVOperandList Ops;
2263 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002264
David Neto862b7d82018-06-14 18:48:37 -04002265 SPIRVInstList.push_back(
2266 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002267
David Neto862b7d82018-06-14 18:48:37 -04002268 if (Hack_generate_runtime_array_stride_early) {
2269 // Generate OpDecorate.
David Neto22f144c2017-06-12 14:26:21 -04002270
David Neto862b7d82018-06-14 18:48:37 -04002271 // Ops[0] = Target ID
2272 // Ops[1] = Decoration (ArrayStride)
2273 // Ops[2] = Stride Number(Literal Number)
2274 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002275
David Neto862b7d82018-06-14 18:48:37 -04002276 Ops << MkId(OpTypeRuntimeArrayID)
2277 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002278 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002279
David Neto862b7d82018-06-14 18:48:37 -04002280 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002281 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002282 }
2283 }
David Neto22f144c2017-06-12 14:26:21 -04002284
David Neto862b7d82018-06-14 18:48:37 -04002285 } else {
David Neto22f144c2017-06-12 14:26:21 -04002286
David Neto862b7d82018-06-14 18:48:37 -04002287 //
2288 // Generate OpConstant and OpTypeArray.
2289 //
2290
2291 //
2292 // Generate OpConstant for array length.
2293 //
2294 // Ops[0] = Result Type ID
2295 // Ops[1] .. Ops[n] = Values LiteralNumber
2296 SPIRVOperandList Ops;
2297
2298 Type *LengthTy = Type::getInt32Ty(Context);
2299 uint32_t ResTyID = lookupType(LengthTy);
2300 Ops << MkId(ResTyID);
2301
2302 assert(Length < UINT32_MAX);
2303 Ops << MkNum(static_cast<uint32_t>(Length));
2304
2305 // Add constant for length to constant list.
2306 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2307 AllocatedVMap[CstLength] = nextID;
2308 VMap[CstLength] = nextID;
2309 uint32_t LengthID = nextID;
2310
2311 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2312 SPIRVInstList.push_back(CstInst);
2313
2314 // Remember to generate ArrayStride later
2315 getTypesNeedingArrayStride().insert(Ty);
2316
2317 //
2318 // Generate OpTypeArray.
2319 //
2320 // Ops[0] = Element Type ID
2321 // Ops[1] = Array Length Constant ID
2322 Ops.clear();
2323
2324 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2325 Ops << MkId(EleTyID) << MkId(LengthID);
2326
2327 // Update TypeMap with nextID.
2328 TypeMap[Ty] = nextID;
2329
2330 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2331 SPIRVInstList.push_back(ArrayInst);
2332 }
David Neto22f144c2017-06-12 14:26:21 -04002333 break;
2334 }
2335 case Type::VectorTyID: {
James Pricecf53df42020-04-20 14:41:24 -04002336 auto VecTy = cast<VectorType>(Ty);
2337
alan-bakerb39c8262019-03-08 14:03:37 -05002338 // <4 x i8> is changed to i32 if i8 is not generally supported.
2339 if (!clspv::Option::Int8Support() &&
James Pricecf53df42020-04-20 14:41:24 -04002340 VecTy->getElementType() == Type::getInt8Ty(Context)) {
2341 if (VecTy->getNumElements() == 4) {
2342 TypeMap[Ty] = lookupType(VecTy->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002343 break;
2344 } else {
2345 Ty->print(errs());
2346 llvm_unreachable("Support above i8 vector type");
2347 }
2348 }
2349
2350 // Ops[0] = Component Type ID
2351 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002352 SPIRVOperandList Ops;
James Pricecf53df42020-04-20 14:41:24 -04002353 Ops << MkId(lookupType(VecTy->getElementType()))
2354 << MkNum(VecTy->getNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002355
alan-bakerb6b09dc2018-11-08 16:59:28 -05002356 SPIRVInstruction *inst =
2357 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002358 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002359 break;
2360 }
2361 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002362 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002363 SPIRVInstList.push_back(Inst);
2364 break;
2365 }
2366 case Type::FunctionTyID: {
2367 // Generate SPIRV instruction for function type.
2368 FunctionType *FTy = cast<FunctionType>(Ty);
2369
2370 // Ops[0] = Return Type ID
2371 // Ops[1] ... Ops[n] = Parameter Type IDs
2372 SPIRVOperandList Ops;
2373
2374 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002375 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002376
2377 // Find SPIRV instructions for parameter types
2378 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2379 // Find SPIRV instruction for parameter type.
2380 auto ParamTy = FTy->getParamType(k);
2381 if (ParamTy->isPointerTy()) {
2382 auto PointeeTy = ParamTy->getPointerElementType();
2383 if (PointeeTy->isStructTy() &&
2384 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2385 ParamTy = PointeeTy;
2386 }
2387 }
2388
David Netoc6f3ab22018-04-06 18:02:31 -04002389 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002390 }
2391
David Neto87846742018-04-11 17:36:22 -04002392 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002393 SPIRVInstList.push_back(Inst);
2394 break;
2395 }
2396 }
2397 }
2398
2399 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002400 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002401 //
2402 // Generate OpTypeSampledImage.
2403 //
2404 // Ops[0] = Image Type ID
2405 //
2406 SPIRVOperandList Ops;
2407
David Netoc6f3ab22018-04-06 18:02:31 -04002408 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002409
alan-bakerabd82722019-12-03 17:14:51 -05002410 // Update the image type map.
2411 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002412
David Neto87846742018-04-11 17:36:22 -04002413 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002414 SPIRVInstList.push_back(Inst);
2415 }
David Netoc6f3ab22018-04-06 18:02:31 -04002416
2417 // Generate types for pointer-to-local arguments.
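  // Illustrative output per pointer-to-local spec id (ids invented):
  //   %size    = OpSpecConstant %uint 1
  //   %arr     = OpTypeArray %elem %size
  //   %ptr_arr = OpTypePointer Workgroup %arr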
alan-bakera1be3322020-04-20 12:48:18 -04002418 for (auto pair : clspv::GetSpecConstants(&module)) {
2419 auto kind = pair.first;
2420 auto spec_id = pair.second;
2421
2422 if (kind != SpecConstant::kLocalMemorySize)
2423 continue;
2424
alan-bakerb6b09dc2018-11-08 16:59:28 -05002425 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002426
2427 // Generate the spec constant.
2428 SPIRVOperandList Ops;
2429 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002430 SPIRVInstList.push_back(
2431 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002432
2433 // Generate the array type.
2434 Ops.clear();
2435 // The element type must have been created.
2436 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2437 assert(elem_ty_id);
2438 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2439
2440 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002441 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002442
2443 Ops.clear();
2444 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002445 SPIRVInstList.push_back(new SPIRVInstruction(
2446 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002447 }
David Neto22f144c2017-06-12 14:26:21 -04002448}
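// A minimal sketch of what the pointer-to-local block above emits for one
// __local kernel argument whose array size is governed by a spec constant
// (IDs illustrative):
//
//   %size    = OpSpecConstant %uint 1          ; arg_info.array_size_id
//   %arr     = OpTypeArray %elem %size         ; arg_info.array_type_id
//   %ptr_arr = OpTypePointer Workgroup %arr    ; arg_info.ptr_array_type_id
//
// GenerateWorkgroupVars() later declares the matching OpVariable in the
// Workgroup storage class.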
2449
2450void SPIRVProducerPass::GenerateSPIRVConstants() {
SJW69939d52020-04-16 07:29:07 -05002451 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kConstants);
David Neto22f144c2017-06-12 14:26:21 -04002452 ValueMapType &VMap = getValueMap();
2453 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2454 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002455 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002456
2457 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002458 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002459 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002460
2461 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002462 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002463 continue;
2464 }
2465
David Netofb9a7972017-08-25 17:08:24 -04002466 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002467 VMap[Cst] = nextID;
2468
2469 //
2470 // Generate OpConstant.
2471 //
2472
2473 // Ops[0] = Result Type ID
2474 // Ops[1] .. Ops[n] = Values LiteralNumber
2475 SPIRVOperandList Ops;
2476
David Neto257c3892018-04-11 13:19:45 -04002477 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002478
2479 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002480 spv::Op Opcode = spv::OpNop;
2481
2482 if (isa<UndefValue>(Cst)) {
2483 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002484 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002485 if (hack_undef && IsTypeNullable(Cst->getType())) {
2486 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002487 }
David Neto22f144c2017-06-12 14:26:21 -04002488 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2489 unsigned BitWidth = CI->getBitWidth();
2490 if (BitWidth == 1) {
2491 // If the bitwidth of constant is 1, generate OpConstantTrue or
2492 // OpConstantFalse.
2493 if (CI->getZExtValue()) {
2494 // Ops[0] = Result Type ID
2495 Opcode = spv::OpConstantTrue;
2496 } else {
2497 // Ops[0] = Result Type ID
2498 Opcode = spv::OpConstantFalse;
2499 }
David Neto22f144c2017-06-12 14:26:21 -04002500 } else {
2501 auto V = CI->getZExtValue();
2502 LiteralNum.push_back(V & 0xFFFFFFFF);
2503
2504 if (BitWidth > 32) {
2505 LiteralNum.push_back(V >> 32);
2506 }
2507
2508 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002509
David Neto257c3892018-04-11 13:19:45 -04002510 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002511 }
2512 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2513 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2514 Type *CFPTy = CFP->getType();
2515 if (CFPTy->isFloatTy()) {
2516 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002517 } else if (CFPTy->isDoubleTy()) {
2518 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2519 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002520 } else if (CFPTy->isHalfTy()) {
2521 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002522 } else {
2523 CFPTy->print(errs());
2524 llvm_unreachable("Implement this ConstantFP Type");
2525 }
2526
2527 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002528
David Neto257c3892018-04-11 13:19:45 -04002529 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002530 } else if (isa<ConstantDataSequential>(Cst) &&
2531 cast<ConstantDataSequential>(Cst)->isString()) {
2532 Cst->print(errs());
2533 llvm_unreachable("Implement this Constant");
2534
2535 } else if (const ConstantDataSequential *CDS =
2536 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002537 // Let's convert <4 x i8> constant to int constant specially.
2538 // This case occurs when all the values are specified as constant
2539 // ints.
2540 Type *CstTy = Cst->getType();
2541 if (is4xi8vec(CstTy)) {
2542 LLVMContext &Context = CstTy->getContext();
2543
2544 //
2545 // Generate OpConstant with OpTypeInt 32 0.
2546 //
Neil Henning39672102017-09-29 14:33:13 +01002547 uint32_t IntValue = 0;
2548 for (unsigned k = 0; k < 4; k++) {
2549 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002550 IntValue = (IntValue << 8) | (Val & 0xffu);
2551 }
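        // Worked example (illustrative only): for <4 x i8> <i8 1, i8 2, i8 3,
        // i8 4> the loop above computes
        //   k=0: 0x00000001, k=1: 0x00000102, k=2: 0x00010203, k=3: 0x01020304
        // so element 0 ends up in the most significant byte of the single i32
        // OpConstant emitted below.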
2552
2553 Type *i32 = Type::getInt32Ty(Context);
2554 Constant *CstInt = ConstantInt::get(i32, IntValue);
2555 // If this constant is already registered on VMap, use it.
2556 if (VMap.count(CstInt)) {
2557 uint32_t CstID = VMap[CstInt];
2558 VMap[Cst] = CstID;
2559 continue;
2560 }
2561
David Neto257c3892018-04-11 13:19:45 -04002562 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002563
David Neto87846742018-04-11 17:36:22 -04002564 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002565 SPIRVInstList.push_back(CstInst);
2566
2567 continue;
2568 }
2569
2570 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002571 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2572 Constant *EleCst = CDS->getElementAsConstant(k);
2573 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002574 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002575 }
2576
2577 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002578 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2579 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002580 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002581 Type *CstTy = Cst->getType();
2582 if (is4xi8vec(CstTy)) {
2583 LLVMContext &Context = CstTy->getContext();
2584
2585 //
2586 // Generate OpConstant with OpTypeInt 32 0.
2587 //
Neil Henning39672102017-09-29 14:33:13 +01002588 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002589 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2590 I != E; ++I) {
2591 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002592 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002593 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2594 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002595 }
David Neto49351ac2017-08-26 17:32:20 -04002596 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002597 }
2598
David Neto49351ac2017-08-26 17:32:20 -04002599 Type *i32 = Type::getInt32Ty(Context);
2600 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002601 // If this constant is already registered on VMap, use it.
2602 if (VMap.count(CstInt)) {
2603 uint32_t CstID = VMap[CstInt];
2604 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002605 continue;
David Neto22f144c2017-06-12 14:26:21 -04002606 }
2607
David Neto257c3892018-04-11 13:19:45 -04002608 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002609
David Neto87846742018-04-11 17:36:22 -04002610 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002611 SPIRVInstList.push_back(CstInst);
2612
David Neto19a1bad2017-08-25 15:01:41 -04002613 continue;
David Neto22f144c2017-06-12 14:26:21 -04002614 }
2615
2616 // We use a constant composite in SPIR-V for our constant aggregate in
2617 // LLVM.
2618 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002619
2620 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2621       // Look up the ID of the element of this aggregate (a constant for it
2622       // will already have been created).
2623 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2624
2625 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002626 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002627 }
2628 } else if (Cst->isNullValue()) {
2629 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002630 } else {
2631 Cst->print(errs());
2632 llvm_unreachable("Unsupported Constant???");
2633 }
2634
alan-baker5b86ed72019-02-15 08:26:50 -05002635 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2636 // Null pointer requires variable pointers.
2637 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2638 }
2639
David Neto87846742018-04-11 17:36:22 -04002640 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002641 SPIRVInstList.push_back(CstInst);
2642 }
2643}
2644
2645void SPIRVProducerPass::GenerateSamplers(Module &M) {
SJW69939d52020-04-16 07:29:07 -05002646 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04002647
alan-bakerb6b09dc2018-11-08 16:59:28 -05002648 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002649 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002650 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2651 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002652
David Neto862b7d82018-06-14 18:48:37 -04002653  // We might have samplers in the sampler map that are not used
2654  // in the translation unit. We still need to allocate variables
2655  // and bindings for them.
2656 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002657
Kévin Petitdf71de32019-04-09 14:09:50 +01002658 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002659 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002660 if (!var_fn)
2661 return;
alan-baker09cb9802019-12-10 13:16:27 -05002662
David Neto862b7d82018-06-14 18:48:37 -04002663 for (auto user : var_fn->users()) {
2664 // Populate SamplerLiteralToDescriptorSetMap and
2665 // SamplerLiteralToBindingMap.
2666 //
2667 // Look for calls like
2668 // call %opencl.sampler_t addrspace(2)*
2669 // @clspv.sampler.var.literal(
2670 // i32 descriptor,
2671 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002672 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002673 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002674 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002675 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002676 auto sampler_value = third_param;
2677 if (clspv::Option::UseSamplerMap()) {
2678 if (third_param >= sampler_map.size()) {
2679 errs() << "Out of bounds index to sampler map: " << third_param;
2680 llvm_unreachable("bad sampler init: out of bounds");
2681 }
2682 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002683 }
2684
David Neto862b7d82018-06-14 18:48:37 -04002685 const auto descriptor_set = static_cast<unsigned>(
2686 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2687 const auto binding = static_cast<unsigned>(
2688 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2689
2690 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2691 SamplerLiteralToBindingMap[sampler_value] = binding;
2692 used_bindings.insert(binding);
2693 }
2694 }
2695
alan-baker09cb9802019-12-10 13:16:27 -05002696 DenseSet<size_t> seen;
2697 for (auto user : var_fn->users()) {
2698 if (!isa<CallInst>(user))
2699 continue;
2700
2701 auto call = cast<CallInst>(user);
2702 const unsigned third_param = static_cast<unsigned>(
2703 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2704
2705 // Already allocated a variable for this value.
2706 if (!seen.insert(third_param).second)
2707 continue;
2708
2709 auto sampler_value = third_param;
2710 if (clspv::Option::UseSamplerMap()) {
2711 sampler_value = sampler_map[third_param].first;
2712 }
2713
David Neto22f144c2017-06-12 14:26:21 -04002714 // Generate OpVariable.
2715 //
2716 // GIDOps[0] : Result Type ID
2717 // GIDOps[1] : Storage Class
2718 SPIRVOperandList Ops;
2719
David Neto257c3892018-04-11 13:19:45 -04002720 Ops << MkId(lookupType(SamplerTy))
2721 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002722
David Neto862b7d82018-06-14 18:48:37 -04002723 auto sampler_var_id = nextID++;
2724 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002725 SPIRVInstList.push_back(Inst);
2726
alan-baker09cb9802019-12-10 13:16:27 -05002727 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002728
David Neto862b7d82018-06-14 18:48:37 -04002729 unsigned descriptor_set;
2730 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002731 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002732 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002733 // This sampler is not actually used. Find the next one.
2734 for (binding = 0; used_bindings.count(binding); binding++)
2735 ;
2736 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2737 used_bindings.insert(binding);
2738 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002739 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2740 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002741
alan-baker09cb9802019-12-10 13:16:27 -05002742 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002743 descriptorMapEntries->emplace_back(std::move(sampler_data),
2744 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002745 }
2746
SJW69939d52020-04-16 07:29:07 -05002747 // Ops[0] = Target ID
2748 // Ops[1] = Decoration (DescriptorSet)
2749 // Ops[2] = LiteralNumber according to Decoration
2750 Ops.clear();
2751
David Neto862b7d82018-06-14 18:48:37 -04002752 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2753 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002754
David Neto87846742018-04-11 17:36:22 -04002755 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002756 getSPIRVInstList(kAnnotations).push_back(DescDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002757
2758 // Ops[0] = Target ID
2759 // Ops[1] = Decoration (Binding)
2760 // Ops[2] = LiteralNumber according to Decoration
2761 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002762 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2763 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002764
David Neto87846742018-04-11 17:36:22 -04002765 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002766 getSPIRVInstList(kAnnotations).push_back(BindDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002767 }
David Neto862b7d82018-06-14 18:48:37 -04002768}
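// Rough sketch of the output for one literal sampler (IDs illustrative):
//
//   %s = OpVariable %ptr_UniformConstant_sampler UniformConstant
//   OpDecorate %s DescriptorSet <set>
//   OpDecorate %s Binding <binding>
//
// Set and binding normally come from the @clspv.sampler.var.literal call
// site; sampler-map entries that are never referenced get descriptor set 0
// and the next unused binding, as computed above.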
David Neto22f144c2017-06-12 14:26:21 -04002769
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002770void SPIRVProducerPass::GenerateResourceVars(Module &) {
SJW69939d52020-04-16 07:29:07 -05002771 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto862b7d82018-06-14 18:48:37 -04002772 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002773
David Neto862b7d82018-06-14 18:48:37 -04002774  // Generate variables. Make one for each resource var info object.
2775 for (auto *info : ModuleOrderedResourceVars) {
2776 Type *type = info->var_fn->getReturnType();
2777 // Remap the address space for opaque types.
2778 switch (info->arg_kind) {
2779 case clspv::ArgKind::Sampler:
2780 case clspv::ArgKind::ReadOnlyImage:
2781 case clspv::ArgKind::WriteOnlyImage:
2782 type = PointerType::get(type->getPointerElementType(),
2783 clspv::AddressSpace::UniformConstant);
2784 break;
2785 default:
2786 break;
2787 }
David Neto22f144c2017-06-12 14:26:21 -04002788
David Neto862b7d82018-06-14 18:48:37 -04002789 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002790
David Neto862b7d82018-06-14 18:48:37 -04002791 const auto type_id = lookupType(type);
2792 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2793 SPIRVOperandList Ops;
2794 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002795
David Neto862b7d82018-06-14 18:48:37 -04002796 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2797 SPIRVInstList.push_back(Inst);
2798
2799 // Map calls to the variable-builtin-function.
2800 for (auto &U : info->var_fn->uses()) {
2801 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2802 const auto set = unsigned(
2803 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2804 const auto binding = unsigned(
2805 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2806 if (set == info->descriptor_set && binding == info->binding) {
2807 switch (info->arg_kind) {
2808 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002809 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002810 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002811 case clspv::ArgKind::PodUBO:
2812 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002813 // The call maps to the variable directly.
2814 VMap[call] = info->var_id;
2815 break;
2816 case clspv::ArgKind::Sampler:
2817 case clspv::ArgKind::ReadOnlyImage:
2818 case clspv::ArgKind::WriteOnlyImage:
2819 // The call maps to a load we generate later.
2820 ResourceVarDeferredLoadCalls[call] = info->var_id;
2821 break;
2822 default:
2823 llvm_unreachable("Unhandled arg kind");
2824 }
2825 }
David Neto22f144c2017-06-12 14:26:21 -04002826 }
David Neto862b7d82018-06-14 18:48:37 -04002827 }
2828 }
David Neto22f144c2017-06-12 14:26:21 -04002829
David Neto862b7d82018-06-14 18:48:37 -04002830 // Generate associated decorations.
SJW69939d52020-04-16 07:29:07 -05002831 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
David Neto862b7d82018-06-14 18:48:37 -04002832
2833 SPIRVOperandList Ops;
2834 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002835 // Push constants don't need descriptor set or binding decorations.
2836 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2837 continue;
2838
David Neto862b7d82018-06-14 18:48:37 -04002839 // Decorate with DescriptorSet and Binding.
2840 Ops.clear();
2841 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2842 << MkNum(info->descriptor_set);
SJW69939d52020-04-16 07:29:07 -05002843 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002844
2845 Ops.clear();
2846 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2847 << MkNum(info->binding);
SJW69939d52020-04-16 07:29:07 -05002848 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002849
alan-bakere9308012019-03-15 10:25:13 -04002850 if (info->coherent) {
2851 // Decorate with Coherent if required for the variable.
2852 Ops.clear();
2853 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05002854 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
alan-bakere9308012019-03-15 10:25:13 -04002855 }
2856
David Neto862b7d82018-06-14 18:48:37 -04002857 // Generate NonWritable and NonReadable
2858 switch (info->arg_kind) {
2859 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002860 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002861 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2862 clspv::AddressSpace::Constant) {
2863 Ops.clear();
2864 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
SJW69939d52020-04-16 07:29:07 -05002865 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002866 }
David Neto862b7d82018-06-14 18:48:37 -04002867 break;
David Neto862b7d82018-06-14 18:48:37 -04002868 case clspv::ArgKind::WriteOnlyImage:
2869 Ops.clear();
2870 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
SJW69939d52020-04-16 07:29:07 -05002871 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002872 break;
2873 default:
2874 break;
David Neto22f144c2017-06-12 14:26:21 -04002875 }
2876 }
2877}
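// For a typical storage-buffer resource variable the loops above produce,
// roughly (IDs illustrative):
//
//   %res = OpVariable %ptr StorageBuffer
//   OpDecorate %res DescriptorSet <set>
//   OpDecorate %res Binding <binding>
//   OpDecorate %res Coherent      ; only when info->coherent is set
//   OpDecorate %res NonWritable   ; only for constant address space buffers
//
// Write-only images get NonReadable instead, and push-constant PODs skip the
// DescriptorSet/Binding decorations entirely.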
2878
Kévin Petitbbbda972020-03-03 19:16:31 +00002879namespace {
2880
2881bool isScalarType(Type *type) {
2882 return type->isIntegerTy() || type->isFloatTy();
2883}
2884
2885uint64_t structAlignment(StructType *type,
2886 std::function<uint64_t(Type *)> alignFn) {
2887 uint64_t maxAlign = 1;
2888 for (unsigned i = 0; i < type->getStructNumElements(); i++) {
2889 uint64_t align = alignFn(type->getStructElementType(i));
2890 maxAlign = std::max(align, maxAlign);
2891 }
2892 return maxAlign;
2893}
2894
2895uint64_t scalarAlignment(Type *type) {
2896 // A scalar of size N has a scalar alignment of N.
2897 if (isScalarType(type)) {
2898 return type->getScalarSizeInBits() / 8;
2899 }
2900
2901 // A vector or matrix type has a scalar alignment equal to that of its
2902 // component type.
James Pricecf53df42020-04-20 14:41:24 -04002903 if (auto vec_type = dyn_cast<VectorType>(type)) {
2904 return scalarAlignment(vec_type->getElementType());
Kévin Petitbbbda972020-03-03 19:16:31 +00002905 }
2906
2907 // An array type has a scalar alignment equal to that of its element type.
2908 if (type->isArrayTy()) {
2909 return scalarAlignment(type->getArrayElementType());
2910 }
2911
2912 // A structure has a scalar alignment equal to the largest scalar alignment of
2913 // any of its members.
2914 if (type->isStructTy()) {
2915 return structAlignment(cast<StructType>(type), scalarAlignment);
2916 }
2917
2918 llvm_unreachable("Unsupported type");
2919}
2920
2921uint64_t baseAlignment(Type *type) {
2922 // A scalar has a base alignment equal to its scalar alignment.
2923 if (isScalarType(type)) {
2924 return scalarAlignment(type);
2925 }
2926
James Pricecf53df42020-04-20 14:41:24 -04002927 if (auto vec_type = dyn_cast<VectorType>(type)) {
2928 unsigned numElems = vec_type->getNumElements();
Kévin Petitbbbda972020-03-03 19:16:31 +00002929
2930 // A two-component vector has a base alignment equal to twice its scalar
2931 // alignment.
2932 if (numElems == 2) {
2933 return 2 * scalarAlignment(type);
2934 }
2935 // A three- or four-component vector has a base alignment equal to four
2936 // times its scalar alignment.
2937 if ((numElems == 3) || (numElems == 4)) {
2938 return 4 * scalarAlignment(type);
2939 }
2940 }
2941
2942 // An array has a base alignment equal to the base alignment of its element
2943 // type.
2944 if (type->isArrayTy()) {
2945 return baseAlignment(type->getArrayElementType());
2946 }
2947
2948 // A structure has a base alignment equal to the largest base alignment of any
2949 // of its members.
2950 if (type->isStructTy()) {
2951 return structAlignment(cast<StructType>(type), baseAlignment);
2952 }
2953
2954 // TODO A row-major matrix of C columns has a base alignment equal to the base
2955 // alignment of a vector of C matrix components.
2956 // TODO A column-major matrix has a base alignment equal to the base alignment
2957 // of the matrix column type.
2958
2959 llvm_unreachable("Unsupported type");
2960}
2961
2962uint64_t extendedAlignment(Type *type) {
2963 // A scalar, vector or matrix type has an extended alignment equal to its base
2964 // alignment.
2965 // TODO matrix type
2966 if (isScalarType(type) || type->isVectorTy()) {
2967 return baseAlignment(type);
2968 }
2969
2970 // An array or structure type has an extended alignment equal to the largest
2971 // extended alignment of any of its members, rounded up to a multiple of 16
2972 if (type->isStructTy()) {
2973 auto salign = structAlignment(cast<StructType>(type), extendedAlignment);
2974 return alignTo(salign, 16);
2975 }
2976
2977 if (type->isArrayTy()) {
2978 auto salign = extendedAlignment(type->getArrayElementType());
2979 return alignTo(salign, 16);
2980 }
2981
2982 llvm_unreachable("Unsupported type");
2983}
2984
2985uint64_t standardAlignment(Type *type, spv::StorageClass sclass) {
2986 // If the scalarBlockLayout feature is enabled on the device then every member
2987 // must be aligned according to its scalar alignment
2988 if (clspv::Option::ScalarBlockLayout()) {
2989 return scalarAlignment(type);
2990 }
2991
2992 // All vectors must be aligned according to their scalar alignment
2993 if (type->isVectorTy()) {
2994 return scalarAlignment(type);
2995 }
2996
2997 // If the uniformBufferStandardLayout feature is not enabled on the device,
2998 // then any member of an OpTypeStruct with a storage class of Uniform and a
2999 // decoration of Block must be aligned according to its extended alignment.
3000 if (!clspv::Option::Std430UniformBufferLayout() &&
3001 sclass == spv::StorageClassUniform) {
3002 return extendedAlignment(type);
3003 }
3004
3005 // Every other member must be aligned according to its base alignment
3006 return baseAlignment(type);
3007}
3008
3009bool improperlyStraddles(const DataLayout &DL, Type *type, unsigned offset) {
3010 assert(type->isVectorTy());
3011
3012 auto size = DL.getTypeStoreSize(type);
3013
3014 // It is a vector with total size less than or equal to 16 bytes, and has
3015 // Offset decorations placing its first byte at F and its last byte at L,
3016 // where floor(F / 16) != floor(L / 16).
3017 if ((size <= 16) && (offset % 16 + size > 16)) {
3018 return true;
3019 }
3020
3021 // It is a vector with total size greater than 16 bytes and has its Offset
3022 // decorations placing its first byte at a non-integer multiple of 16
3023 if ((size > 16) && (offset % 16 != 0)) {
3024 return true;
3025 }
3026
3027 return false;
3028}
3029
3030// See 14.5 Shader Resource Interface in Vulkan spec
3031bool isValidExplicitLayout(Module &M, StructType *STy, unsigned Member,
3032 spv::StorageClass SClass, unsigned Offset,
3033 unsigned PreviousMemberOffset) {
3034
3035 auto MemberType = STy->getElementType(Member);
3036 auto Align = standardAlignment(MemberType, SClass);
3037 auto &DL = M.getDataLayout();
3038
3039 // The Offset decoration of any member must be a multiple of its alignment
3040 if (Offset % Align != 0) {
3041 return false;
3042 }
3043
3044 // TODO Any ArrayStride or MatrixStride decoration must be a multiple of the
3045 // alignment of the array or matrix as defined above
3046
3047 if (!clspv::Option::ScalarBlockLayout()) {
3048 // Vectors must not improperly straddle, as defined above
3049 if (MemberType->isVectorTy() &&
3050 improperlyStraddles(DL, MemberType, Offset)) {
3051       return false;
3052 }
3053
3054 // The Offset decoration of a member must not place it between the end
3055 // of a structure or an array and the next multiple of the alignment of that
3056 // structure or array
3057 if (Member > 0) {
3058 auto PType = STy->getElementType(Member - 1);
3059 if (PType->isStructTy() || PType->isArrayTy()) {
3060 auto PAlign = standardAlignment(PType, SClass);
3061 if (Offset - PreviousMemberOffset < PAlign) {
3062 return false;
3063 }
3064 }
3065 }
3066 }
3067
3068 return true;
3069}
3070
3071} // namespace
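// Worked example for the helpers above (purely illustrative): for a struct
// containing a <3 x float> followed by a float,
//   scalarAlignment:   <3 x float> -> 4, struct -> max(4, 4) = 4
//   baseAlignment:     <3 x float> -> 4 * 4 = 16, struct -> max(16, 4) = 16
//   extendedAlignment: struct -> alignTo(max(16, 4), 16) = 16
// standardAlignment() then selects between these based on the storage class
// and the scalar/uniform block layout options.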
3072
alan-bakera1be3322020-04-20 12:48:18 -04003073void SPIRVProducerPass::GeneratePushConstantDescriptorMapEntries(Module &M) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003074
3075 if (auto GV = M.getGlobalVariable(clspv::PushConstantsVariableName())) {
3076 auto const &DL = M.getDataLayout();
3077 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
3078 auto STy = cast<StructType>(GV->getValueType());
3079
3080 for (unsigned i = 0; i < STy->getNumElements(); i++) {
3081 auto pc = static_cast<clspv::PushConstant>(
3082 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
3083 auto memberType = STy->getElementType(i);
3084 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
3085 unsigned previousOffset = 0;
3086 if (i > 0) {
3087 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
3088 }
3089 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
3090 assert(isValidExplicitLayout(M, STy, i, spv::StorageClassPushConstant,
3091 offset, previousOffset));
3092 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
3093 descriptorMapEntries->emplace_back(std::move(data));
3094 }
3095 }
3096}
3097
alan-bakera1be3322020-04-20 12:48:18 -04003098void SPIRVProducerPass::GenerateSpecConstantDescriptorMapEntries(Module &M) {
3099 for (auto pair : clspv::GetSpecConstants(&M)) {
3100 auto kind = pair.first;
3101 auto id = pair.second;
3102
3103 // Local memory size is only used for kernel arguments.
3104 if (kind == SpecConstant::kLocalMemorySize)
3105 continue;
3106
3107 version0::DescriptorMapEntry::SpecConstantData data = {kind, id};
3108 descriptorMapEntries->emplace_back(std::move(data));
3109 }
3110}
3111
David Neto22f144c2017-06-12 14:26:21 -04003112void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003113 Module &M = *GV.getParent();
SJW69939d52020-04-16 07:29:07 -05003114 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04003115 ValueMapType &VMap = getValueMap();
3116 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07003117 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04003118
3119 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
3120 Type *Ty = GV.getType();
3121 PointerType *PTy = cast<PointerType>(Ty);
3122
3123 uint32_t InitializerID = 0;
3124
3125 // Workgroup size is handled differently (it goes into a constant)
3126 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3127 std::vector<bool> HasMDVec;
3128 uint32_t PrevXDimCst = 0xFFFFFFFF;
3129 uint32_t PrevYDimCst = 0xFFFFFFFF;
3130 uint32_t PrevZDimCst = 0xFFFFFFFF;
3131 for (Function &Func : *GV.getParent()) {
3132 if (Func.isDeclaration()) {
3133 continue;
3134 }
3135
3136 // We only need to check kernels.
3137 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
3138 continue;
3139 }
3140
3141 if (const MDNode *MD =
3142 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
3143 uint32_t CurXDimCst = static_cast<uint32_t>(
3144 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3145 uint32_t CurYDimCst = static_cast<uint32_t>(
3146 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3147 uint32_t CurZDimCst = static_cast<uint32_t>(
3148 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3149
3150 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
3151 PrevZDimCst == 0xFFFFFFFF) {
3152 PrevXDimCst = CurXDimCst;
3153 PrevYDimCst = CurYDimCst;
3154 PrevZDimCst = CurZDimCst;
3155 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
3156 CurZDimCst != PrevZDimCst) {
3157 llvm_unreachable(
3158 "reqd_work_group_size must be the same across all kernels");
3159 } else {
3160 continue;
3161 }
3162
3163 //
3164 // Generate OpConstantComposite.
3165 //
3166 // Ops[0] : Result Type ID
3167 // Ops[1] : Constant size for x dimension.
3168 // Ops[2] : Constant size for y dimension.
3169 // Ops[3] : Constant size for z dimension.
3170 SPIRVOperandList Ops;
3171
3172 uint32_t XDimCstID =
3173 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
3174 uint32_t YDimCstID =
3175 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
3176 uint32_t ZDimCstID =
3177 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
3178
3179 InitializerID = nextID;
3180
David Neto257c3892018-04-11 13:19:45 -04003181 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3182 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003183
David Neto87846742018-04-11 17:36:22 -04003184 auto *Inst =
3185 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
alan-bakera1be3322020-04-20 12:48:18 -04003186 getSPIRVInstList(kConstants).push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003187
3188 HasMDVec.push_back(true);
3189 } else {
3190 HasMDVec.push_back(false);
3191 }
3192 }
3193
3194     // Check that all kernels have the same reqd_work_group_size definition.
3195 bool HasMD = false;
3196 if (!HasMDVec.empty()) {
3197 HasMD = HasMDVec[0];
3198 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3199 if (HasMD != HasMDVec[i]) {
3200 llvm_unreachable(
3201 "Kernels should have consistent work group size definition");
3202 }
3203 }
3204 }
3205
3206     // If no kernel has reqd_work_group_size metadata, generate
3207     // OpSpecConstants for the x/y/z dimensions.
3208 if (!HasMD) {
3209 //
3210 // Generate OpSpecConstants for x/y/z dimension.
3211 //
3212 // Ops[0] : Result Type ID
3213 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3214 uint32_t XDimCstID = 0;
3215 uint32_t YDimCstID = 0;
3216 uint32_t ZDimCstID = 0;
3217
alan-bakera1be3322020-04-20 12:48:18 -04003218 // Allocate spec constants for workgroup size.
3219 clspv::AddWorkgroupSpecConstants(&M);
3220
David Neto22f144c2017-06-12 14:26:21 -04003221 SPIRVOperandList Ops;
James Pricecf53df42020-04-20 14:41:24 -04003222 uint32_t result_type_id = lookupType(
3223 cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04003224
David Neto257c3892018-04-11 13:19:45 -04003225 // X Dimension
3226 Ops << MkId(result_type_id) << MkNum(1);
3227 XDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003228 getSPIRVInstList(kConstants)
3229 .push_back(new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003230
3231 // Y Dimension
3232 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003233 Ops << MkId(result_type_id) << MkNum(1);
3234 YDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003235 getSPIRVInstList(kConstants)
3236 .push_back(new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003237
3238 // Z Dimension
3239 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003240 Ops << MkId(result_type_id) << MkNum(1);
3241 ZDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003242 getSPIRVInstList(kConstants)
3243 .push_back(new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003244
David Neto257c3892018-04-11 13:19:45 -04003245 BuiltinDimVec.push_back(XDimCstID);
3246 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003247 BuiltinDimVec.push_back(ZDimCstID);
3248
David Neto22f144c2017-06-12 14:26:21 -04003249 //
3250 // Generate OpSpecConstantComposite.
3251 //
3252 // Ops[0] : Result Type ID
3253 // Ops[1] : Constant size for x dimension.
3254 // Ops[2] : Constant size for y dimension.
3255 // Ops[3] : Constant size for z dimension.
3256 InitializerID = nextID;
3257
3258 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003259 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3260 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003261
David Neto87846742018-04-11 17:36:22 -04003262 auto *Inst =
3263 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
alan-bakera1be3322020-04-20 12:48:18 -04003264 getSPIRVInstList(kConstants).push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003265 }
alan-bakerbed3a882020-04-21 14:42:41 -04003266 } else if (BuiltinType == spv::BuiltInWorkDim) {
3267 // 1. Generate a specialization constant with a default of 3.
3268 // 2. Allocate and annotate a SpecId for the constant.
3269 // 3. Use the spec constant as the initializer for the variable.
3270 SPIRVOperandList Ops;
3271
3272 //
3273 // Generate OpSpecConstant.
3274 //
3275 // Ops[0] : Result Type ID
3276 // Ops[1] : Default literal value
3277 InitializerID = nextID++;
3278
3279 Ops << MkId(lookupType(IntegerType::get(GV.getContext(), 32))) << MkNum(3);
3280
3281 auto *Inst = new SPIRVInstruction(spv::OpSpecConstant, InitializerID, Ops);
3282 getSPIRVInstList(kConstants).push_back(Inst);
3283
3284 //
3285 // Generate SpecId decoration.
3286 //
3287 // Ops[0] : target
3288 // Ops[1] : decoration
3289 // Ops[2] : SpecId
3290 auto spec_id = AllocateSpecConstant(&M, SpecConstant::kWorkDim);
3291 Ops.clear();
3292 Ops << MkId(InitializerID) << MkNum(spv::DecorationSpecId)
3293 << MkNum(spec_id);
3294
3295 Inst = new SPIRVInstruction(spv::OpDecorate, Ops);
3296 getSPIRVInstList(kAnnotations).push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003297 }
3298
David Neto22f144c2017-06-12 14:26:21 -04003299 VMap[&GV] = nextID;
3300
3301 //
3302 // Generate OpVariable.
3303 //
3304 // GIDOps[0] : Result Type ID
3305 // GIDOps[1] : Storage Class
3306 SPIRVOperandList Ops;
3307
David Neto85082642018-03-24 06:55:20 -07003308 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003309 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003310
David Neto85082642018-03-24 06:55:20 -07003311 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003312 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003313 clspv::Option::ModuleConstantsInStorageBuffer();
3314
Kévin Petit23d5f182019-08-13 16:21:29 +01003315 if (GV.hasInitializer()) {
3316 auto GVInit = GV.getInitializer();
3317 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3318 assert(VMap.count(GVInit) == 1);
3319 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003320 }
3321 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003322
3323 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003324 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003325 Ops << MkId(InitializerID);
3326 }
David Neto85082642018-03-24 06:55:20 -07003327 const uint32_t var_id = nextID++;
3328
David Neto87846742018-04-11 17:36:22 -04003329 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003330 SPIRVInstList.push_back(Inst);
3331
SJW69939d52020-04-16 07:29:07 -05003332 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
alan-bakerbed3a882020-04-21 14:42:41 -04003333  // If the variable is a builtin (other than WorkDim), decorate it.
3334 if (spv::BuiltInMax != BuiltinType && BuiltinType != spv::BuiltInWorkDim) {
David Neto22f144c2017-06-12 14:26:21 -04003335 //
3336 // Generate OpDecorate.
3337 //
3338 // DOps[0] = Target ID
3339 // DOps[1] = Decoration (Builtin)
3340 // DOps[2] = BuiltIn ID
3341 uint32_t ResultID;
3342
3343     // WorkgroupSize is different: we decorate the constant composite that
3344     // holds its value, rather than the variable that we use to access the value.
3345 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3346 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003347 // Save both the value and variable IDs for later.
3348 WorkgroupSizeValueID = InitializerID;
3349 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003350 } else {
3351 ResultID = VMap[&GV];
3352 }
3353
3354 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003355 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3356 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003357
David Neto87846742018-04-11 17:36:22 -04003358 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
SJW69939d52020-04-16 07:29:07 -05003359 Annotations.push_back(DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003360 } else if (module_scope_constant_external_init) {
3361 // This module scope constant is initialized from a storage buffer with data
3362 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003363 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003364
David Neto862b7d82018-06-14 18:48:37 -04003365    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003366 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3367 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003368 std::string hexbytes;
3369 llvm::raw_string_ostream str(hexbytes);
3370 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003371 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3372 str.str()};
3373 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3374 0);
David Neto85082642018-03-24 06:55:20 -07003375
David Neto85082642018-03-24 06:55:20 -07003376 SPIRVOperandList DOps;
David Neto85082642018-03-24 06:55:20 -07003377
3378 // OpDecorate %var DescriptorSet <descriptor_set>
David Neto257c3892018-04-11 13:19:45 -04003379 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3380 << MkNum(descriptor_set);
SJW69939d52020-04-16 07:29:07 -05003381 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
3382
3383 // OpDecorate %var Binding <binding>
3384 DOps.clear();
3385 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3386 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003387 }
3388}
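// Sketch of the WorkgroupSize path above when no kernel carries
// reqd_work_group_size metadata (IDs illustrative):
//
//   %x   = OpSpecConstant %uint 1
//   %y   = OpSpecConstant %uint 1
//   %z   = OpSpecConstant %uint 1
//   %wgs = OpSpecConstantComposite %v3uint %x %y %z
//   %var = OpVariable %ptr <storage class> %wgs
//   OpDecorate %wgs BuiltIn WorkgroupSize
//
// The BuiltIn decoration lands on the constant composite (the initializer)
// rather than on the variable, as noted in the builtin branch above.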
3389
alan-bakera1be3322020-04-20 12:48:18 -04003390void SPIRVProducerPass::GenerateWorkgroupVars(Module &M) {
SJW69939d52020-04-16 07:29:07 -05003391 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
alan-bakera1be3322020-04-20 12:48:18 -04003392 auto spec_constant_md = M.getNamedMetadata(clspv::SpecConstantMetadataName());
3393 if (!spec_constant_md)
3394 return;
3395
3396 for (auto pair : clspv::GetSpecConstants(&M)) {
3397 auto kind = pair.first;
3398 auto spec_id = pair.second;
3399
3400 if (kind != SpecConstant::kLocalMemorySize)
3401 continue;
3402
alan-bakerb6b09dc2018-11-08 16:59:28 -05003403 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003404
3405 // Generate OpVariable.
3406 //
3407 // GIDOps[0] : Result Type ID
3408 // GIDOps[1] : Storage Class
3409 SPIRVOperandList Ops;
3410 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3411
3412 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003413 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003414 }
3415}
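// Combined with the types and spec constant emitted in GenerateSPIRVTypes(),
// each pointer-to-local argument therefore becomes, roughly (IDs
// illustrative):
//
//   %var = OpVariable %ptr_Workgroup_arr Workgroup
//
// where the array length is governed by the LocalMemorySize spec constant.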
3416
David Neto862b7d82018-06-14 18:48:37 -04003417void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3418 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003419 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3420 return;
3421 }
Kévin Petit717f8572020-04-06 17:31:53 +01003422  // Add a kernel declaration entry for this kernel.
3423 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
3424 F.getName().str()};
3425 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
3426
David Neto862b7d82018-06-14 18:48:37 -04003427 // Gather the list of resources that are used by this function's arguments.
3428 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3429
alan-bakerf5e5f692018-11-27 08:33:24 -05003430 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3431 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003432 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003433 std::string kind =
3434 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3435 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003436 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003437 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003438 };
3439
3440 auto *fty = F.getType()->getPointerElementType();
3441 auto *func_ty = dyn_cast<FunctionType>(fty);
3442
alan-baker038e9242019-04-19 22:14:41 -04003443 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003444 // If an argument maps to a resource variable, then get descriptor set and
3445  // binding from the resource variable. Other info comes from the metadata.
3446 const auto *arg_map = F.getMetadata("kernel_arg_map");
3447 if (arg_map) {
3448 for (const auto &arg : arg_map->operands()) {
3449 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003450 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003451 const auto name =
3452 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3453 const auto old_index =
3454 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3455 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003456 const size_t new_index = static_cast<size_t>(
3457 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003458 const auto offset =
3459 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003460 const auto arg_size =
3461 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003462 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003463 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003464 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003465 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003466
3467 uint32_t descriptor_set = 0;
3468 uint32_t binding = 0;
3469 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003470 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3471 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003472 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003473 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003474 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003475 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3476 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3477 DL));
David Neto862b7d82018-06-14 18:48:37 -04003478 } else {
3479 auto *info = resource_var_at_index[new_index];
3480 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003481 descriptor_set = info->descriptor_set;
3482 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003483 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003484 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3485 binding);
David Neto862b7d82018-06-14 18:48:37 -04003486 }
3487 } else {
3488 // There is no argument map.
3489 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003490 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003491
3492 SmallVector<Argument *, 4> arguments;
3493 for (auto &arg : F.args()) {
3494 arguments.push_back(&arg);
3495 }
3496
3497 unsigned arg_index = 0;
3498 for (auto *info : resource_var_at_index) {
3499 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003500 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003501 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04003502 if (info->arg_kind == clspv::ArgKind::Pod ||
3503 info->arg_kind == clspv::ArgKind::PodUBO ||
3504 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003505 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003506 }
3507
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003508 // Local pointer arguments are unused in this case. Offset is always
3509 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003510 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003511 F.getName().str(),
3512 arg->getName().str(),
3513 arg_index,
3514 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3515 0,
3516 0,
3517 0,
3518 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003519 descriptorMapEntries->emplace_back(std::move(kernel_data),
3520 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003521 }
3522 arg_index++;
3523 }
3524 // Generate mappings for pointer-to-local arguments.
3525 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3526 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003527 auto where = LocalArgSpecIds.find(arg);
3528 if (where != LocalArgSpecIds.end()) {
3529 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003530        // Pod argument members are unused in this case.
3531 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003532 F.getName().str(),
3533 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003534 arg_index,
3535 ArgKind::Local,
3536 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003537 static_cast<uint32_t>(
3538 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003539 0,
3540 0};
3541 // Pointer-to-local arguments do not utilize descriptor set and binding.
3542 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003543 }
3544 }
3545 }
3546}
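// For reference, the kernel_arg_map metadata consumed above carries seven
// operands per argument, in this order (hypothetical example values):
//
//   !{!"src",   ; argument name
//     i32 0,    ; ordinal in the original kernel signature
//     i32 1,    ; remapped argument index
//     i32 16,   ; byte offset (for clustered POD arguments)
//     i32 4,    ; argument size in bytes
//     !"pod",   ; argument kind string
//     i32 -1}   ; spec id; values > 0 mark pointer-to-local arguments (the
//               ; -1 here is just an assumed "none" placeholder)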
3547
David Neto22f144c2017-06-12 14:26:21 -04003548void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
SJW69939d52020-04-16 07:29:07 -05003549 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003550 ValueMapType &VMap = getValueMap();
3551 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003552 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3553 auto &GlobalConstArgSet = getGlobalConstArgSet();
3554
3555 FunctionType *FTy = F.getFunctionType();
3556
3557 //
David Neto22f144c2017-06-12 14:26:21 -04003558  // Generate OpFunction.
3559 //
3560
3561 // FOps[0] : Result Type ID
3562 // FOps[1] : Function Control
3563 // FOps[2] : Function Type ID
3564 SPIRVOperandList FOps;
3565
3566 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003567 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003568
3569 // Check function attributes for SPIRV Function Control.
3570 uint32_t FuncControl = spv::FunctionControlMaskNone;
3571 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3572 FuncControl |= spv::FunctionControlInlineMask;
3573 }
3574 if (F.hasFnAttribute(Attribute::NoInline)) {
3575 FuncControl |= spv::FunctionControlDontInlineMask;
3576 }
3577 // TODO: Check llvm attribute for Function Control Pure.
3578 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3579 FuncControl |= spv::FunctionControlPureMask;
3580 }
3581 // TODO: Check llvm attribute for Function Control Const.
3582 if (F.hasFnAttribute(Attribute::ReadNone)) {
3583 FuncControl |= spv::FunctionControlConstMask;
3584 }
3585
David Neto257c3892018-04-11 13:19:45 -04003586 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003587
3588 uint32_t FTyID;
3589 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3590 SmallVector<Type *, 4> NewFuncParamTys;
3591 FunctionType *NewFTy =
3592 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3593 FTyID = lookupType(NewFTy);
3594 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003595 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003596 if (GlobalConstFuncTyMap.count(FTy)) {
3597 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3598 } else {
3599 FTyID = lookupType(FTy);
3600 }
3601 }
3602
David Neto257c3892018-04-11 13:19:45 -04003603 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003604
3605 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3606 EntryPoints.push_back(std::make_pair(&F, nextID));
3607 }
3608
3609 VMap[&F] = nextID;
3610
David Neto482550a2018-03-24 05:21:07 -07003611 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003612 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3613 }
David Neto22f144c2017-06-12 14:26:21 -04003614 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003615 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003616 SPIRVInstList.push_back(FuncInst);
3617
3618 //
3619 // Generate OpFunctionParameter for Normal function.
3620 //
3621
3622 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003623
David Neto22f144c2017-06-12 14:26:21 -04003624 // Iterate Argument for name instead of param type from function type.
3625 unsigned ArgIdx = 0;
3626 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003627 uint32_t param_id = nextID++;
3628 VMap[&Arg] = param_id;
3629
3630 if (CalledWithCoherentResource(Arg)) {
3631 // If the arg is passed a coherent resource ever, then decorate this
3632 // parameter with Coherent too.
3633 SPIRVOperandList decoration_ops;
3634 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05003635 getSPIRVInstList(kAnnotations)
3636 .push_back(new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003637 }
David Neto22f144c2017-06-12 14:26:21 -04003638
3639 // ParamOps[0] : Result Type ID
3640 SPIRVOperandList ParamOps;
3641
3642 // Find SPIRV instruction for parameter type.
3643 uint32_t ParamTyID = lookupType(Arg.getType());
3644 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3645 if (GlobalConstFuncTyMap.count(FTy)) {
3646 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3647 Type *EleTy = PTy->getPointerElementType();
3648 Type *ArgTy =
3649 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3650 ParamTyID = lookupType(ArgTy);
3651 GlobalConstArgSet.insert(&Arg);
3652 }
3653 }
3654 }
David Neto257c3892018-04-11 13:19:45 -04003655 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003656
3657 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003658 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003659 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003660 SPIRVInstList.push_back(ParamInst);
3661
3662 ArgIdx++;
3663 }
3664 }
3665}
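// Minimal sketch of the prologue for a non-kernel function (IDs illustrative):
//
//   %f  = OpFunction %ret_type <function control mask> %fn_type
//   %p0 = OpFunctionParameter %param0_type
//   %p1 = OpFunctionParameter %param1_type
//
// Kernels differ in that their SPIR-V function type is rewritten to take no
// parameters (their arguments are reached through resource variables instead)
// and the function is also recorded as an entry point.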
3666
alan-bakerb6b09dc2018-11-08 16:59:28 -05003667void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003668 EntryPointVecType &EntryPoints = getEntryPointVec();
3669 ValueMapType &VMap = getValueMap();
3670 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
David Neto22f144c2017-06-12 14:26:21 -04003671 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3672
SJW69939d52020-04-16 07:29:07 -05003673 SPIRVInstructionList &SPIRVCapabilities = getSPIRVInstList(kCapabilities);
David Neto22f144c2017-06-12 14:26:21 -04003674 //
3675 // Generate OpCapability
3676 //
3677  // TODO: Which llvm information is mapped to SPIRV Capability?
3678
3679 // Ops[0] = Capability
3680 SPIRVOperandList Ops;
3681
David Neto87846742018-04-11 17:36:22 -04003682 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003683 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
SJW69939d52020-04-16 07:29:07 -05003684 SPIRVCapabilities.push_back(CapInst);
David Neto22f144c2017-06-12 14:26:21 -04003685
alan-bakerf906d2b2019-12-10 11:26:23 -05003686 bool write_without_format = false;
3687 bool sampled_1d = false;
3688 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003689 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003690 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3691 // Generate OpCapability for i8 type.
SJW69939d52020-04-16 07:29:07 -05003692 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003693 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003694 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003695 // Generate OpCapability for i16 type.
SJW69939d52020-04-16 07:29:07 -05003696 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003697 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003698 } else if (Ty->isIntegerTy(64)) {
3699 // Generate OpCapability for i64 type.
SJW69939d52020-04-16 07:29:07 -05003700 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003701 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003702 } else if (Ty->isHalfTy()) {
3703 // Generate OpCapability for half type.
SJW69939d52020-04-16 07:29:07 -05003704 SPIRVCapabilities.push_back(new SPIRVInstruction(
3705 spv::OpCapability, MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003706 } else if (Ty->isDoubleTy()) {
3707 // Generate OpCapability for double type.
SJW69939d52020-04-16 07:29:07 -05003708 SPIRVCapabilities.push_back(new SPIRVInstruction(
3709 spv::OpCapability, MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003710 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3711 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003712 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003713 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003714 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003715 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003716 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003717 write_without_format = true;
3718 }
3719 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003720 STy->getName().startswith("opencl.image1d_wo_t") ||
3721 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3722 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003723 if (STy->getName().contains(".sampled"))
3724 sampled_1d = true;
3725 else
3726 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003727 }
3728 }
3729 }
3730 }
3731
alan-bakerf906d2b2019-12-10 11:26:23 -05003732 if (write_without_format) {
3733 // Generate OpCapability for write only image type.
SJW69939d52020-04-16 07:29:07 -05003734 SPIRVCapabilities.push_back(new SPIRVInstruction(
3735 spv::OpCapability,
3736 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003737 }
3738 if (image_1d) {
3739 // Generate OpCapability for unsampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003740 SPIRVCapabilities.push_back(new SPIRVInstruction(
3741 spv::OpCapability, {MkNum(spv::CapabilityImage1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003742 } else if (sampled_1d) {
3743 // Generate OpCapability for sampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003744 SPIRVCapabilities.push_back(new SPIRVInstruction(
3745 spv::OpCapability, {MkNum(spv::CapabilitySampled1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003746 }
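
  // Illustrative sketch only: for a module that uses 16-bit integers and
  // writes to a write_only image2d_t, the capability section built above would
  // contain roughly:
  //
  //   OpCapability Shader
  //   OpCapability Int16
  //   OpCapability StorageImageWriteWithoutFormat
  //
  // The exact set depends on the types recorded in getTypeList() and on
  // command-line options (e.g. Int8Support).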

  { // OpCapability ImageQuery
    bool hasImageQuery = false;
    for (const auto &SymVal : module.getValueSymbolTable()) {
      if (auto F = dyn_cast<Function>(SymVal.getValue())) {
        if (IsImageQuery(F)) {
          hasImageQuery = true;
          break;
        }
      }
    }

    if (hasImageQuery) {
      SPIRVCapabilities.push_back(new SPIRVInstruction(
          spv::OpCapability, {MkNum(spv::CapabilityImageQuery)}));
    }
  }

  if (hasVariablePointers()) {
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointers);

    SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
  } else if (hasVariablePointersStorageBuffer()) {
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);

    SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
  }

  SPIRVInstructionList &SPIRVExtensions = getSPIRVInstList(kExtensions);
  // Always add the storage buffer extension
  {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
    SPIRVExtensions.push_back(ExtensionInst);
  }

  if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
    SPIRVExtensions.push_back(ExtensionInst);
  }
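
  // Illustrative sketch only: with variable pointers enabled, the extension
  // section built above ends up as:
  //
  //   OpExtension "SPV_KHR_storage_buffer_storage_class"
  //   OpExtension "SPV_KHR_variable_pointers"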

  //
  // Generate OpMemoryModel
  //
  // Memory model for Vulkan will always be GLSL450.

  // Ops[0] = Addressing Model
  // Ops[1] = Memory Model
  Ops.clear();
  Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);

  auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
  getSPIRVInstList(kMemoryModel).push_back(MemModelInst);

  SPIRVInstructionList &SPIRVEntryPoints = getSPIRVInstList(kEntryPoints);
  //
  // Generate OpEntryPoint
  //
  for (auto EntryPoint : EntryPoints) {
    // Ops[0] = Execution Model
    // Ops[1] = EntryPoint ID
    // Ops[2] = Name (Literal String)
    // ...
    //
    // TODO: Do we need to consider Interface ID for forward references???
    Ops.clear();
    const StringRef &name = EntryPoint.first->getName();
    Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
        << MkString(name);

    for (Value *Interface : EntryPointInterfaces) {
      Ops << MkId(VMap[Interface]);
    }

    auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
    SPIRVEntryPoints.push_back(EntryPointInst);
  }

  SPIRVInstructionList &SPIRVExecutionModes = getSPIRVInstList(kExecutionModes);
  for (auto EntryPoint : EntryPoints) {
    if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
                               ->getMetadata("reqd_work_group_size")) {

      if (!BuiltinDimVec.empty()) {
        llvm_unreachable(
            "Kernels should have consistent work group size definition");
      }

      //
      // Generate OpExecutionMode
      //

      // Ops[0] = Entry Point ID
      // Ops[1] = Execution Mode
      // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
      Ops.clear();
      Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);

      uint32_t XDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
      uint32_t YDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
      uint32_t ZDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

      Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);

      auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
      SPIRVExecutionModes.push_back(ExecModeInst);
    }
  }
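
  // Illustrative sketch only: for a kernel declared as
  //   kernel void __attribute__((reqd_work_group_size(8, 8, 1))) foo(...)
  // the sections built above contain roughly:
  //
  //   OpEntryPoint GLCompute %foo "foo" <interface ids...>
  //   OpExecutionMode %foo LocalSize 8 8 1
  //
  // where %foo is the hypothetical ID assigned to the kernel function.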

  //
  // Generate OpSource.
  //
  // Ops[0] = SourceLanguage ID
  // Ops[1] = Version (LiteralNum)
  //
  Ops.clear();
  switch (clspv::Option::Language()) {
  case clspv::Option::SourceLanguage::OpenCL_C_10:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_11:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_12:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_20:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
    break;
  case clspv::Option::SourceLanguage::OpenCL_CPP:
    Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
    break;
  default:
    Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
    break;
  }

  auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
  getSPIRVInstList(kDebug).push_back(OpenSourceInst);

  if (!BuiltinDimVec.empty()) {
    SPIRVInstructionList &SPIRVAnnotations = getSPIRVInstList(kAnnotations);
    //
    // Generate OpDecorates for x/y/z dimension.
    //
    // Ops[0] = Target ID
    // Ops[1] = Decoration (SpecId)
    // Ops[2] = Specialization Constant ID (Literal Number)

    // X Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
    SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));

    // Y Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
    SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));

    // Z Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
    SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
  }
}
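
// Illustrative sketch only: when the workgroup size is not fixed at compile
// time, the x/y/z components of the WorkgroupSize builtin are specialization
// constants, and the decorations generated above look roughly like:
//
//   OpDecorate %wgsize_x SpecId 0
//   OpDecorate %wgsize_y SpecId 1
//   OpDecorate %wgsize_z SpecId 2
//
// (%wgsize_* are hypothetical IDs; the actual IDs come from BuiltinDimVec.)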

void SPIRVProducerPass::GenerateEntryPointInitialStores() {
  // Work around a driver bug. Initializers on Private variables might not
  // work. So the start of the kernel should store the initializer value to the
  // variables. Yes, *every* entry point pays this cost if *any* entry point
  // uses this builtin. At this point I judge this to be an acceptable tradeoff
  // of complexity vs. runtime, for a broken driver.
  // TODO(dneto): Remove this at some point once fixed drivers are widely
  // available.
  if (WorkgroupSizeVarID) {
    assert(WorkgroupSizeValueID);

    SPIRVOperandList Ops;
    Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);

    auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
    getSPIRVInstList(kFunctions).push_back(Inst);
  }
}

void SPIRVProducerPass::GenerateFuncBody(Function &F) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
  ValueMapType &VMap = getValueMap();

  const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;

  for (BasicBlock &BB : F) {
    // Register BasicBlock to ValueMap.
    VMap[&BB] = nextID;

    //
    // Generate OpLabel for Basic Block.
    //
    SPIRVOperandList Ops;
    auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
    SPIRVInstList.push_back(Inst);

    // OpVariable instructions must come first.
    for (Instruction &I : BB) {
      if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
        // Allocating a pointer requires variable pointers.
        if (alloca->getAllocatedType()->isPointerTy()) {
          setVariablePointersCapabilities(
              alloca->getAllocatedType()->getPointerAddressSpace());
        }
        GenerateInstruction(I);
      }
    }

    if (&BB == &F.getEntryBlock() && IsKernel) {
      if (clspv::Option::HackInitializers()) {
        GenerateEntryPointInitialStores();
      }
    }

    for (Instruction &I : BB) {
      if (!isa<AllocaInst>(I)) {
        GenerateInstruction(I);
      }
    }
  }
}

spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
  const std::map<CmpInst::Predicate, spv::Op> Map = {
      {CmpInst::ICMP_EQ, spv::OpIEqual},
      {CmpInst::ICMP_NE, spv::OpINotEqual},
      {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
      {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
      {CmpInst::ICMP_ULT, spv::OpULessThan},
      {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
      {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
      {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
      {CmpInst::ICMP_SLT, spv::OpSLessThan},
      {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
      {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
      {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
      {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
      {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
      {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
      {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
      {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
      {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
      {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
      {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
      {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
      {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};

  assert(0 != Map.count(I->getPredicate()));

  return Map.at(I->getPredicate());
}
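
// Example (derived from the table above): an LLVM "icmp ult" lowers to
// OpULessThan, while "fcmp oeq" lowers to OpFOrdEqual. Pointer comparisons are
// rejected later, in the ICmp/FCmp handling of GenerateInstruction.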

spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
  const std::map<unsigned, spv::Op> Map{
      {Instruction::Trunc, spv::OpUConvert},
      {Instruction::ZExt, spv::OpUConvert},
      {Instruction::SExt, spv::OpSConvert},
      {Instruction::FPToUI, spv::OpConvertFToU},
      {Instruction::FPToSI, spv::OpConvertFToS},
      {Instruction::UIToFP, spv::OpConvertUToF},
      {Instruction::SIToFP, spv::OpConvertSToF},
      {Instruction::FPTrunc, spv::OpFConvert},
      {Instruction::FPExt, spv::OpFConvert},
      {Instruction::BitCast, spv::OpBitcast}};

  assert(0 != Map.count(I.getOpcode()));

  return Map.at(I.getOpcode());
}

spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
  if (I.getType()->isIntOrIntVectorTy(1)) {
    switch (I.getOpcode()) {
    default:
      break;
    case Instruction::Or:
      return spv::OpLogicalOr;
    case Instruction::And:
      return spv::OpLogicalAnd;
    case Instruction::Xor:
      return spv::OpLogicalNotEqual;
    }
  }

  const std::map<unsigned, spv::Op> Map{
      {Instruction::Add, spv::OpIAdd},
      {Instruction::FAdd, spv::OpFAdd},
      {Instruction::Sub, spv::OpISub},
      {Instruction::FSub, spv::OpFSub},
      {Instruction::Mul, spv::OpIMul},
      {Instruction::FMul, spv::OpFMul},
      {Instruction::UDiv, spv::OpUDiv},
      {Instruction::SDiv, spv::OpSDiv},
      {Instruction::FDiv, spv::OpFDiv},
      {Instruction::URem, spv::OpUMod},
      {Instruction::SRem, spv::OpSRem},
      {Instruction::FRem, spv::OpFRem},
      {Instruction::Or, spv::OpBitwiseOr},
      {Instruction::Xor, spv::OpBitwiseXor},
      {Instruction::And, spv::OpBitwiseAnd},
      {Instruction::Shl, spv::OpShiftLeftLogical},
      {Instruction::LShr, spv::OpShiftRightLogical},
      {Instruction::AShr, spv::OpShiftRightArithmetic}};

  assert(0 != Map.count(I.getOpcode()));

  return Map.at(I.getOpcode());
}
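
// Example (derived from the code above): for boolean (i1 or vector-of-i1)
// operands, bitwise LLVM operators are remapped to logical SPIR-V opcodes, so
// "xor i1 %a, %b" becomes OpLogicalNotEqual rather than OpBitwiseXor. For all
// other integer and float types the table lookup applies directly, e.g.
// "fdiv" becomes OpFDiv.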

void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
  ValueMapType &VMap = getValueMap();
  DeferredInstVecType &DeferredInsts = getDeferredInstVec();
  LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();

  // Register Instruction to ValueMap.
  if (0 == VMap[&I]) {
    VMap[&I] = nextID;
  }

  switch (I.getOpcode()) {
  default: {
    if (Instruction::isCast(I.getOpcode())) {
      //
      // Generate SPIRV instructions for cast operators.
      //

      auto Ty = I.getType();
      auto OpTy = I.getOperand(0)->getType();
      auto toI8 = Ty == Type::getInt8Ty(Context);
      auto fromI32 = OpTy == Type::getInt32Ty(Context);
      // Handle zext, sext and uitofp with i1 type specially.
      if ((I.getOpcode() == Instruction::ZExt ||
           I.getOpcode() == Instruction::SExt ||
           I.getOpcode() == Instruction::UIToFP) &&
          OpTy->isIntOrIntVectorTy(1)) {
        //
        // Generate OpSelect.
        //

        // Ops[0] = Result Type ID
        // Ops[1] = Condition ID
        // Ops[2] = True Constant ID
        // Ops[3] = False Constant ID
        SPIRVOperandList Ops;

        Ops << MkId(lookupType(I.getType()));

        uint32_t CondID = VMap[I.getOperand(0)];
        Ops << MkId(CondID);

        uint32_t TrueID = 0;
        if (I.getOpcode() == Instruction::ZExt) {
          TrueID = VMap[ConstantInt::get(I.getType(), 1)];
        } else if (I.getOpcode() == Instruction::SExt) {
          TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
        } else {
          TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
        }
        Ops << MkId(TrueID);

        uint32_t FalseID = 0;
        if (I.getOpcode() == Instruction::ZExt) {
          FalseID = VMap[Constant::getNullValue(I.getType())];
        } else if (I.getOpcode() == Instruction::SExt) {
          FalseID = VMap[Constant::getNullValue(I.getType())];
        } else {
          FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
        }
        Ops << MkId(FalseID);

        auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
        SPIRVInstList.push_back(Inst);
      } else if (!clspv::Option::Int8Support() &&
                 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
        // The SPIR-V target type is a 32-bit int. Keep only the bottom
        // 8 bits.
        // Before:
        //   %result = trunc i32 %a to i8
        // After:
        //   %result = OpBitwiseAnd %uint %a %uint_255

        SPIRVOperandList Ops;

        Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);

        Type *UintTy = Type::getInt32Ty(Context);
        uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
        Ops << MkId(MaskID);

        auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
        SPIRVInstList.push_back(Inst);
      } else {
        // Ops[0] = Result Type ID
        // Ops[1] = Source Value ID
        SPIRVOperandList Ops;

        Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);

        auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
        SPIRVInstList.push_back(Inst);
      }
    } else if (isa<BinaryOperator>(I)) {
      //
      // Generate SPIRV instructions for binary operators.
      //

      // Handle xor with i1 type specially.
      if (I.getOpcode() == Instruction::Xor &&
          I.getType() == Type::getInt1Ty(Context) &&
          ((isa<ConstantInt>(I.getOperand(0)) &&
            !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
           (isa<ConstantInt>(I.getOperand(1)) &&
            !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
        //
        // Generate OpLogicalNot.
        //
        // Ops[0] = Result Type ID
        // Ops[1] = Operand
        SPIRVOperandList Ops;

        Ops << MkId(lookupType(I.getType()));

        Value *CondV = I.getOperand(0);
        if (isa<Constant>(I.getOperand(0))) {
          CondV = I.getOperand(1);
        }
        Ops << MkId(VMap[CondV]);

        auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
        SPIRVInstList.push_back(Inst);
      } else {
        // Ops[0] = Result Type ID
        // Ops[1] = Operand 0
        // Ops[2] = Operand 1
        SPIRVOperandList Ops;

        Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
            << MkId(VMap[I.getOperand(1)]);

        auto *Inst =
            new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
        SPIRVInstList.push_back(Inst);
      }
    } else if (I.getOpcode() == Instruction::FNeg) {
      // The only unary operator.
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Operand 0
      SPIRVOperandList ops;

      ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
      auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
      SPIRVInstList.push_back(Inst);
    } else {
      I.print(errs());
      llvm_unreachable("Unsupported instruction???");
    }
    break;
  }
  case Instruction::GetElementPtr: {
    auto &GlobalConstArgSet = getGlobalConstArgSet();

    //
    // Generate OpAccessChain.
    //
    GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);

    // Ops[0] = Result Type ID
    // Ops[1] = Base ID
    // Ops[2] ... Ops[n] = Indexes ID
    SPIRVOperandList Ops;

    PointerType *ResultType = cast<PointerType>(GEP->getType());
    if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
        GlobalConstArgSet.count(GEP->getPointerOperand())) {
      // Use a pointer type with private address space for a global constant.
      Type *EleTy = I.getType()->getPointerElementType();
      ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
    }

    Ops << MkId(lookupType(ResultType));

    // Generate the base pointer.
    Ops << MkId(VMap[GEP->getPointerOperand()]);

    // TODO(dneto): Simplify the following?

    //
    // Follow these rules for the gep:
    //
    // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
    //    first index.
    // 2. If gep's first index is not 0, generate OpPtrAccessChain and use
    //    gep's first index.
    // 3. If gep's first index is not constant, generate OpPtrAccessChain and
    //    use gep's first index.
    // 4. If it is not case 1, 2 or 3 above, generate OpAccessChain and use
    //    gep's first index.
    //
    spv::Op Opcode = spv::OpAccessChain;
    unsigned offset = 0;
    if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
      if (CstInt->getZExtValue() == 0) {
        offset = 1;
      } else if (CstInt->getZExtValue() != 0) {
        Opcode = spv::OpPtrAccessChain;
      }
    } else {
      Opcode = spv::OpPtrAccessChain;
    }

    if (Opcode == spv::OpPtrAccessChain) {
      // Do we need to generate ArrayStride? Check against the GEP result type
      // rather than the pointer type of the base because when indexing into
      // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
      // for something else in the SPIR-V.
      // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
      auto address_space = ResultType->getAddressSpace();
      setVariablePointersCapabilities(address_space);
      switch (GetStorageClass(address_space)) {
      case spv::StorageClassStorageBuffer:
      case spv::StorageClassUniform:
        // Save the need to generate an ArrayStride decoration. But defer
        // generation until later, so we only make one decoration.
        getTypesNeedingArrayStride().insert(ResultType);
        break;
      default:
        break;
      }
    }

    for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
      Ops << MkId(VMap[*II]);
    }

    auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
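
  // Illustrative sketch only: for an LLVM GEP whose first index is the
  // constant 0, e.g.
  //   %p = getelementptr %struct, %struct addrspace(1)* %base, i32 0, i32 2
  // the code above drops the leading 0 and emits roughly
  //   %p = OpAccessChain %ptr_type %base %uint_2
  // whereas a non-zero or non-constant first index produces OpPtrAccessChain
  // and keeps that first index as the Element operand.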
  case Instruction::ExtractValue: {
    ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
    // Ops[0] = Result Type ID
    // Ops[1] = Composite ID
    // Ops[2] ... Ops[n] = Indexes (Literal Number)
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(I.getType()));

    uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
    Ops << MkId(CompositeID);

    for (auto &Index : EVI->indices()) {
      Ops << MkNum(Index);
    }

    auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::InsertValue: {
    InsertValueInst *IVI = cast<InsertValueInst>(&I);
    // Ops[0] = Result Type ID
    // Ops[1] = Object ID
    // Ops[2] = Composite ID
    // Ops[3] ... Ops[n] = Indexes (Literal Number)
    SPIRVOperandList Ops;

    uint32_t ResTyID = lookupType(I.getType());
    Ops << MkId(ResTyID);

    uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
    Ops << MkId(ObjectID);

    uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
    Ops << MkId(CompositeID);

    for (auto &Index : IVI->indices()) {
      Ops << MkNum(Index);
    }

    auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::Select: {
    //
    // Generate OpSelect.
    //

    // Ops[0] = Result Type ID
    // Ops[1] = Condition ID
    // Ops[2] = True Constant ID
    // Ops[3] = False Constant ID
    SPIRVOperandList Ops;

    // Find SPIRV instruction for parameter type.
    auto Ty = I.getType();
    if (Ty->isPointerTy()) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      } else {
        // Selecting between pointers requires variable pointers.
        setVariablePointersCapabilities(Ty->getPointerAddressSpace());
        if (!hasVariablePointers() && !selectFromSameObject(&I)) {
          setVariablePointers(true);
        }
      }
    }

    Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
        << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);

    auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::ExtractElement: {
    // Handle <4 x i8> type manually.
    Type *CompositeTy = I.getOperand(0)->getType();
    if (is4xi8vec(CompositeTy)) {
      //
      // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
      // <4 x i8>.
      //

      //
      // Generate OpShiftRightLogical
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Operand 0
      // Ops[2] = Operand 1
      //
      SPIRVOperandList Ops;

      Ops << MkId(lookupType(CompositeTy));

      uint32_t Op0ID = VMap[I.getOperand(0)];
      Ops << MkId(Op0ID);

      uint32_t Op1ID = 0;
      if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
        // Handle constant index.
        uint64_t Idx = CI->getZExtValue();
        Value *ShiftAmount =
            ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
        Op1ID = VMap[ShiftAmount];
      } else {
        // Handle variable index.
        SPIRVOperandList TmpOps;

        TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
               << MkId(VMap[I.getOperand(1)]);

        ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
        TmpOps << MkId(VMap[Cst8]);

        Op1ID = nextID;

        auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
        SPIRVInstList.push_back(TmpInst);
      }
      Ops << MkId(Op1ID);

      uint32_t ShiftID = nextID;

      auto *Inst =
          new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      //
      // Generate OpBitwiseAnd
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Operand 0
      // Ops[2] = Operand 1
      //
      Ops.clear();

      Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);

      Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
      Ops << MkId(VMap[CstFF]);

      // Reset mapping for this value to the result of the bitwise and.
      VMap[&I] = nextID;

      Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
      SPIRVInstList.push_back(Inst);
      break;
    }

    // Ops[0] = Result Type ID
    // Ops[1] = Composite ID
    // Ops[2] ... Ops[n] = Indexes (Literal Number)
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);

    spv::Op Opcode = spv::OpCompositeExtract;
    if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
      Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
    } else {
      Ops << MkId(VMap[I.getOperand(1)]);
      Opcode = spv::OpVectorExtractDynamic;
    }

    auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
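
  // Illustrative sketch only: because <4 x i8> is modeled as a single 32-bit
  // word, "extractelement <4 x i8> %v, i32 2" is lowered above to roughly
  //   %shifted = OpShiftRightLogical %uint %v %uint_16   ; 2 * 8 bits
  //   %result  = OpBitwiseAnd %uint %shifted %uint_255
  // For ordinary vector types a plain OpCompositeExtract (constant index) or
  // OpVectorExtractDynamic (variable index) is emitted instead.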
  case Instruction::InsertElement: {
    // Handle <4 x i8> type manually.
    Type *CompositeTy = I.getOperand(0)->getType();
    if (is4xi8vec(CompositeTy)) {
      Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
      uint32_t CstFFID = VMap[CstFF];

      uint32_t ShiftAmountID = 0;
      if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
        // Handle constant index.
        uint64_t Idx = CI->getZExtValue();
        Value *ShiftAmount =
            ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
        ShiftAmountID = VMap[ShiftAmount];
      } else {
        // Handle variable index.
        SPIRVOperandList TmpOps;

        TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
               << MkId(VMap[I.getOperand(2)]);

        ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
        TmpOps << MkId(VMap[Cst8]);

        ShiftAmountID = nextID;

        auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
        SPIRVInstList.push_back(TmpInst);
      }

      //
      // Generate mask operations.
      //

      // ShiftLeft mask according to index of insertelement.
      SPIRVOperandList Ops;

      const uint32_t ResTyID = lookupType(CompositeTy);
      Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);

      uint32_t MaskID = nextID;

      auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Inverse mask.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(MaskID);

      uint32_t InvMaskID = nextID;

      Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Apply mask.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);

      uint32_t OrgValID = nextID;

      Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Create correct value according to index of insertelement.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
          << MkId(ShiftAmountID);

      uint32_t InsertValID = nextID;

      Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Insert value to original value.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);

      VMap[&I] = nextID;

      Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      break;
    }

    SPIRVOperandList Ops;

    // Ops[0] = Result Type ID
    Ops << MkId(lookupType(I.getType()));

    spv::Op Opcode = spv::OpCompositeInsert;
    if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
      const auto value = CI->getZExtValue();
      assert(value <= UINT32_MAX);
      // Ops[1] = Object ID
      // Ops[2] = Composite ID
      // Ops[3] ... Ops[n] = Indexes (Literal Number)
      Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
          << MkNum(static_cast<uint32_t>(value));
    } else {
      // Ops[1] = Composite ID
      // Ops[2] = Object ID
      // Ops[3] ... Ops[n] = Indexes (Literal Number)
      Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
          << MkId(VMap[I.getOperand(2)]);
      Opcode = spv::OpVectorInsertDynamic;
    }

    auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
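
  // Illustrative sketch only: for the packed <4 x i8> case above, inserting a
  // byte at index N is a shift/mask/merge sequence, roughly:
  //   %mask    = OpShiftLeftLogical %uint %uint_255 %shift   ; shift = N * 8
  //   %invmask = OpNot %uint %mask
  //   %keep    = OpBitwiseAnd %uint %vec %invmask
  //   %newval  = OpShiftLeftLogical %uint %byte %shift
  //   %result  = OpBitwiseOr %uint %keep %newval
  // Ordinary vectors use OpCompositeInsert or OpVectorInsertDynamic.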
  case Instruction::ShuffleVector: {
    // Ops[0] = Result Type ID
    // Ops[1] = Vector 1 ID
    // Ops[2] = Vector 2 ID
    // Ops[3] ... Ops[n] = Components (Literal Number)
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
        << MkId(VMap[I.getOperand(1)]);

    auto shuffle = cast<ShuffleVectorInst>(&I);
    SmallVector<int, 4> mask;
    shuffle->getShuffleMask(mask);
    for (auto i : mask) {
      if (i == UndefMaskElem) {
        if (clspv::Option::HackUndef())
          // Use 0 instead of undef.
          Ops << MkNum(0);
        else
          // Undef for shuffle in SPIR-V.
          Ops << MkNum(0xffffffff);
      } else {
        Ops << MkNum(i);
      }
    }

    auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::ICmp:
  case Instruction::FCmp: {
    CmpInst *CmpI = cast<CmpInst>(&I);

    // Pointer equality is invalid.
    Type *ArgTy = CmpI->getOperand(0)->getType();
    if (isa<PointerType>(ArgTy)) {
      CmpI->print(errs());
      std::string name = I.getParent()->getParent()->getName().str();
      errs()
          << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
          << "in function " << name << "\n";
      llvm_unreachable("Pointer equality check is invalid");
      break;
    }

    // Ops[0] = Result Type ID
    // Ops[1] = Operand 1 ID
    // Ops[2] = Operand 2 ID
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
        << MkId(VMap[CmpI->getOperand(1)]);

    spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
    auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::Br: {
    // The branch instruction is deferred because it needs the IDs of its
    // target labels. Record the slot's location in SPIRVInstructionList.
    DeferredInsts.push_back(
        std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
    break;
  }
  case Instruction::Switch: {
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
  case Instruction::IndirectBr: {
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
  case Instruction::PHI: {
    // The PHI instruction is deferred because it needs the IDs of its incoming
    // basic blocks. Record the slot's location in SPIRVInstructionList.
    DeferredInsts.push_back(
        std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
    break;
  }
  case Instruction::Alloca: {
    //
    // Generate OpVariable.
    //
    // Ops[0] : Result Type ID
    // Ops[1] : Storage Class
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);

    auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::Load: {
    LoadInst *LD = cast<LoadInst>(&I);
    //
    // Generate OpLoad.
    //

    if (LD->getType()->isPointerTy()) {
      // Loading a pointer requires variable pointers.
      setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
    }

    uint32_t ResTyID = lookupType(LD->getType());
    uint32_t PointerID = VMap[LD->getPointerOperand()];

    // This is a hack to work around what looks like a driver bug.
    // When we're loading from the special variable holding the WorkgroupSize
    // builtin value, use an OpBitwiseAnd of the value's ID rather than
    // generating a load.
    // TODO(dneto): Remove this awful hack once drivers are fixed.
    if (PointerID == WorkgroupSizeVarID) {
      // Generate a bitwise-and of the original value with itself.
      // We should have been able to get away with just an OpCopyObject,
      // but we need something more complex to get past certain driver bugs.
      // This is ridiculous, but necessary.
      // TODO(dneto): Revisit this once drivers fix their bugs.

      SPIRVOperandList Ops;
      Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
          << MkId(WorkgroupSizeValueID);

      auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
      SPIRVInstList.push_back(Inst);
      break;
    }

    // This is the normal path. Generate a load.

    // Ops[0] = Result Type ID
    // Ops[1] = Pointer ID
    // Ops[2] ... Ops[n] = Optional Memory Access
    //
    // TODO: Do we need to implement Optional Memory Access???

    SPIRVOperandList Ops;
    Ops << MkId(ResTyID) << MkId(PointerID);

    auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::Store: {
    StoreInst *ST = cast<StoreInst>(&I);
    //
    // Generate OpStore.
    //

    if (ST->getValueOperand()->getType()->isPointerTy()) {
      // Storing a pointer requires variable pointers.
      setVariablePointersCapabilities(
          ST->getValueOperand()->getType()->getPointerAddressSpace());
    }

    // Ops[0] = Pointer ID
    // Ops[1] = Object ID
    // Ops[2] ... Ops[n] = Optional Memory Access (later???)
    //
    // TODO: Do we need to implement Optional Memory Access???
    SPIRVOperandList Ops;
    Ops << MkId(VMap[ST->getPointerOperand()])
        << MkId(VMap[ST->getValueOperand()]);

    auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::AtomicCmpXchg: {
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
  case Instruction::AtomicRMW: {
    AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);

    spv::Op opcode;

    switch (AtomicRMW->getOperation()) {
    default:
      I.print(errs());
      llvm_unreachable("Unsupported instruction???");
    case llvm::AtomicRMWInst::Add:
      opcode = spv::OpAtomicIAdd;
      break;
    case llvm::AtomicRMWInst::Sub:
      opcode = spv::OpAtomicISub;
      break;
    case llvm::AtomicRMWInst::Xchg:
      opcode = spv::OpAtomicExchange;
      break;
    case llvm::AtomicRMWInst::Min:
      opcode = spv::OpAtomicSMin;
      break;
    case llvm::AtomicRMWInst::Max:
      opcode = spv::OpAtomicSMax;
      break;
    case llvm::AtomicRMWInst::UMin:
      opcode = spv::OpAtomicUMin;
      break;
    case llvm::AtomicRMWInst::UMax:
      opcode = spv::OpAtomicUMax;
      break;
    case llvm::AtomicRMWInst::And:
      opcode = spv::OpAtomicAnd;
      break;
    case llvm::AtomicRMWInst::Or:
      opcode = spv::OpAtomicOr;
      break;
    case llvm::AtomicRMWInst::Xor:
      opcode = spv::OpAtomicXor;
      break;
    }

    //
    // Generate OpAtomic*.
    //
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(I.getType()))
        << MkId(VMap[AtomicRMW->getPointerOperand()]);

    auto IntTy = Type::getInt32Ty(I.getContext());
    const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
    Ops << MkId(VMap[ConstantScopeDevice]);

    const auto ConstantMemorySemantics = ConstantInt::get(
        IntTy, spv::MemorySemanticsUniformMemoryMask |
                   spv::MemorySemanticsSequentiallyConsistentMask);
    Ops << MkId(VMap[ConstantMemorySemantics]);

    Ops << MkId(VMap[AtomicRMW->getValOperand()]);

    VMap[&I] = nextID;

    auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
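
  // Illustrative sketch only: an LLVM "atomicrmw add i32* %p, i32 %v seq_cst"
  // becomes roughly
  //   %old = OpAtomicIAdd %uint %p %scope %semantics %v
  // where %scope is the constant ScopeDevice and %semantics combines
  // UniformMemory with SequentiallyConsistent, matching the constants built
  // above.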
  case Instruction::Fence: {
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
  case Instruction::Call: {
    CallInst *Call = dyn_cast<CallInst>(&I);
    Function *Callee = Call->getCalledFunction();

    if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
      if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
        // Generate an OpLoad
        SPIRVOperandList Ops;
        const auto load_id = nextID++;

        Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
            << MkId(ResourceVarDeferredLoadCalls[Call]);

        auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
        SPIRVInstList.push_back(Inst);
        VMap[Call] = load_id;
        break;

      } else {
        // This maps to an OpVariable we've already generated.
        // No code is generated for the call.
      }
      break;
    } else if (Callee->getName().startswith(
                   clspv::WorkgroupAccessorFunction())) {
      // Don't codegen an instruction here, but instead map this call directly
      // to the workgroup variable id.
      int spec_id = static_cast<int>(
          cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
      const auto &info = LocalSpecIdInfoMap[spec_id];
      VMap[Call] = info.variable_id;
      break;
    }

    // Sampler initializers become a load of the corresponding sampler.

    if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
      // Map this to a load from the variable.
      const auto third_param = static_cast<unsigned>(
          dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
      auto sampler_value = third_param;
      if (clspv::Option::UseSamplerMap()) {
        sampler_value = getSamplerMap()[third_param].first;
      }

      // Generate an OpLoad
      SPIRVOperandList Ops;
      const auto load_id = nextID++;

      Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
          << MkId(SamplerLiteralToIDMap[sampler_value]);

      auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
      SPIRVInstList.push_back(Inst);
      VMap[Call] = load_id;
      break;
    }

    // Handle SPIR-V intrinsics
    spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
                         .Case("spirv.atomic_xor", spv::OpAtomicXor)
                         .Default(spv::OpNop);

    // If the StringSwitch above didn't have an entry, the intrinsic may be
    // using the name-mangling logic instead.
    bool usesMangler = false;
    if (opcode == spv::OpNop) {
      if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
        auto OpCst = cast<ConstantInt>(Call->getOperand(0));
        opcode = static_cast<spv::Op>(OpCst->getZExtValue());
        usesMangler = true;
      }
    }

    if (opcode != spv::OpNop) {

      SPIRVOperandList Ops;

      if (!I.getType()->isVoidTy()) {
        Ops << MkId(lookupType(I.getType()));
      }

      unsigned firstOperand = usesMangler ? 1 : 0;
      for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
        Ops << MkId(VMap[Call->getArgOperand(i)]);
      }

      if (!I.getType()->isVoidTy()) {
        VMap[&I] = nextID;
      }

      SPIRVInstruction *Inst;
      if (!I.getType()->isVoidTy()) {
        Inst = new SPIRVInstruction(opcode, nextID++, Ops);
      } else {
        Inst = new SPIRVInstruction(opcode, Ops);
      }
      SPIRVInstList.push_back(Inst);
      break;
    }
4956
David Neto22f144c2017-06-12 14:26:21 -04004957 // spirv.copy_memory.* intrinsics become OpMemoryMemory's.
4958 if (Callee->getName().startswith("spirv.copy_memory")) {
4959 //
4960 // Generate OpCopyMemory.
4961 //
4962
4963 // Ops[0] = Dst ID
4964 // Ops[1] = Src ID
4965 // Ops[2] = Memory Access
4966 // Ops[3] = Alignment
4967
4968 auto IsVolatile =
4969 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4970
4971 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4972 : spv::MemoryAccessMaskNone;
4973
4974 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4975
4976 auto Alignment =
4977 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4978
David Neto257c3892018-04-11 13:19:45 -04004979 SPIRVOperandList Ops;
4980 Ops << MkId(VMap[Call->getArgOperand(0)])
4981 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4982 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004983
David Neto87846742018-04-11 17:36:22 -04004984 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004985
4986 SPIRVInstList.push_back(Inst);
4987
4988 break;
4989 }
4990
SJW2c317da2020-03-23 07:39:13 -05004991 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4992 // Additionally, OpTypeSampledImage is generated.
SJW173c7e92020-03-16 08:44:47 -05004993 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004994 //
4995 // Generate OpSampledImage.
4996 //
4997 // Ops[0] = Result Type ID
4998 // Ops[1] = Image ID
4999 // Ops[2] = Sampler ID
5000 //
5001 SPIRVOperandList Ops;
5002
5003 Value *Image = Call->getArgOperand(0);
5004 Value *Sampler = Call->getArgOperand(1);
5005 Value *Coordinate = Call->getArgOperand(2);
5006
5007 TypeMapType &OpImageTypeMap = getImageTypeMap();
5008 Type *ImageTy = Image->getType()->getPointerElementType();
5009 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04005010 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005011 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04005012
5013 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04005014
5015 uint32_t SampledImageID = nextID;
5016
David Neto87846742018-04-11 17:36:22 -04005017 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005018 SPIRVInstList.push_back(Inst);
5019
5020 //
5021 // Generate OpImageSampleExplicitLod.
5022 //
5023 // Ops[0] = Result Type ID
5024 // Ops[1] = Sampled Image ID
5025 // Ops[2] = Coordinate ID
5026 // Ops[3] = Image Operands Type ID
5027 // Ops[4] ... Ops[n] = Operands ID
5028 //
5029 Ops.clear();
5030
alan-bakerf67468c2019-11-25 15:51:49 -05005031 const bool is_int_image = IsIntImageType(Image->getType());
5032 uint32_t result_type = 0;
5033 if (is_int_image) {
5034 result_type = v4int32ID;
5035 } else {
5036 result_type = lookupType(Call->getType());
5037 }
5038
5039 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
5040 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04005041
5042 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04005043 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04005044
alan-bakerf67468c2019-11-25 15:51:49 -05005045 uint32_t final_id = nextID++;
5046 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04005047
alan-bakerf67468c2019-11-25 15:51:49 -05005048 uint32_t image_id = final_id;
5049 if (is_int_image) {
5050 // Int image requires a bitcast from v4int to v4uint.
5051 image_id = nextID++;
5052 }
5053
5054 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005055 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05005056
5057 if (is_int_image) {
5058 // Generate the bitcast.
5059 Ops.clear();
5060 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5061 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5062 SPIRVInstList.push_back(Inst);
5063 }
David Neto22f144c2017-06-12 14:26:21 -04005064 break;
5065 }
5066
alan-baker75090e42020-02-20 11:21:04 -05005067 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05005068 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05005069 Value *Image = Call->getArgOperand(0);
5070 Value *Coordinate = Call->getArgOperand(1);
5071
5072 //
5073 // Generate OpImageFetch
5074 //
5075 // Ops[0] = Result Type ID
5076 // Ops[1] = Image ID
5077 // Ops[2] = Coordinate ID
5078 // Ops[3] = Lod
5079 // Ops[4] = 0
5080 //
5081 SPIRVOperandList Ops;
5082
5083 const bool is_int_image = IsIntImageType(Image->getType());
5084 uint32_t result_type = 0;
5085 if (is_int_image) {
5086 result_type = v4int32ID;
5087 } else {
5088 result_type = lookupType(Call->getType());
5089 }
5090
5091 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
5092 << MkNum(spv::ImageOperandsLodMask);
5093
5094 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5095 Ops << MkId(VMap[CstInt0]);
5096
5097 uint32_t final_id = nextID++;
5098 VMap[&I] = final_id;
5099
5100 uint32_t image_id = final_id;
5101 if (is_int_image) {
5102 // Int image requires a bitcast from v4int to v4uint.
5103 image_id = nextID++;
5104 }
5105
5106 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5107 SPIRVInstList.push_back(Inst);
5108
5109 if (is_int_image) {
5110 // Generate the bitcast.
5111 Ops.clear();
5112 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5113 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5114 SPIRVInstList.push_back(Inst);
5115 }
5116 break;
5117 }
5118
alan-bakerf67468c2019-11-25 15:51:49 -05005119 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05005120 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005121 //
5122 // Generate OpImageWrite.
5123 //
5124 // Ops[0] = Image ID
5125 // Ops[1] = Coordinate ID
5126 // Ops[2] = Texel ID
5127 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5128 // Ops[4] ... Ops[n] = (Optional) Operands ID
5129 //
5130 SPIRVOperandList Ops;
5131
5132 Value *Image = Call->getArgOperand(0);
5133 Value *Coordinate = Call->getArgOperand(1);
5134 Value *Texel = Call->getArgOperand(2);
5135
5136 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005137 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005138 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005139
5140 const bool is_int_image = IsIntImageType(Image->getType());
5141 if (is_int_image) {
5142 // Generate a bitcast to v4int and use it as the texel value.
5143 uint32_t castID = nextID++;
5144 Ops << MkId(v4int32ID) << MkId(TexelID);
5145 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5146 SPIRVInstList.push_back(cast);
5147 Ops.clear();
5148 TexelID = castID;
5149 }
David Neto257c3892018-04-11 13:19:45 -04005150 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005151
David Neto87846742018-04-11 17:36:22 -04005152 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005153 SPIRVInstList.push_back(Inst);
5154 break;
5155 }
5156
alan-bakerce179f12019-12-06 19:02:22 -05005157 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05005158 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005159 //
alan-bakerce179f12019-12-06 19:02:22 -05005160 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005161 //
5162 // Ops[0] = Image ID
5163 //
alan-bakerce179f12019-12-06 19:02:22 -05005164 // Result type has components equal to the dimensionality of the image,
5165 // plus 1 if the image is arrayed.
5166 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005167 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005168 SPIRVOperandList Ops;
5169
5170 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005171 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5172 uint32_t SizesTypeID = 0;
5173
David Neto5c22a252018-03-15 16:07:41 -04005174 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005175 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005176 const uint32_t components =
5177 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005178 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005179 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5180 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005181 SizesTypeID =
5182 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005183 }
David Neto5c22a252018-03-15 16:07:41 -04005184 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005185 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005186 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05005187 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05005188 query_opcode = spv::OpImageQuerySizeLod;
5189 // Need explicit 0 for Lod operand.
5190 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5191 Ops << MkId(VMap[CstInt0]);
5192 }
David Neto5c22a252018-03-15 16:07:41 -04005193
5194 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005195 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005196 SPIRVInstList.push_back(QueryInst);
5197
alan-bakerce179f12019-12-06 19:02:22 -05005198 // May require an extra instruction to create the appropriate result of
5199 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05005200 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05005201 if (dim == 3) {
5202 // get_image_dim returns an int4 for 3D images.
5203 //
5204 // Reset value map entry since we generated an intermediate
5205 // instruction.
5206 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005207
alan-bakerce179f12019-12-06 19:02:22 -05005208 // Implement:
5209 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5210 Ops.clear();
5211 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5212 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005213
alan-bakerce179f12019-12-06 19:02:22 -05005214 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5215 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005216
alan-bakerce179f12019-12-06 19:02:22 -05005217 auto *Inst =
5218 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5219 SPIRVInstList.push_back(Inst);
5220 } else if (dim != components) {
5221 // get_image_dim return an int2 regardless of the arrayedness of the
5222 // image. If the image is arrayed an element must be dropped from the
5223 // query result.
5224 //
5225 // Reset value map entry since we generated an intermediate
5226 // instruction.
5227 VMap[&I] = nextID;
5228
5229 // Implement:
5230 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5231 Ops.clear();
5232 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5233 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5234
5235 auto *Inst =
5236 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5237 SPIRVInstList.push_back(Inst);
5238 }
5239 } else if (components > 1) {
5240 // Reset value map entry since we generated an intermediate instruction.
5241 VMap[&I] = nextID;
5242
5243 // Implement:
5244 // %result = OpCompositeExtract %uint %sizes <component number>
5245 Ops.clear();
5246 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5247
5248 uint32_t component = 0;
5249 if (IsGetImageHeight(Callee))
5250 component = 1;
5251 else if (IsGetImageDepth(Callee))
5252 component = 2;
5253 Ops << MkNum(component);
5254
5255 auto *Inst =
5256 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5257 SPIRVInstList.push_back(Inst);
5258 }
David Neto5c22a252018-03-15 16:07:41 -04005259 break;
5260 }
5261
David Neto22f144c2017-06-12 14:26:21 -04005262 // Call instrucion is deferred because it needs function's ID. Record
5263 // slot's location on SPIRVInstructionList.
5264 DeferredInsts.push_back(
5265 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5266
David Neto3fbb4072017-10-16 11:28:14 -04005267 // Check whether the implementation of this call uses an extended
5268 // instruction plus one more value-producing instruction. If so, then
5269 // reserve the id for the extra value-producing slot.
5270 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5271 if (EInst != kGlslExtInstBad) {
5272 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005273 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005274 VMap[&I] = nextID;
5275 nextID++;
5276 }
5277 break;
5278 }
5279 case Instruction::Ret: {
5280 unsigned NumOps = I.getNumOperands();
5281 if (NumOps == 0) {
5282 //
5283 // Generate OpReturn.
5284 //
David Netoef5ba2b2019-12-20 08:35:54 -05005285 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005286 } else {
5287 //
5288 // Generate OpReturnValue.
5289 //
5290
5291 // Ops[0] = Return Value ID
5292 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005293
5294 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005295
David Neto87846742018-04-11 17:36:22 -04005296 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005297 SPIRVInstList.push_back(Inst);
5298 break;
5299 }
5300 break;
5301 }
5302 }
5303}
5304
5305void SPIRVProducerPass::GenerateFuncEpilogue() {
SJW69939d52020-04-16 07:29:07 -05005306 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005307
5308 //
5309 // Generate OpFunctionEnd
5310 //
5311
David Netoef5ba2b2019-12-20 08:35:54 -05005312 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005313 SPIRVInstList.push_back(Inst);
5314}
5315
5316bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005317 // Don't specialize <4 x i8> if i8 is generally supported.
5318 if (clspv::Option::Int8Support())
5319 return false;
5320
David Neto22f144c2017-06-12 14:26:21 -04005321 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04005322 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
5323 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
5324 VecTy->getNumElements() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04005325 return true;
5326 }
5327 }
5328
5329 return false;
5330}
5331
5332void SPIRVProducerPass::HandleDeferredInstruction() {
SJW69939d52020-04-16 07:29:07 -05005333 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005334 ValueMapType &VMap = getValueMap();
5335 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5336
5337 for (auto DeferredInst = DeferredInsts.rbegin();
5338 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5339 Value *Inst = std::get<0>(*DeferredInst);
5340 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5341 if (InsertPoint != SPIRVInstList.end()) {
5342 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5343 ++InsertPoint;
5344 }
5345 }
5346
5347 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005348 // Check whether this branch needs to be preceeded by merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005349 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005350 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005351 //
5352 // Generate OpLoopMerge.
5353 //
5354 // Ops[0] = Merge Block ID
5355 // Ops[1] = Continue Target ID
5356 // Ops[2] = Selection Control
5357 SPIRVOperandList Ops;
5358
alan-baker06cad652019-12-03 17:56:47 -05005359 auto MergeBB = MergeBlocks[BrBB];
5360 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005361 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005362 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005363 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005364 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005365
David Neto87846742018-04-11 17:36:22 -04005366 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005367 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005368 } else if (MergeBlocks.count(BrBB)) {
5369 //
5370 // Generate OpSelectionMerge.
5371 //
5372 // Ops[0] = Merge Block ID
5373 // Ops[1] = Selection Control
5374 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005375
alan-baker06cad652019-12-03 17:56:47 -05005376 auto MergeBB = MergeBlocks[BrBB];
5377 uint32_t MergeBBID = VMap[MergeBB];
5378 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005379
alan-baker06cad652019-12-03 17:56:47 -05005380 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5381 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005382 }
5383
5384 if (Br->isConditional()) {
5385 //
5386 // Generate OpBranchConditional.
5387 //
5388 // Ops[0] = Condition ID
5389 // Ops[1] = True Label ID
5390 // Ops[2] = False Label ID
5391 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5392 SPIRVOperandList Ops;
5393
5394 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005395 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005396 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005397
5398 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005399
David Neto87846742018-04-11 17:36:22 -04005400 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005401 SPIRVInstList.insert(InsertPoint, BrInst);
5402 } else {
5403 //
5404 // Generate OpBranch.
5405 //
5406 // Ops[0] = Target Label ID
5407 SPIRVOperandList Ops;
5408
5409 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005410 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005411
David Neto87846742018-04-11 17:36:22 -04005412 SPIRVInstList.insert(InsertPoint,
5413 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005414 }
5415 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04005416 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
5417 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05005418 // OpPhi on pointers requires variable pointers.
5419 setVariablePointersCapabilities(
5420 PHI->getType()->getPointerAddressSpace());
5421 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5422 setVariablePointers(true);
5423 }
5424 }
5425
David Neto22f144c2017-06-12 14:26:21 -04005426 //
5427 // Generate OpPhi.
5428 //
5429 // Ops[0] = Result Type ID
5430 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5431 SPIRVOperandList Ops;
5432
David Neto257c3892018-04-11 13:19:45 -04005433 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005434
David Neto22f144c2017-06-12 14:26:21 -04005435 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5436 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005437 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005438 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005439 }
5440
5441 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005442 InsertPoint,
5443 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005444 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5445 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05005446 LLVMContext &Context = Callee->getContext();
5447 auto IntTy = Type::getInt32Ty(Context);
5448 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04005449 auto callee_name = Callee->getName();
5450 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005451
5452 if (EInst) {
5453 uint32_t &ExtInstImportID = getOpExtInstImportID();
5454
5455 //
5456 // Generate OpExtInst.
5457 //
5458
5459 // Ops[0] = Result Type ID
5460 // Ops[1] = Set ID (OpExtInstImport ID)
5461 // Ops[2] = Instruction Number (Literal Number)
5462 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5463 SPIRVOperandList Ops;
5464
David Neto862b7d82018-06-14 18:48:37 -04005465 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5466 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005467
David Neto22f144c2017-06-12 14:26:21 -04005468 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5469 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005470 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005471 }
5472
David Neto87846742018-04-11 17:36:22 -04005473 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5474 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005475 SPIRVInstList.insert(InsertPoint, ExtInst);
5476
David Neto3fbb4072017-10-16 11:28:14 -04005477 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5478 if (IndirectExtInst != kGlslExtInstBad) {
5479 // Generate one more instruction that uses the result of the extended
5480 // instruction. Its result id is one more than the id of the
5481 // extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04005482 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5483 &VMap, &SPIRVInstList, &InsertPoint](
5484 spv::Op opcode, Constant *constant) {
5485 //
5486 // Generate instruction like:
5487 // result = opcode constant <extinst-result>
5488 //
5489 // Ops[0] = Result Type ID
5490 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5491 // Ops[2] = Operand 1 ;; the result of the extended instruction
5492 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005493
David Neto3fbb4072017-10-16 11:28:14 -04005494 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005495 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005496
5497 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5498 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005499 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5500 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005501 }
David Neto257c3892018-04-11 13:19:45 -04005502 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005503
5504 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005505 InsertPoint, new SPIRVInstruction(
5506 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005507 };
5508
5509 switch (IndirectExtInst) {
5510 case glsl::ExtInstFindUMsb: // Implementing clz
SJW2c317da2020-03-23 07:39:13 -05005511 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04005512 break;
5513 case glsl::ExtInstAcos: // Implementing acospi
5514 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005515 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005516 case glsl::ExtInstAtan2: // Implementing atan2pi
5517 generate_extra_inst(
5518 spv::OpFMul,
5519 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5520 break;
5521
5522 default:
5523 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005524 }
David Neto22f144c2017-06-12 14:26:21 -04005525 }
David Neto3fbb4072017-10-16 11:28:14 -04005526
SJW2c317da2020-03-23 07:39:13 -05005527 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04005528 //
5529 // Generate OpBitCount
5530 //
5531 // Ops[0] = Result Type ID
5532 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005533 SPIRVOperandList Ops;
5534 Ops << MkId(lookupType(Call->getType()))
5535 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005536
5537 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005538 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005539 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005540
David Neto862b7d82018-06-14 18:48:37 -04005541 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005542
5543 // Generate an OpCompositeConstruct
5544 SPIRVOperandList Ops;
5545
5546 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005547 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005548
5549 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005550 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005551 }
5552
5553 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005554 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5555 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005556
Alan Baker202c8c72018-08-13 13:47:44 -04005557 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5558
5559 // We have already mapped the call's result value to an ID.
5560 // Don't generate any code now.
5561
5562 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005563
5564 // We have already mapped the call's result value to an ID.
5565 // Don't generate any code now.
5566
David Neto22f144c2017-06-12 14:26:21 -04005567 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005568 if (Call->getType()->isPointerTy()) {
5569 // Functions returning pointers require variable pointers.
5570 setVariablePointersCapabilities(
5571 Call->getType()->getPointerAddressSpace());
5572 }
5573
David Neto22f144c2017-06-12 14:26:21 -04005574 //
5575 // Generate OpFunctionCall.
5576 //
5577
5578 // Ops[0] = Result Type ID
5579 // Ops[1] = Callee Function ID
5580 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5581 SPIRVOperandList Ops;
5582
David Neto862b7d82018-06-14 18:48:37 -04005583 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005584
5585 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005586 if (CalleeID == 0) {
5587 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005588 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005589 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5590 // causes an infinite loop. Instead, go ahead and generate
5591 // the bad function call. A validator will catch the 0-Id.
5592 // llvm_unreachable("Can't translate function call");
5593 }
David Neto22f144c2017-06-12 14:26:21 -04005594
David Neto257c3892018-04-11 13:19:45 -04005595 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005596
David Neto22f144c2017-06-12 14:26:21 -04005597 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5598 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005599 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005600 auto *operand_type = operand->getType();
5601 // Images and samplers can be passed as function parameters without
5602 // variable pointers.
5603 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5604 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005605 auto sc =
5606 GetStorageClass(operand->getType()->getPointerAddressSpace());
5607 if (sc == spv::StorageClassStorageBuffer) {
5608 // Passing SSBO by reference requires variable pointers storage
5609 // buffer.
5610 setVariablePointersStorageBuffer(true);
5611 } else if (sc == spv::StorageClassWorkgroup) {
5612 // Workgroup references require variable pointers if they are not
5613 // memory object declarations.
5614 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5615 // Workgroup accessor represents a variable reference.
5616 if (!operand_call->getCalledFunction()->getName().startswith(
5617 clspv::WorkgroupAccessorFunction()))
5618 setVariablePointers(true);
5619 } else {
5620 // Arguments are function parameters.
5621 if (!isa<Argument>(operand))
5622 setVariablePointers(true);
5623 }
5624 }
5625 }
5626 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005627 }
5628
David Neto87846742018-04-11 17:36:22 -04005629 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5630 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005631 SPIRVInstList.insert(InsertPoint, CallInst);
5632 }
5633 }
5634 }
5635}
5636
alan-bakera1be3322020-04-20 12:48:18 -04005637void SPIRVProducerPass::HandleDeferredDecorations(Module &module) {
5638 const auto &DL = module.getDataLayout();
Alan Baker202c8c72018-08-13 13:47:44 -04005639 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005640 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005641 }
David Neto1a1a0582017-07-07 12:01:44 -04005642
SJW69939d52020-04-16 07:29:07 -05005643 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kAnnotations);
David Neto1a1a0582017-07-07 12:01:44 -04005644
David Netoc6f3ab22018-04-06 18:02:31 -04005645 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5646 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005647 for (auto *type : getTypesNeedingArrayStride()) {
5648 Type *elemTy = nullptr;
5649 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5650 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005651 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04005652 elemTy = arrayTy->getElementType();
5653 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
5654 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07005655 } else {
5656 errs() << "Unhandled strided type " << *type << "\n";
5657 llvm_unreachable("Unhandled strided type");
5658 }
David Neto1a1a0582017-07-07 12:01:44 -04005659
5660 // Ops[0] = Target ID
5661 // Ops[1] = Decoration (ArrayStride)
5662 // Ops[2] = Stride number (Literal Number)
5663 SPIRVOperandList Ops;
5664
David Neto85082642018-03-24 06:55:20 -07005665 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005666 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005667
5668 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5669 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005670
David Neto87846742018-04-11 17:36:22 -04005671 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05005672 SPIRVInstList.push_back(DecoInst);
David Neto1a1a0582017-07-07 12:01:44 -04005673 }
David Netoc6f3ab22018-04-06 18:02:31 -04005674
5675 // Emit SpecId decorations targeting the array size value.
alan-bakera1be3322020-04-20 12:48:18 -04005676 for (auto pair : clspv::GetSpecConstants(&module)) {
5677 auto kind = pair.first;
5678 auto spec_id = pair.second;
5679
5680 if (kind != SpecConstant::kLocalMemorySize)
5681 continue;
5682
alan-bakerb6b09dc2018-11-08 16:59:28 -05005683 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005684 SPIRVOperandList Ops;
5685 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5686 << MkNum(arg_info.spec_id);
SJW69939d52020-04-16 07:29:07 -05005687 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005688 }
David Neto1a1a0582017-07-07 12:01:44 -04005689}
5690
David Neto22f144c2017-06-12 14:26:21 -04005691glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005692
5693 const auto &fi = Builtins::Lookup(Name);
5694 switch (fi) {
5695 case Builtins::kClamp: {
5696 auto param_type = fi.getParameter(0);
5697 if (param_type.type_id == Type::FloatTyID) {
5698 return glsl::ExtInst::ExtInstFClamp;
5699 }
5700 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5701 : glsl::ExtInst::ExtInstUClamp;
5702 }
5703 case Builtins::kMax: {
5704 auto param_type = fi.getParameter(0);
5705 if (param_type.type_id == Type::FloatTyID) {
5706 return glsl::ExtInst::ExtInstFMax;
5707 }
5708 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5709 : glsl::ExtInst::ExtInstUMax;
5710 }
5711 case Builtins::kMin: {
5712 auto param_type = fi.getParameter(0);
5713 if (param_type.type_id == Type::FloatTyID) {
5714 return glsl::ExtInst::ExtInstFMin;
5715 }
5716 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5717 : glsl::ExtInst::ExtInstUMin;
5718 }
5719 case Builtins::kAbs:
5720 return glsl::ExtInst::ExtInstSAbs;
5721 case Builtins::kFmax:
5722 return glsl::ExtInst::ExtInstFMax;
5723 case Builtins::kFmin:
5724 return glsl::ExtInst::ExtInstFMin;
5725 case Builtins::kDegrees:
5726 return glsl::ExtInst::ExtInstDegrees;
5727 case Builtins::kRadians:
5728 return glsl::ExtInst::ExtInstRadians;
5729 case Builtins::kMix:
5730 return glsl::ExtInst::ExtInstFMix;
5731 case Builtins::kAcos:
5732 case Builtins::kAcospi:
5733 return glsl::ExtInst::ExtInstAcos;
5734 case Builtins::kAcosh:
5735 return glsl::ExtInst::ExtInstAcosh;
5736 case Builtins::kAsin:
5737 case Builtins::kAsinpi:
5738 return glsl::ExtInst::ExtInstAsin;
5739 case Builtins::kAsinh:
5740 return glsl::ExtInst::ExtInstAsinh;
5741 case Builtins::kAtan:
5742 case Builtins::kAtanpi:
5743 return glsl::ExtInst::ExtInstAtan;
5744 case Builtins::kAtanh:
5745 return glsl::ExtInst::ExtInstAtanh;
5746 case Builtins::kAtan2:
5747 case Builtins::kAtan2pi:
5748 return glsl::ExtInst::ExtInstAtan2;
5749 case Builtins::kCeil:
5750 return glsl::ExtInst::ExtInstCeil;
5751 case Builtins::kSin:
5752 case Builtins::kHalfSin:
5753 case Builtins::kNativeSin:
5754 return glsl::ExtInst::ExtInstSin;
5755 case Builtins::kSinh:
5756 return glsl::ExtInst::ExtInstSinh;
5757 case Builtins::kCos:
5758 case Builtins::kHalfCos:
5759 case Builtins::kNativeCos:
5760 return glsl::ExtInst::ExtInstCos;
5761 case Builtins::kCosh:
5762 return glsl::ExtInst::ExtInstCosh;
5763 case Builtins::kTan:
5764 case Builtins::kHalfTan:
5765 case Builtins::kNativeTan:
5766 return glsl::ExtInst::ExtInstTan;
5767 case Builtins::kTanh:
5768 return glsl::ExtInst::ExtInstTanh;
5769 case Builtins::kExp:
5770 case Builtins::kHalfExp:
5771 case Builtins::kNativeExp:
5772 return glsl::ExtInst::ExtInstExp;
5773 case Builtins::kExp2:
5774 case Builtins::kHalfExp2:
5775 case Builtins::kNativeExp2:
5776 return glsl::ExtInst::ExtInstExp2;
5777 case Builtins::kLog:
5778 case Builtins::kHalfLog:
5779 case Builtins::kNativeLog:
5780 return glsl::ExtInst::ExtInstLog;
5781 case Builtins::kLog2:
5782 case Builtins::kHalfLog2:
5783 case Builtins::kNativeLog2:
5784 return glsl::ExtInst::ExtInstLog2;
5785 case Builtins::kFabs:
5786 return glsl::ExtInst::ExtInstFAbs;
5787 case Builtins::kFma:
5788 return glsl::ExtInst::ExtInstFma;
5789 case Builtins::kFloor:
5790 return glsl::ExtInst::ExtInstFloor;
5791 case Builtins::kLdexp:
5792 return glsl::ExtInst::ExtInstLdexp;
5793 case Builtins::kPow:
5794 case Builtins::kPowr:
5795 case Builtins::kHalfPowr:
5796 case Builtins::kNativePowr:
5797 return glsl::ExtInst::ExtInstPow;
5798 case Builtins::kRound:
5799 return glsl::ExtInst::ExtInstRound;
5800 case Builtins::kSqrt:
5801 case Builtins::kHalfSqrt:
5802 case Builtins::kNativeSqrt:
5803 return glsl::ExtInst::ExtInstSqrt;
5804 case Builtins::kRsqrt:
5805 case Builtins::kHalfRsqrt:
5806 case Builtins::kNativeRsqrt:
5807 return glsl::ExtInst::ExtInstInverseSqrt;
5808 case Builtins::kTrunc:
5809 return glsl::ExtInst::ExtInstTrunc;
5810 case Builtins::kFrexp:
5811 return glsl::ExtInst::ExtInstFrexp;
5812 case Builtins::kFract:
5813 return glsl::ExtInst::ExtInstFract;
5814 case Builtins::kSign:
5815 return glsl::ExtInst::ExtInstFSign;
5816 case Builtins::kLength:
5817 case Builtins::kFastLength:
5818 return glsl::ExtInst::ExtInstLength;
5819 case Builtins::kDistance:
5820 case Builtins::kFastDistance:
5821 return glsl::ExtInst::ExtInstDistance;
5822 case Builtins::kStep:
5823 return glsl::ExtInst::ExtInstStep;
5824 case Builtins::kSmoothstep:
5825 return glsl::ExtInst::ExtInstSmoothStep;
5826 case Builtins::kCross:
5827 return glsl::ExtInst::ExtInstCross;
5828 case Builtins::kNormalize:
5829 case Builtins::kFastNormalize:
5830 return glsl::ExtInst::ExtInstNormalize;
5831 default:
5832 break;
5833 }
5834
David Neto22f144c2017-06-12 14:26:21 -04005835 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005836 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5837 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5838 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005839 .Default(kGlslExtInstBad);
5840}
5841
5842glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005843 switch (Builtins::Lookup(Name)) {
5844 case Builtins::kClz:
5845 return glsl::ExtInst::ExtInstFindUMsb;
5846 case Builtins::kAcospi:
5847 return glsl::ExtInst::ExtInstAcos;
5848 case Builtins::kAsinpi:
5849 return glsl::ExtInst::ExtInstAsin;
5850 case Builtins::kAtanpi:
5851 return glsl::ExtInst::ExtInstAtan;
5852 case Builtins::kAtan2pi:
5853 return glsl::ExtInst::ExtInstAtan2;
5854 default:
5855 break;
5856 }
5857 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005858}
5859
alan-bakerb6b09dc2018-11-08 16:59:28 -05005860glsl::ExtInst
5861SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005862 auto direct = getExtInstEnum(Name);
5863 if (direct != kGlslExtInstBad)
5864 return direct;
5865 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005866}
5867
David Neto22f144c2017-06-12 14:26:21 -04005868void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005869 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005870}
5871
5872void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5873 WriteOneWord(Inst->getResultID());
5874}
5875
5876void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5877 // High 16 bit : Word Count
5878 // Low 16 bit : Opcode
5879 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005880 const uint32_t count = Inst->getWordCount();
5881 if (count > 65535) {
5882 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5883 llvm_unreachable("Word count too high");
5884 }
David Neto22f144c2017-06-12 14:26:21 -04005885 Word |= Inst->getWordCount() << 16;
5886 WriteOneWord(Word);
5887}
5888
David Netoef5ba2b2019-12-20 08:35:54 -05005889void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005890 SPIRVOperandType OpTy = Op->getType();
5891 switch (OpTy) {
5892 default: {
5893 llvm_unreachable("Unsupported SPIRV Operand Type???");
5894 break;
5895 }
5896 case SPIRVOperandType::NUMBERID: {
5897 WriteOneWord(Op->getNumID());
5898 break;
5899 }
5900 case SPIRVOperandType::LITERAL_STRING: {
5901 std::string Str = Op->getLiteralStr();
5902 const char *Data = Str.c_str();
5903 size_t WordSize = Str.size() / 4;
5904 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5905 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5906 }
5907
5908 uint32_t Remainder = Str.size() % 4;
5909 uint32_t LastWord = 0;
5910 if (Remainder) {
5911 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5912 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5913 }
5914 }
5915
5916 WriteOneWord(LastWord);
5917 break;
5918 }
5919 case SPIRVOperandType::LITERAL_INTEGER:
5920 case SPIRVOperandType::LITERAL_FLOAT: {
5921 auto LiteralNum = Op->getLiteralNum();
5922 // TODO: Handle LiteranNum carefully.
5923 for (auto Word : LiteralNum) {
5924 WriteOneWord(Word);
5925 }
5926 break;
5927 }
5928 }
5929}
5930
5931void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005932 for (int i = 0; i < kSectionCount; ++i) {
5933 WriteSPIRVBinary(SPIRVSections[i]);
5934 }
5935}
5936
5937void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
David Neto22f144c2017-06-12 14:26:21 -04005938
5939 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005940 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005941 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5942
5943 switch (Opcode) {
5944 default: {
David Neto5c22a252018-03-15 16:07:41 -04005945 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005946 llvm_unreachable("Unsupported SPIRV instruction");
5947 break;
5948 }
5949 case spv::OpCapability:
5950 case spv::OpExtension:
5951 case spv::OpMemoryModel:
5952 case spv::OpEntryPoint:
5953 case spv::OpExecutionMode:
5954 case spv::OpSource:
5955 case spv::OpDecorate:
5956 case spv::OpMemberDecorate:
5957 case spv::OpBranch:
5958 case spv::OpBranchConditional:
5959 case spv::OpSelectionMerge:
5960 case spv::OpLoopMerge:
5961 case spv::OpStore:
5962 case spv::OpImageWrite:
5963 case spv::OpReturnValue:
5964 case spv::OpControlBarrier:
5965 case spv::OpMemoryBarrier:
5966 case spv::OpReturn:
5967 case spv::OpFunctionEnd:
5968 case spv::OpCopyMemory: {
5969 WriteWordCountAndOpcode(Inst);
5970 for (uint32_t i = 0; i < Ops.size(); i++) {
5971 WriteOperand(Ops[i]);
5972 }
5973 break;
5974 }
5975 case spv::OpTypeBool:
5976 case spv::OpTypeVoid:
5977 case spv::OpTypeSampler:
5978 case spv::OpLabel:
5979 case spv::OpExtInstImport:
5980 case spv::OpTypePointer:
5981 case spv::OpTypeRuntimeArray:
5982 case spv::OpTypeStruct:
5983 case spv::OpTypeImage:
5984 case spv::OpTypeSampledImage:
5985 case spv::OpTypeInt:
5986 case spv::OpTypeFloat:
5987 case spv::OpTypeArray:
5988 case spv::OpTypeVector:
5989 case spv::OpTypeFunction: {
5990 WriteWordCountAndOpcode(Inst);
5991 WriteResultID(Inst);
5992 for (uint32_t i = 0; i < Ops.size(); i++) {
5993 WriteOperand(Ops[i]);
5994 }
5995 break;
5996 }
5997 case spv::OpFunction:
5998 case spv::OpFunctionParameter:
5999 case spv::OpAccessChain:
6000 case spv::OpPtrAccessChain:
6001 case spv::OpInBoundsAccessChain:
6002 case spv::OpUConvert:
6003 case spv::OpSConvert:
6004 case spv::OpConvertFToU:
6005 case spv::OpConvertFToS:
6006 case spv::OpConvertUToF:
6007 case spv::OpConvertSToF:
6008 case spv::OpFConvert:
6009 case spv::OpConvertPtrToU:
6010 case spv::OpConvertUToPtr:
6011 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05006012 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04006013 case spv::OpIAdd:
6014 case spv::OpFAdd:
6015 case spv::OpISub:
6016 case spv::OpFSub:
6017 case spv::OpIMul:
6018 case spv::OpFMul:
6019 case spv::OpUDiv:
6020 case spv::OpSDiv:
6021 case spv::OpFDiv:
6022 case spv::OpUMod:
6023 case spv::OpSRem:
6024 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00006025 case spv::OpUMulExtended:
6026 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04006027 case spv::OpBitwiseOr:
6028 case spv::OpBitwiseXor:
6029 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04006030 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04006031 case spv::OpShiftLeftLogical:
6032 case spv::OpShiftRightLogical:
6033 case spv::OpShiftRightArithmetic:
6034 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04006035 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04006036 case spv::OpCompositeExtract:
6037 case spv::OpVectorExtractDynamic:
6038 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04006039 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04006040 case spv::OpVectorInsertDynamic:
6041 case spv::OpVectorShuffle:
6042 case spv::OpIEqual:
6043 case spv::OpINotEqual:
6044 case spv::OpUGreaterThan:
6045 case spv::OpUGreaterThanEqual:
6046 case spv::OpULessThan:
6047 case spv::OpULessThanEqual:
6048 case spv::OpSGreaterThan:
6049 case spv::OpSGreaterThanEqual:
6050 case spv::OpSLessThan:
6051 case spv::OpSLessThanEqual:
6052 case spv::OpFOrdEqual:
6053 case spv::OpFOrdGreaterThan:
6054 case spv::OpFOrdGreaterThanEqual:
6055 case spv::OpFOrdLessThan:
6056 case spv::OpFOrdLessThanEqual:
6057 case spv::OpFOrdNotEqual:
6058 case spv::OpFUnordEqual:
6059 case spv::OpFUnordGreaterThan:
6060 case spv::OpFUnordGreaterThanEqual:
6061 case spv::OpFUnordLessThan:
6062 case spv::OpFUnordLessThanEqual:
6063 case spv::OpFUnordNotEqual:
6064 case spv::OpExtInst:
6065 case spv::OpIsInf:
6066 case spv::OpIsNan:
6067 case spv::OpAny:
6068 case spv::OpAll:
6069 case spv::OpUndef:
6070 case spv::OpConstantNull:
6071 case spv::OpLogicalOr:
6072 case spv::OpLogicalAnd:
6073 case spv::OpLogicalNot:
6074 case spv::OpLogicalNotEqual:
6075 case spv::OpConstantComposite:
6076 case spv::OpSpecConstantComposite:
6077 case spv::OpConstantTrue:
6078 case spv::OpConstantFalse:
6079 case spv::OpConstant:
6080 case spv::OpSpecConstant:
6081 case spv::OpVariable:
6082 case spv::OpFunctionCall:
6083 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05006084 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04006085 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04006086 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05006087 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04006088 case spv::OpSelect:
6089 case spv::OpPhi:
6090 case spv::OpLoad:
6091 case spv::OpAtomicIAdd:
6092 case spv::OpAtomicISub:
6093 case spv::OpAtomicExchange:
6094 case spv::OpAtomicIIncrement:
6095 case spv::OpAtomicIDecrement:
6096 case spv::OpAtomicCompareExchange:
6097 case spv::OpAtomicUMin:
6098 case spv::OpAtomicSMin:
6099 case spv::OpAtomicUMax:
6100 case spv::OpAtomicSMax:
6101 case spv::OpAtomicAnd:
6102 case spv::OpAtomicOr:
6103 case spv::OpAtomicXor:
6104 case spv::OpDot: {
6105 WriteWordCountAndOpcode(Inst);
6106 WriteOperand(Ops[0]);
6107 WriteResultID(Inst);
6108 for (uint32_t i = 1; i < Ops.size(); i++) {
6109 WriteOperand(Ops[i]);
6110 }
6111 break;
6112 }
6113 }
6114 }
6115}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006116
alan-bakerb6b09dc2018-11-08 16:59:28 -05006117bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04006118 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006119 case Type::HalfTyID:
6120 case Type::FloatTyID:
6121 case Type::DoubleTyID:
6122 case Type::IntegerTyID:
6123 case Type::VectorTyID:
6124 return true;
6125 case Type::PointerTyID: {
6126 const PointerType *pointer_type = cast<PointerType>(type);
6127 if (pointer_type->getPointerAddressSpace() !=
6128 AddressSpace::UniformConstant) {
6129 auto pointee_type = pointer_type->getPointerElementType();
6130 if (pointee_type->isStructTy() &&
6131 cast<StructType>(pointee_type)->isOpaque()) {
6132 // Images and samplers are not nullable.
6133 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006134 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006135 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006136 return true;
6137 }
6138 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04006139 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05006140 case Type::StructTyID: {
6141 const StructType *struct_type = cast<StructType>(type);
6142 // Images and samplers are not nullable.
6143 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006144 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006145 for (const auto element : struct_type->elements()) {
6146 if (!IsTypeNullable(element))
6147 return false;
6148 }
6149 return true;
6150 }
6151 default:
6152 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006153 }
6154}
Alan Bakerfcda9482018-10-02 17:09:59 -04006155
6156void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
6157 if (auto *offsets_md =
6158 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
6159 // Metdata is stored as key-value pair operands. The first element of each
6160 // operand is the type and the second is a vector of offsets.
6161 for (const auto *operand : offsets_md->operands()) {
6162 const auto *pair = cast<MDTuple>(operand);
6163 auto *type =
6164 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6165 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6166 std::vector<uint32_t> offsets;
6167 for (const Metadata *offset_md : offset_vector->operands()) {
6168 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006169 offsets.push_back(static_cast<uint32_t>(
6170 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006171 }
6172 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6173 }
6174 }
6175
6176 if (auto *sizes_md =
6177 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
6178 // Metadata is stored as key-value pair operands. The first element of each
6179 // operand is the type and the second is a triple of sizes: type size in
6180 // bits, store size and alloc size.
6181 for (const auto *operand : sizes_md->operands()) {
6182 const auto *pair = cast<MDTuple>(operand);
6183 auto *type =
6184 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6185 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6186 uint64_t type_size_in_bits =
6187 cast<ConstantInt>(
6188 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6189 ->getZExtValue();
6190 uint64_t type_store_size =
6191 cast<ConstantInt>(
6192 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6193 ->getZExtValue();
6194 uint64_t type_alloc_size =
6195 cast<ConstantInt>(
6196 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6197 ->getZExtValue();
6198 RemappedUBOTypeSizes.insert(std::make_pair(
6199 type, std::make_tuple(type_size_in_bits, type_store_size,
6200 type_alloc_size)));
6201 }
6202 }
6203}
6204
6205uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6206 const DataLayout &DL) {
6207 auto iter = RemappedUBOTypeSizes.find(type);
6208 if (iter != RemappedUBOTypeSizes.end()) {
6209 return std::get<0>(iter->second);
6210 }
6211
6212 return DL.getTypeSizeInBits(type);
6213}
6214
6215uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6216 auto iter = RemappedUBOTypeSizes.find(type);
6217 if (iter != RemappedUBOTypeSizes.end()) {
6218 return std::get<1>(iter->second);
6219 }
6220
6221 return DL.getTypeStoreSize(type);
6222}
6223
6224uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6225 auto iter = RemappedUBOTypeSizes.find(type);
6226 if (iter != RemappedUBOTypeSizes.end()) {
6227 return std::get<2>(iter->second);
6228 }
6229
6230 return DL.getTypeAllocSize(type);
6231}
alan-baker5b86ed72019-02-15 08:26:50 -05006232
Kévin Petitbbbda972020-03-03 19:16:31 +00006233uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
6234 StructType *type, unsigned member, const DataLayout &DL) {
6235 const auto StructLayout = DL.getStructLayout(type);
6236 // Search for the correct offsets if this type was remapped.
6237 std::vector<uint32_t> *offsets = nullptr;
6238 auto iter = RemappedUBOTypeOffsets.find(type);
6239 if (iter != RemappedUBOTypeOffsets.end()) {
6240 offsets = &iter->second;
6241 }
6242 auto ByteOffset =
6243 static_cast<uint32_t>(StructLayout->getElementOffset(member));
6244 if (offsets) {
6245 ByteOffset = (*offsets)[member];
6246 }
6247
6248 return ByteOffset;
6249}
6250
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006251void SPIRVProducerPass::setVariablePointersCapabilities(
6252 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006253 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6254 setVariablePointersStorageBuffer(true);
6255 } else {
6256 setVariablePointers(true);
6257 }
6258}
6259
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006260Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006261 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6262 return GetBasePointer(gep->getPointerOperand());
6263 }
6264
6265 // Conservatively return |v|.
6266 return v;
6267}
6268
6269bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6270 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6271 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6272 if (lhs_call->getCalledFunction()->getName().startswith(
6273 clspv::ResourceAccessorFunction()) &&
6274 rhs_call->getCalledFunction()->getName().startswith(
6275 clspv::ResourceAccessorFunction())) {
6276 // For resource accessors, match descriptor set and binding.
6277 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6278 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6279 return true;
6280 } else if (lhs_call->getCalledFunction()->getName().startswith(
6281 clspv::WorkgroupAccessorFunction()) &&
6282 rhs_call->getCalledFunction()->getName().startswith(
6283 clspv::WorkgroupAccessorFunction())) {
6284 // For workgroup resources, match spec id.
6285 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6286 return true;
6287 }
6288 }
6289 }
6290
6291 return false;
6292}
6293
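// Returns true if the select or phi instruction |inst|, which must produce a
// storage-buffer pointer, provably chooses between pointers into the same
// underlying object. Null bases (and undef bases when
// clspv::Option::HackUndef() is set) also satisfy the constraint.
// Conservatively returns false otherwise.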
6294bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6295 assert(inst->getType()->isPointerTy());
6296 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6297 spv::StorageClassStorageBuffer);
6298 const bool hack_undef = clspv::Option::HackUndef();
6299 if (auto *select = dyn_cast<SelectInst>(inst)) {
6300 auto *true_base = GetBasePointer(select->getTrueValue());
6301 auto *false_base = GetBasePointer(select->getFalseValue());
6302
6303 if (true_base == false_base)
6304 return true;
6305
6306    // If either the true or false operand is null, then we satisfy the same
6307    // object constraint.
6308 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6309 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6310 return true;
6311 }
6312
6313 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6314 if (false_cst->isNullValue() ||
6315 (hack_undef && isa<UndefValue>(false_base)))
6316 return true;
6317 }
6318
6319 if (sameResource(true_base, false_base))
6320 return true;
6321 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6322 Value *value = nullptr;
6323 bool ok = true;
6324 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6325 auto *base = GetBasePointer(phi->getIncomingValue(i));
6326      // Null values satisfy the constraint of selecting from the same
6327      // object.
6328 if (!value) {
6329 if (auto *cst = dyn_cast<Constant>(base)) {
6330 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6331 value = base;
6332 } else {
6333 value = base;
6334 }
6335 } else if (base != value) {
6336 if (auto *base_cst = dyn_cast<Constant>(base)) {
6337 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6338 continue;
6339 }
6340
6341 if (sameResource(value, base))
6342 continue;
6343
6344 // Values don't represent the same base.
6345 ok = false;
6346 }
6347 }
6348
6349 return ok;
6350 }
6351
6352 // Conservatively return false.
6353 return false;
6354}
alan-bakere9308012019-03-15 10:25:13 -04006355
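// Returns true if, at any call site of the function owning |Arg|, the value
// passed for |Arg| can be traced back to a resource accessor call whose
// coherent operand is set. Only pointer arguments in the global (SSBO)
// address space are considered.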
6356bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6357 if (!Arg.getType()->isPointerTy() ||
6358 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6359 // Only SSBOs need to be annotated as coherent.
6360 return false;
6361 }
6362
6363 DenseSet<Value *> visited;
6364 std::vector<Value *> stack;
6365 for (auto *U : Arg.getParent()->users()) {
6366 if (auto *call = dyn_cast<CallInst>(U)) {
6367 stack.push_back(call->getOperand(Arg.getArgNo()));
6368 }
6369 }
6370
6371 while (!stack.empty()) {
6372 Value *v = stack.back();
6373 stack.pop_back();
6374
6375 if (!visited.insert(v).second)
6376 continue;
6377
6378 auto *resource_call = dyn_cast<CallInst>(v);
6379 if (resource_call &&
6380 resource_call->getCalledFunction()->getName().startswith(
6381 clspv::ResourceAccessorFunction())) {
6382 // If this is a resource accessor function, check if the coherent operand
6383 // is set.
6384 const auto coherent =
6385 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6386 ->getZExtValue());
6387 if (coherent == 1)
6388 return true;
6389 } else if (auto *arg = dyn_cast<Argument>(v)) {
6390 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006391 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006392 if (auto *call = dyn_cast<CallInst>(U)) {
6393 stack.push_back(call->getOperand(arg->getArgNo()));
6394 }
6395 }
6396 } else if (auto *user = dyn_cast<User>(v)) {
6397 // If this is a user, traverse all operands that could lead to resource
6398 // variables.
6399 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6400 Value *operand = user->getOperand(i);
6401 if (operand->getType()->isPointerTy() &&
6402 operand->getType()->getPointerAddressSpace() ==
6403 clspv::AddressSpace::Global) {
6404 stack.push_back(operand);
6405 }
6406 }
6407 }
6408 }
6409
6410 // No coherent resource variables encountered.
6411 return false;
6412}
alan-baker06cad652019-12-03 17:56:47 -05006413
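// Populates the MergeBlocks and ContinueBlocks maps used to emit structured
// control flow: each loop header is mapped to its merge and continue blocks,
// and each conditional branch that is not a loop back edge is mapped to a
// selection merge block (the branch's false successor).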
6414void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6415 // First, track loop merges and continues.
6416 DenseSet<BasicBlock *> LoopMergesAndContinues;
6417 for (auto &F : module) {
6418 if (F.isDeclaration())
6419 continue;
6420
6421 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6422 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6423 std::deque<BasicBlock *> order;
6424 DenseSet<BasicBlock *> visited;
6425 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6426
6427 for (auto BB : order) {
6428 auto terminator = BB->getTerminator();
6429 auto branch = dyn_cast<BranchInst>(terminator);
6430 if (LI.isLoopHeader(BB)) {
6431 auto L = LI.getLoopFor(BB);
6432 BasicBlock *ContinueBB = nullptr;
6433 BasicBlock *MergeBB = nullptr;
6434
6435 MergeBB = L->getExitBlock();
6436 if (!MergeBB) {
6437          // The StructurizeCFG pass converts the CFG into a triangle shape
6438          // whose regions have a single entry and exit. As a result, a
6439          // loop should not have multiple exits.
6440 llvm_unreachable("Loop has multiple exits???");
6441 }
6442
6443 if (L->isLoopLatch(BB)) {
6444 ContinueBB = BB;
6445 } else {
6446          // Per SPIR-V spec section 2.11, the Continue Target must
6447          // dominate the back-edge block.
6448 BasicBlock *Header = L->getHeader();
6449 BasicBlock *Latch = L->getLoopLatch();
6450 for (auto *loop_block : L->blocks()) {
6451 if (loop_block == Header) {
6452 continue;
6453 }
6454
6455            // Check whether this block dominates the block with the
6456            // back-edge. The loop latch is the single block with a
6457            // back-edge. If it was possible, StructurizeCFG made the loop
6458            // conform to this requirement; otherwise |Latch| is nullptr.
6459 if (DT.dominates(loop_block, Latch)) {
6460 ContinueBB = loop_block;
6461 }
6462 }
6463
6464 if (!ContinueBB) {
6465 llvm_unreachable("Wrong continue block from loop");
6466 }
6467 }
6468
6469 // Record the continue and merge blocks.
6470 MergeBlocks[BB] = MergeBB;
6471 ContinueBlocks[BB] = ContinueBB;
6472 LoopMergesAndContinues.insert(MergeBB);
6473 LoopMergesAndContinues.insert(ContinueBB);
6474 } else if (branch && branch->isConditional()) {
6475 auto L = LI.getLoopFor(BB);
6476 bool HasBackedge = false;
6477 while (L && !HasBackedge) {
6478 if (L->isLoopLatch(BB)) {
6479 HasBackedge = true;
6480 }
6481 L = L->getParentLoop();
6482 }
6483
6484 if (!HasBackedge) {
6485 // Only need a merge if the branch doesn't include a loop break or
6486 // continue.
6487 auto true_bb = branch->getSuccessor(0);
6488 auto false_bb = branch->getSuccessor(1);
6489 if (!LoopMergesAndContinues.count(true_bb) &&
6490 !LoopMergesAndContinues.count(false_bb)) {
6491            // The StructurizeCFG pass has already manipulated the CFG.
6492            // Just use the false block of the branch instruction as the
6493            // merge block.
6493 MergeBlocks[BB] = false_bb;
6494 }
6495 }
6496 }
6497 }
6498 }
6499}