// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

// SPIRV Module Sections (per 2.4 of the SPIRV spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  kSectionCount
};
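
// Illustrative note (assumed usage, not verbatim from the original): an
// instruction built during code generation is appended to its section's list,
// e.g. an OpDecorate typically lands in getSPIRVInstList(kAnnotations), and
// the sections are written out in the order of this enum.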

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};
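
// Illustrative note (not from the original source): a NUMBERID operand is
// always a single word, while a LITERAL_STRING such as "GLSL.std.450" (12
// characters) occupies (12 + 4) / 4 = 4 words, covering the characters plus
// the terminating null.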

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}

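// Illustrative sketch (assumed usage, not taken verbatim from the original):
// operands are composed with the stream operator and the Mk* helpers above,
// e.g.
//   SPIRVOperandList Ops;
//   Ops << MkId(target_id) << MkNum(spv::DecorationArrayStride) << MkNum(16);
// where |target_id| stands in for the result ID of the type being decorated.
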
struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the
  // single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};
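
// Illustrative sketch (assumed usage, not verbatim from this pass): an
// OpExtInstImport of "GLSL.std.450" with result ID |import_id| could be built
// as
//   SPIRVOperandList Ops;
//   Ops << MkString("GLSL.std.450");
//   auto *Inst = new SPIRVInstruction(spv::OpExtInstImport, import_id, Ops);
//   getSPIRVInstList(kImports).push_back(Inst);
// giving a word count of 2 (the opcode/word-count word plus the result ID)
// plus 4 words for the string literal.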

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {}

  virtual ~SPIRVProducerPass() {
    for (int i = 0; i < kSectionCount; ++i) {
      for (auto *Inst : SPIRVSections[i]) {
        delete Inst;
      }
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindWorkgroupVars();
  bool FindExtInst();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateSPIRVConstants();
  void GenerateModuleInfo();
  void GeneratePushConstantDescriptorMapEntries();
  void GenerateSpecConstantDescriptorMapEntries();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(Function &F);
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;

  Module *module;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where G is initially
  // F's type. During FindTypePerFunc, G is replaced by a copy of F's type in
  // which the pointer-to-constant parameter becomes a
  // pointer-to-ModuleScopePrivate parameter.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
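  // Illustrative note (assumed shape, based on FindResourceVars below): each
  // distinct (set,binding) pair seen for a @clspv.resource.var.* builtin gets
  // one ResourceVarInfo. The call's leading operands carry the descriptor
  // set, binding, argument kind, and argument index, and var_id stays 0 until
  // the corresponding OpVariable is generated.
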
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &M) {
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst()) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate descriptor map entries for all push constants.
  GeneratePushConstantDescriptorMapEntries();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate descriptor map entries for module scope specialization constants.
  GenerateSpecConstantDescriptorMapEntries();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }
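
  // Illustrative note (assumed, not from the original source): when
  // outputCInitList is set, the emitted text is a brace-enclosed,
  // comma-separated list of 32-bit words starting with the SPIR-V header,
  // e.g.
  //   {119734787,
  //   65536,
  //   ...}
  // where 119734787 is spv::MagicNumber (0x07230203) and 65536 is 0x10000,
  // the SPIR-V 1.0 version word.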

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
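
// Illustrative note (not from the original source): the five words written by
// outputHeader() are, in order, the magic number (0x07230203), the version
// word (0x10000 for SPIR-V 1.0), the generator word (Google's vendor ID, 21,
// in the upper 16 bits), the ID bound (a placeholder patched later by
// patchHeader()), and the reserved schema word (0).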

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR entities needed for the Vulkan SPIR-V
  // output, such as global variables for arguments, constants, and pointer
  // types for argument access. This information is artificial in the sense
  // that it exists only to support SPIR-V generation. It runs ahead of
  // FindType and FindConstant.
  LLVMContext &Context = module->getContext();

  FindGlobalConstVars();

  FindResourceVars();

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : module->globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
  FindWorkgroupVars();

  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // A zext, sext, or uitofp on an i1 operand will be lowered to
          // OpSelect, so the constants it needs (0 and 1 for zext, 0 and -1
          // for sext, 0.0 and 1.0 for uitofp) are collected here.
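          // Illustrative example (assumed, not verbatim from this pass): the
          // lowering prepared for here turns
          //   %r = zext i1 %c to i32
          // into something like
          //   %r = OpSelect %uint %c %uint_1 %uint_0
          // which is why the 0 and 1 constants of the result type are needed.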

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          } else if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          } else if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (module->getTypeByName("opencl.image1d_ro_t.float") ||
        module->getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_wo_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_wo_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image3d_wo_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_array_wo_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.uint") ||
               module->getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.uint") ||
               module->getTypeByName("opencl.image1d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image1d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image2d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.int") ||
               module->getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}
992
SJW77b87ad2020-04-21 14:37:52 -0500993void SPIRVProducerPass::FindGlobalConstVars() {
994 clspv::NormalizeGlobalVariables(*module);
995 const DataLayout &DL = module->getDataLayout();
alan-baker56f7aff2019-05-22 08:06:42 -0400996
David Neto862b7d82018-06-14 18:48:37 -0400997 SmallVector<GlobalVariable *, 8> GVList;
998 SmallVector<GlobalVariable *, 8> DeadGVList;
SJW77b87ad2020-04-21 14:37:52 -0500999 for (GlobalVariable &GV : module->globals()) {
David Neto862b7d82018-06-14 18:48:37 -04001000 if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
1001 if (GV.use_empty()) {
1002 DeadGVList.push_back(&GV);
1003 } else {
1004 GVList.push_back(&GV);
1005 }
1006 }
1007 }
1008
1009 // Remove dead global __constant variables.
1010 for (auto GV : DeadGVList) {
1011 GV->eraseFromParent();
1012 }
1013 DeadGVList.clear();
1014
1015 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1016 // For now, we only support a single storage buffer.
1017 if (GVList.size() > 0) {
1018 assert(GVList.size() == 1);
1019 const auto *GV = GVList[0];
1020 const auto constants_byte_size =
Alan Bakerfcda9482018-10-02 17:09:59 -04001021 (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
David Neto862b7d82018-06-14 18:48:37 -04001022 const size_t kConstantMaxSize = 65536;
1023 if (constants_byte_size > kConstantMaxSize) {
1024 outs() << "Max __constant capacity of " << kConstantMaxSize
1025 << " bytes exceeded: " << constants_byte_size << " bytes used\n";
1026 llvm_unreachable("Max __constant capacity exceeded");
1027 }
1028 }
1029 } else {
1030 // Change global constant variable's address space to ModuleScopePrivate.
1031 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
1032 for (auto GV : GVList) {
1033 // Create new gv with ModuleScopePrivate address space.
1034 Type *NewGVTy = GV->getType()->getPointerElementType();
1035 GlobalVariable *NewGV = new GlobalVariable(
SJW77b87ad2020-04-21 14:37:52 -05001036 *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
David Neto862b7d82018-06-14 18:48:37 -04001037 nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
1038 NewGV->takeName(GV);
1039
1040 const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
1041 SmallVector<User *, 8> CandidateUsers;
1042
1043 auto record_called_function_type_as_user =
1044 [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
1045 // Find argument index.
1046 unsigned index = 0;
1047 for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
1048 if (gv == call->getOperand(i)) {
1049 // TODO(dneto): Should we break here?
1050 index = i;
1051 }
1052 }
1053
1054 // Record function type with global constant.
1055 GlobalConstFuncTyMap[call->getFunctionType()] =
1056 std::make_pair(call->getFunctionType(), index);
1057 };
1058
1059 for (User *GVU : GVUsers) {
1060 if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
1061 record_called_function_type_as_user(GV, Call);
1062 } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
1063 // Check GEP users.
1064 for (User *GEPU : GEP->users()) {
1065 if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
1066 record_called_function_type_as_user(GEP, GEPCall);
1067 }
1068 }
1069 }
1070
1071 CandidateUsers.push_back(GVU);
1072 }
1073
1074 for (User *U : CandidateUsers) {
1075 // Update users of gv with new gv.
alan-bakered80f572019-02-11 17:28:26 -05001076 if (!isa<Constant>(U)) {
1077 // #254: Can't change operands of a constant, but this shouldn't be
1078 // something that sticks around in the module.
1079 U->replaceUsesOfWith(GV, NewGV);
1080 }
David Neto862b7d82018-06-14 18:48:37 -04001081 }
1082
1083 // Delete original gv.
1084 GV->eraseFromParent();
1085 }
1086 }
1087}
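// Illustrative sketch (hypothetical source, not verbatim compiler state):
// a program-scope constant such as
//
//   __constant float kTable[2] = {0.5f, 1.5f};
//
// is either kept for the single clustered __constant storage buffer (when
// module constants are placed in a storage buffer, subject to the
// 65536-byte cap checked above) or rewritten into a ModuleScopePrivate
// global, which later lowers to the SPIR-V Private storage class.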
1088
SJW77b87ad2020-04-21 14:37:52 -05001089void SPIRVProducerPass::FindResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001090 ResourceVarInfoList.clear();
1091 FunctionToResourceVarsMap.clear();
1092 ModuleOrderedResourceVars.reset();
1093 // Normally, there is one resource variable per clspv.resource.var.*
1094 // function, since that is unique'd by arg type and index. By design,
1095 // we can share these resource variables across kernels because all
1096 // kernels use the same descriptor set.
1097 //
1098 // But if the user requested distinct descriptor sets per kernel, then
1099 // the descriptor allocator has made different (set,binding) pairs for
1100 // the same (type,arg_index) pair. Since we can decorate a resource
1101 // variable with only exactly one DescriptorSet and Binding, we are
1102 // forced in this case to make distinct resource variables whenever
Kévin Petitbbbda972020-03-03 19:16:31 +00001103  // the same clspv.resource.var.X function is seen with distinct
David Neto862b7d82018-06-14 18:48:37 -04001104 // (set,binding) values.
1105 const bool always_distinct_sets =
1106 clspv::Option::DistinctKernelDescriptorSets();
SJW77b87ad2020-04-21 14:37:52 -05001107 for (Function &F : *module) {
David Neto862b7d82018-06-14 18:48:37 -04001108 // Rely on the fact the resource var functions have a stable ordering
1109 // in the module.
Alan Baker202c8c72018-08-13 13:47:44 -04001110 if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001111 // Find all calls to this function with distinct set and binding pairs.
1112 // Save them in ResourceVarInfoList.
1113
1114      // Determine uniqueness of the (set,binding) pairs only within this
1115 // one resource-var builtin function.
1116 using SetAndBinding = std::pair<unsigned, unsigned>;
1117 // Maps set and binding to the resource var info.
1118 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1119 bool first_use = true;
1120 for (auto &U : F.uses()) {
1121 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1122 const auto set = unsigned(
1123 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1124 const auto binding = unsigned(
1125 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1126 const auto arg_kind = clspv::ArgKind(
1127 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1128 const auto arg_index = unsigned(
1129 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001130 const auto coherent = unsigned(
1131 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001132
1133 // Find or make the resource var info for this combination.
1134 ResourceVarInfo *rv = nullptr;
1135 if (always_distinct_sets) {
1136 // Make a new resource var any time we see a different
1137 // (set,binding) pair.
1138 SetAndBinding key{set, binding};
1139 auto where = set_and_binding_map.find(key);
1140 if (where == set_and_binding_map.end()) {
1141 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001142 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001143 ResourceVarInfoList.emplace_back(rv);
1144 set_and_binding_map[key] = rv;
1145 } else {
1146 rv = where->second;
1147 }
1148 } else {
1149 // The default is to make exactly one resource for each
1150 // clspv.resource.var.* function.
1151 if (first_use) {
1152 first_use = false;
1153 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001154 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001155 ResourceVarInfoList.emplace_back(rv);
1156 } else {
1157 rv = ResourceVarInfoList.back().get();
1158 }
1159 }
1160
1161 // Now populate FunctionToResourceVarsMap.
1162 auto &mapping =
1163 FunctionToResourceVarsMap[call->getParent()->getParent()];
1164 while (mapping.size() <= arg_index) {
1165 mapping.push_back(nullptr);
1166 }
1167 mapping[arg_index] = rv;
1168 }
1169 }
1170 }
1171 }
1172
1173 // Populate ModuleOrderedResourceVars.
SJW77b87ad2020-04-21 14:37:52 -05001174 for (Function &F : *module) {
David Neto862b7d82018-06-14 18:48:37 -04001175 auto where = FunctionToResourceVarsMap.find(&F);
1176 if (where != FunctionToResourceVarsMap.end()) {
1177 for (auto &rv : where->second) {
1178 if (rv != nullptr) {
1179 ModuleOrderedResourceVars.insert(rv);
1180 }
1181 }
1182 }
1183 }
1184 if (ShowResourceVars) {
1185 for (auto *info : ModuleOrderedResourceVars) {
1186 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1187 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1188 << "\n";
1189 }
1190 }
1191}
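// Illustrative sketch (hypothetical names and types, not verbatim IR):
// each resource accessor call walked above looks roughly like
//
//   %p = call <resource pointer type> @clspv.resource.var.0(
//            i32 0,    ; descriptor set   (operand 0)
//            i32 1,    ; binding          (operand 1)
//            i32 0,    ; argument kind    (operand 2)
//            i32 2,    ; kernel arg index (operand 3)
//            i32 ...,  ; (operand 4 is not read here)
//            i32 0)    ; coherent flag    (operand 5)
//
// By default one ResourceVarInfo is created per accessor function; with
// distinct kernel descriptor sets, one is created per (set,binding) pair.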
1192
SJW77b87ad2020-04-21 14:37:52 -05001193bool SPIRVProducerPass::FindExtInst() {
1194 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04001195 bool HasExtInst = false;
1196
SJW77b87ad2020-04-21 14:37:52 -05001197 for (Function &F : *module) {
David Neto22f144c2017-06-12 14:26:21 -04001198 for (BasicBlock &BB : F) {
1199 for (Instruction &I : BB) {
1200 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1201 Function *Callee = Call->getCalledFunction();
1202          // Check whether this call maps to an extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001203 auto callee_name = Callee->getName();
1204 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1205 const glsl::ExtInst IndirectEInst =
1206 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001207
David Neto3fbb4072017-10-16 11:28:14 -04001208 HasExtInst |=
1209 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1210
1211 if (IndirectEInst) {
1212 // Register extra constants if needed.
1213
1214 // Registers a type and constant for computing the result of the
1215 // given instruction. If the result of the instruction is a vector,
1216 // then make a splat vector constant with the same number of
1217 // elements.
1218 auto register_constant = [this, &I](Constant *constant) {
1219 FindType(constant->getType());
1220 FindConstant(constant);
1221 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1222 // Register the splat vector of the value with the same
1223 // width as the result of the instruction.
1224 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001225 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001226 constant);
1227 FindConstant(vec_constant);
1228 FindType(vec_constant->getType());
1229 }
1230 };
1231 switch (IndirectEInst) {
1232 case glsl::ExtInstFindUMsb:
1233 // clz needs OpExtInst and OpISub with constant 31, or splat
1234 // vector of 31. Add it to the constant list here.
1235 register_constant(
1236 ConstantInt::get(Type::getInt32Ty(Context), 31));
1237 break;
1238 case glsl::ExtInstAcos:
1239 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001240 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001241 case glsl::ExtInstAtan2:
1242 // We need 1/pi for acospi, asinpi, atan2pi.
1243 register_constant(
1244 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1245 break;
1246 default:
1247 assert(false && "internally inconsistent");
1248 }
David Neto22f144c2017-06-12 14:26:21 -04001249 }
1250 }
1251 }
1252 }
1253 }
1254
1255 return HasExtInst;
1256}
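// Illustrative sketch of why the extra constants are recorded: an OpenCL
// clz(x) lowers through FindUMsb plus a subtraction from 31, roughly
// (placeholder result IDs, not actual output of this pass):
//
//   %msb = OpExtInst %uint %glsl_import FindUMsb %x
//   %clz = OpISub %uint %uint_31 %msb
//
// Likewise the *pi variants handled above need the 1/pi constant registered
// so it already has an ID when the call is lowered.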
1257
1258void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1259 // Investigate global variable's type.
1260 FindType(GV.getType());
1261}
1262
1263void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1264 // Investigate function's type.
1265 FunctionType *FTy = F.getFunctionType();
1266
1267 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1268 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001269 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001270 if (GlobalConstFuncTyMap.count(FTy)) {
1271      uint32_t GVCstArgIdx = GlobalConstFuncTyMap[FTy].second;
1272 SmallVector<Type *, 4> NewFuncParamTys;
1273 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1274 Type *ParamTy = FTy->getParamType(i);
1275 if (i == GVCstArgIdx) {
1276 Type *EleTy = ParamTy->getPointerElementType();
1277 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1278 }
1279
1280 NewFuncParamTys.push_back(ParamTy);
1281 }
1282
1283 FunctionType *NewFTy =
1284 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1285 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1286 FTy = NewFTy;
1287 }
1288
1289 FindType(FTy);
1290 } else {
1291 // As kernel functions do not have parameters, create new function type and
1292    // Kernel functions take no parameters at this point, so create a new
1293    // parameterless function type and add it to the type map.
1294 FunctionType *NewFTy =
1295 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1296 FindType(NewFTy);
1297 }
1298
1299 // Investigate instructions' type in function body.
1300 for (BasicBlock &BB : F) {
1301 for (Instruction &I : BB) {
1302 if (isa<ShuffleVectorInst>(I)) {
1303 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1304 // Ignore type for mask of shuffle vector instruction.
1305 if (i == 2) {
1306 continue;
1307 }
1308
1309 Value *Op = I.getOperand(i);
1310 if (!isa<MetadataAsValue>(Op)) {
1311 FindType(Op->getType());
1312 }
1313 }
1314
1315 FindType(I.getType());
1316 continue;
1317 }
1318
David Neto862b7d82018-06-14 18:48:37 -04001319 CallInst *Call = dyn_cast<CallInst>(&I);
1320
1321 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001322 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001323 // This is a fake call representing access to a resource variable.
1324 // We handle that elsewhere.
1325 continue;
1326 }
1327
Alan Baker202c8c72018-08-13 13:47:44 -04001328 if (Call && Call->getCalledFunction()->getName().startswith(
1329 clspv::WorkgroupAccessorFunction())) {
1330 // This is a fake call representing access to a workgroup variable.
1331 // We handle that elsewhere.
1332 continue;
1333 }
1334
alan-bakerf083bed2020-01-29 08:15:42 -05001335 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1336 // OpCompositeExtract which takes literal values for indices. As a result
1337 // don't map the type of indices.
1338 if (I.getOpcode() == Instruction::ExtractValue) {
1339 FindType(I.getOperand(0)->getType());
1340 continue;
1341 }
1342 if (I.getOpcode() == Instruction::InsertValue) {
1343 FindType(I.getOperand(0)->getType());
1344 FindType(I.getOperand(1)->getType());
1345 continue;
1346 }
1347
1348 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1349 // the index is a constant. In such a case don't map the index type.
1350 if (I.getOpcode() == Instruction::ExtractElement) {
1351 FindType(I.getOperand(0)->getType());
1352 Value *op1 = I.getOperand(1);
1353 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1354 FindType(op1->getType());
1355 }
1356 continue;
1357 }
1358 if (I.getOpcode() == Instruction::InsertElement) {
1359 FindType(I.getOperand(0)->getType());
1360 FindType(I.getOperand(1)->getType());
1361 Value *op2 = I.getOperand(2);
1362 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1363 FindType(op2->getType());
1364 }
1365 continue;
1366 }
1367
David Neto22f144c2017-06-12 14:26:21 -04001368 // Work through the operands of the instruction.
1369 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1370 Value *const Op = I.getOperand(i);
1371 // If any of the operands is a constant, find the type!
1372 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1373 FindType(Op->getType());
1374 }
1375 }
1376
1377 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001378 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001379 // Avoid to check call instruction's type.
1380 break;
1381 }
Alan Baker202c8c72018-08-13 13:47:44 -04001382 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1383 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1384 clspv::WorkgroupAccessorFunction())) {
1385 // This is a fake call representing access to a workgroup variable.
1386 // We handle that elsewhere.
1387 continue;
1388 }
1389 }
David Neto22f144c2017-06-12 14:26:21 -04001390 if (!isa<MetadataAsValue>(&Op)) {
1391 FindType(Op->getType());
1392 continue;
1393 }
1394 }
1395
David Neto22f144c2017-06-12 14:26:21 -04001396 // We don't want to track the type of this call as we are going to replace
1397 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001398 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001399 Call->getCalledFunction()->getName())) {
1400 continue;
1401 }
1402
1403 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1404 // If gep's base operand has ModuleScopePrivate address space, make gep
1405 // return ModuleScopePrivate address space.
1406 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1407 // Add pointer type with private address space for global constant to
1408 // type list.
1409 Type *EleTy = I.getType()->getPointerElementType();
1410 Type *NewPTy =
1411 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1412
1413 FindType(NewPTy);
1414 continue;
1415 }
1416 }
1417
1418 FindType(I.getType());
1419 }
1420 }
1421}
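// Illustrative sketch (placeholder names): kernel arguments have already
// been rewritten into resource accesses by earlier passes, so a kernel like
//
//   kernel void foo(global float *in, global float *out) { ... }
//
// only needs the parameterless function type, which later becomes roughly
//
//   %void  = OpTypeVoid
//   %fn_ty = OpTypeFunction %void
//
// Non-kernel functions keep their parameters, with __constant pointer
// parameters retargeted to the ModuleScopePrivate address space.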
1422
SJW77b87ad2020-04-21 14:37:52 -05001423void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001424 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001425 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001426 0 < getSamplerMap().size()) {
SJW77b87ad2020-04-21 14:37:52 -05001427 auto SamplerStructTy = module->getTypeByName("opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001428 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001429 SamplerStructTy =
1430 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001431 }
1432
1433 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1434
1435 FindType(SamplerTy);
1436 }
1437}
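// Illustrative sketch (placeholder IDs): the SamplerTy recorded above is a
// pointer to the opaque "opencl.sampler_t" struct in UniformConstant, which
// later lowers to roughly
//
//   %sampler     = OpTypeSampler
//   %ptr_sampler = OpTypePointer UniformConstant %sampler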
1438
SJW77b87ad2020-04-21 14:37:52 -05001439void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001440 // Record types so they are generated.
1441 TypesNeedingLayout.reset();
1442 StructTypesNeedingBlock.reset();
1443
1444 // To match older clspv codegen, generate the float type first if required
1445 // for images.
1446 for (const auto *info : ModuleOrderedResourceVars) {
1447 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1448 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001449 if (IsIntImageType(info->var_fn->getReturnType())) {
1450 // Nothing for now...
1451 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001452 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001453 }
1454
1455 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001456 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001457 }
1458 }
1459
1460 for (const auto *info : ModuleOrderedResourceVars) {
1461 Type *type = info->var_fn->getReturnType();
1462
1463 switch (info->arg_kind) {
1464 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001465 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001466 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1467 StructTypesNeedingBlock.insert(sty);
1468 } else {
1469 errs() << *type << "\n";
1470 llvm_unreachable("Buffer arguments must map to structures!");
1471 }
1472 break;
1473 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001474 case clspv::ArgKind::PodUBO:
1475 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001476 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1477 StructTypesNeedingBlock.insert(sty);
1478 } else {
1479 errs() << *type << "\n";
1480 llvm_unreachable("POD arguments must map to structures!");
1481 }
1482 break;
1483 case clspv::ArgKind::ReadOnlyImage:
1484 case clspv::ArgKind::WriteOnlyImage:
1485 case clspv::ArgKind::Sampler:
1486 // Sampler and image types map to the pointee type but
1487 // in the uniform constant address space.
1488 type = PointerType::get(type->getPointerElementType(),
1489 clspv::AddressSpace::UniformConstant);
1490 break;
1491 default:
1492 break;
1493 }
1494
1495 // The converted type is the type of the OpVariable we will generate.
1496 // If the pointee type is an array of size zero, FindType will convert it
1497 // to a runtime array.
1498 FindType(type);
1499 }
1500
alan-bakerdcd97412019-09-16 15:32:30 -04001501 // If module constants are clustered in a storage buffer then that struct
1502 // needs layout decorations.
1503 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001504 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001505 PointerType *PTy = cast<PointerType>(GV.getType());
1506 const auto AS = PTy->getAddressSpace();
1507 const bool module_scope_constant_external_init =
1508 (AS == AddressSpace::Constant) && GV.hasInitializer();
1509 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1510 if (module_scope_constant_external_init &&
1511 spv::BuiltInMax == BuiltinType) {
1512 StructTypesNeedingBlock.insert(
1513 cast<StructType>(PTy->getPointerElementType()));
1514 }
1515 }
1516 }
1517
SJW77b87ad2020-04-21 14:37:52 -05001518 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001519 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1520 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1521 assert(Ty->isStructTy() && "Push constants have to be structures.");
1522 auto STy = cast<StructType>(Ty);
1523 StructTypesNeedingBlock.insert(STy);
1524 }
1525 }
1526
David Neto862b7d82018-06-14 18:48:37 -04001527 // Traverse the arrays and structures underneath each Block, and
1528 // mark them as needing layout.
1529 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1530 StructTypesNeedingBlock.end());
1531 while (!work_list.empty()) {
1532 Type *type = work_list.back();
1533 work_list.pop_back();
1534 TypesNeedingLayout.insert(type);
1535 switch (type->getTypeID()) {
1536 case Type::ArrayTyID:
1537 work_list.push_back(type->getArrayElementType());
1538 if (!Hack_generate_runtime_array_stride_early) {
1539 // Remember this array type for deferred decoration.
1540 TypesNeedingArrayStride.insert(type);
1541 }
1542 break;
1543 case Type::StructTyID:
1544 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1545 work_list.push_back(elem_ty);
1546 }
1547 default:
1548 // This type and its contained types don't get layout.
1549 break;
1550 }
1551 }
1552}
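// Illustrative sketch (placeholder IDs): a storage-buffer argument whose
// pointee is a struct wrapping a runtime array, e.g. { [0 x float] }, lands
// in StructTypesNeedingBlock, and the traversal above marks the struct and
// its contained array as needing layout, yielding decorations along the
// lines of
//
//   OpDecorate %rta ArrayStride 4
//   OpMemberDecorate %outer 0 Offset 0
//   OpDecorate %outer Block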
1553
SJW77b87ad2020-04-21 14:37:52 -05001554void SPIRVProducerPass::FindWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001555 // The SpecId assignment for pointer-to-local arguments is recorded in
1556 // module-level metadata. Translate that information into local argument
1557 // information.
SJW77b87ad2020-04-21 14:37:52 -05001558 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001559 if (!nmd)
1560 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001561 for (auto operand : nmd->operands()) {
1562 MDTuple *tuple = cast<MDTuple>(operand);
1563 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1564 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001565 ConstantAsMetadata *arg_index_md =
1566 cast<ConstantAsMetadata>(tuple->getOperand(1));
1567 int arg_index = static_cast<int>(
1568 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1569 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001570
1571 ConstantAsMetadata *spec_id_md =
1572 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001573 int spec_id = static_cast<int>(
1574 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001575
Alan Baker202c8c72018-08-13 13:47:44 -04001576 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001577 if (LocalSpecIdInfoMap.count(spec_id))
1578 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001579
1580 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1581 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1582 nextID + 1, nextID + 2,
1583 nextID + 3, spec_id};
1584 LocalSpecIdInfoMap[spec_id] = info;
1585 nextID += 4;
1586
1587 // Ensure the types necessary for this argument get generated.
SJW77b87ad2020-04-21 14:37:52 -05001588 Type *IdxTy = Type::getInt32Ty(module->getContext());
Alan Baker202c8c72018-08-13 13:47:44 -04001589 FindConstant(ConstantInt::get(IdxTy, 0));
1590 FindType(IdxTy);
1591 FindType(arg->getType());
1592 }
1593}
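// Illustrative sketch (hypothetical function and values): each metadata
// tuple read above has the layout (kernel function, argument index, SpecId),
// e.g.
//
//   !{void (float addrspace(3)*)* @foo, i32 1, i32 3}
//
// Each newly seen SpecId reserves four consecutive result IDs, covering the
// workgroup variable plus its array-size spec constant and the array and
// pointer-to-Workgroup types.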
1594
David Neto22f144c2017-06-12 14:26:21 -04001595void SPIRVProducerPass::FindType(Type *Ty) {
1596 TypeList &TyList = getTypeList();
1597
1598 if (0 != TyList.idFor(Ty)) {
1599 return;
1600 }
1601
1602 if (Ty->isPointerTy()) {
1603 auto AddrSpace = Ty->getPointerAddressSpace();
1604 if ((AddressSpace::Constant == AddrSpace) ||
1605 (AddressSpace::Global == AddrSpace)) {
1606 auto PointeeTy = Ty->getPointerElementType();
1607
1608 if (PointeeTy->isStructTy() &&
1609 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1610 FindType(PointeeTy);
1611 auto ActualPointerTy =
1612 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1613 FindType(ActualPointerTy);
1614 return;
1615 }
1616 }
1617 }
1618
David Neto862b7d82018-06-14 18:48:37 -04001619 // By convention, LLVM array type with 0 elements will map to
1620 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1621  // has a constant number of elements. We need to support the type of
1622  // that length constant.
1623 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1624 if (arrayTy->getNumElements() > 0) {
1625 LLVMContext &Context = Ty->getContext();
1626 FindType(Type::getInt32Ty(Context));
1627 }
David Neto22f144c2017-06-12 14:26:21 -04001628 }
1629
1630 for (Type *SubTy : Ty->subtypes()) {
1631 FindType(SubTy);
1632 }
1633
1634 TyList.insert(Ty);
1635}
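// Illustrative sketch (placeholder IDs) of the zero-length-array convention
// noted above: [0 x float] becomes a runtime array with no length operand,
// while [4 x float] becomes an array whose length is an i32 constant, which
// is why i32 is registered alongside non-empty array types:
//
//   %rta = OpTypeRuntimeArray %float
//   %len = OpConstant %uint 4
//   %arr = OpTypeArray %float %len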
1636
1637void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1638 // If the global variable has a (non undef) initializer.
1639 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001640 // Generate the constant if it's not the initializer to a module scope
1641 // constant that we will expect in a storage buffer.
1642 const bool module_scope_constant_external_init =
1643 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1644 clspv::Option::ModuleConstantsInStorageBuffer();
1645 if (!module_scope_constant_external_init) {
1646 FindConstant(GV.getInitializer());
1647 }
David Neto22f144c2017-06-12 14:26:21 -04001648 }
1649}
1650
1651void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1652 // Investigate constants in function body.
1653 for (BasicBlock &BB : F) {
1654 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001655 if (auto *call = dyn_cast<CallInst>(&I)) {
1656 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001657 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001658 // We've handled these constants elsewhere, so skip it.
1659 continue;
1660 }
Alan Baker202c8c72018-08-13 13:47:44 -04001661 if (name.startswith(clspv::ResourceAccessorFunction())) {
1662 continue;
1663 }
1664 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001665 continue;
1666 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001667 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1668 // Skip the first operand that has the SPIR-V Opcode
1669 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1670 if (isa<Constant>(I.getOperand(i)) &&
1671 !isa<GlobalValue>(I.getOperand(i))) {
1672 FindConstant(I.getOperand(i));
1673 }
1674 }
1675 continue;
1676 }
David Neto22f144c2017-06-12 14:26:21 -04001677 }
1678
1679 if (isa<AllocaInst>(I)) {
1680        // An alloca instruction has a constant element count. Ignore it.
1681 continue;
1682 } else if (isa<ShuffleVectorInst>(I)) {
1683 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1684 // Ignore constant for mask of shuffle vector instruction.
1685 if (i == 2) {
1686 continue;
1687 }
1688
1689 if (isa<Constant>(I.getOperand(i)) &&
1690 !isa<GlobalValue>(I.getOperand(i))) {
1691 FindConstant(I.getOperand(i));
1692 }
1693 }
1694
1695 continue;
1696 } else if (isa<InsertElementInst>(I)) {
1697 // Handle InsertElement with <4 x i8> specially.
1698 Type *CompositeTy = I.getOperand(0)->getType();
1699 if (is4xi8vec(CompositeTy)) {
1700 LLVMContext &Context = CompositeTy->getContext();
1701 if (isa<Constant>(I.getOperand(0))) {
1702 FindConstant(I.getOperand(0));
1703 }
1704
1705 if (isa<Constant>(I.getOperand(1))) {
1706 FindConstant(I.getOperand(1));
1707 }
1708
1709 // Add mask constant 0xFF.
1710 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1711 FindConstant(CstFF);
1712
1713 // Add shift amount constant.
1714 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1715 uint64_t Idx = CI->getZExtValue();
1716 Constant *CstShiftAmount =
1717 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1718 FindConstant(CstShiftAmount);
1719 }
1720
1721 continue;
1722 }
1723
1724 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1725 // Ignore constant for index of InsertElement instruction.
1726 if (i == 2) {
1727 continue;
1728 }
1729
1730 if (isa<Constant>(I.getOperand(i)) &&
1731 !isa<GlobalValue>(I.getOperand(i))) {
1732 FindConstant(I.getOperand(i));
1733 }
1734 }
1735
1736 continue;
1737 } else if (isa<ExtractElementInst>(I)) {
1738 // Handle ExtractElement with <4 x i8> specially.
1739 Type *CompositeTy = I.getOperand(0)->getType();
1740 if (is4xi8vec(CompositeTy)) {
1741 LLVMContext &Context = CompositeTy->getContext();
1742 if (isa<Constant>(I.getOperand(0))) {
1743 FindConstant(I.getOperand(0));
1744 }
1745
1746 // Add mask constant 0xFF.
1747 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1748 FindConstant(CstFF);
1749
1750 // Add shift amount constant.
1751 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1752 uint64_t Idx = CI->getZExtValue();
1753 Constant *CstShiftAmount =
1754 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1755 FindConstant(CstShiftAmount);
1756 } else {
1757 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1758 FindConstant(Cst8);
1759 }
1760
1761 continue;
1762 }
1763
1764 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1765 // Ignore constant for index of ExtractElement instruction.
1766 if (i == 1) {
1767 continue;
1768 }
1769
1770 if (isa<Constant>(I.getOperand(i)) &&
1771 !isa<GlobalValue>(I.getOperand(i))) {
1772 FindConstant(I.getOperand(i));
1773 }
1774 }
1775
1776 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001777 } else if ((Instruction::Xor == I.getOpcode()) &&
1778 I.getType()->isIntegerTy(1)) {
1779 // We special case for Xor where the type is i1 and one of the arguments
1780 // is a constant 1 (true), this is an OpLogicalNot in SPIR-V, and we
1781 // don't need the constant
David Neto22f144c2017-06-12 14:26:21 -04001782 bool foundConstantTrue = false;
1783 for (Use &Op : I.operands()) {
1784 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1785 auto CI = cast<ConstantInt>(Op);
1786
1787 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001788 // If we already found the true constant, we might (probably only
1789 // on -O0) have an OpLogicalNot which is taking a constant
1790 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001791 FindConstant(Op);
1792 } else {
1793 foundConstantTrue = true;
1794 }
1795 }
1796 }
1797
1798 continue;
David Netod2de94a2017-08-28 17:27:47 -04001799 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001800 // Special case if i8 is not generally handled.
1801 if (!clspv::Option::Int8Support()) {
1802 // For truncation to i8 we mask against 255.
1803 Type *ToTy = I.getType();
1804 if (8u == ToTy->getPrimitiveSizeInBits()) {
1805 LLVMContext &Context = ToTy->getContext();
1806 Constant *Cst255 =
1807 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1808 FindConstant(Cst255);
1809 }
David Netod2de94a2017-08-28 17:27:47 -04001810 }
Neil Henning39672102017-09-29 14:33:13 +01001811 } else if (isa<AtomicRMWInst>(I)) {
1812 LLVMContext &Context = I.getContext();
1813
1814 FindConstant(
1815 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1816 FindConstant(ConstantInt::get(
1817 Type::getInt32Ty(Context),
1818 spv::MemorySemanticsUniformMemoryMask |
1819 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001820 }
1821
1822 for (Use &Op : I.operands()) {
1823 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1824 FindConstant(Op);
1825 }
1826 }
1827 }
1828 }
1829}
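// Illustrative sketch (placeholder IDs) for the <4 x i8> special case above:
// extracting byte Idx of the packed 32-bit word later lowers to a shift by
// 8*Idx and a mask with 0xFF, which is why those constants are registered
// here. For Idx == 2 the result is roughly
//
//   %shifted = OpShiftRightLogical %uint %word %uint_16
//   %byte    = OpBitwiseAnd %uint %shifted %uint_255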
1830
1831void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001832 ValueList &CstList = getConstantList();
1833
David Netofb9a7972017-08-25 17:08:24 -04001834 // If V is already tracked, ignore it.
1835 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001836 return;
1837 }
1838
David Neto862b7d82018-06-14 18:48:37 -04001839 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1840 return;
1841 }
1842
David Neto22f144c2017-06-12 14:26:21 -04001843 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001844 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001845
1846 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001847 if (is4xi8vec(CstTy)) {
1848 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001849 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001850 }
1851 }
1852
1853 if (Cst->getNumOperands()) {
1854 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1855 ++I) {
1856 FindConstant(*I);
1857 }
1858
David Netofb9a7972017-08-25 17:08:24 -04001859 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001860 return;
1861 } else if (const ConstantDataSequential *CDS =
1862 dyn_cast<ConstantDataSequential>(Cst)) {
1863 // Add constants for each element to constant list.
1864 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1865 Constant *EleCst = CDS->getElementAsConstant(i);
1866 FindConstant(EleCst);
1867 }
1868 }
1869
1870 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001871 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001872 }
1873}
1874
1875spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1876 switch (AddrSpace) {
1877 default:
1878 llvm_unreachable("Unsupported OpenCL address space");
1879 case AddressSpace::Private:
1880 return spv::StorageClassFunction;
1881 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001882 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001883 case AddressSpace::Constant:
1884 return clspv::Option::ConstantArgsInUniformBuffer()
1885 ? spv::StorageClassUniform
1886 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001887 case AddressSpace::Input:
1888 return spv::StorageClassInput;
1889 case AddressSpace::Local:
1890 return spv::StorageClassWorkgroup;
1891 case AddressSpace::UniformConstant:
1892 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001893 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001894 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001895 case AddressSpace::ModuleScopePrivate:
1896 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001897 case AddressSpace::PushConstant:
1898 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001899 }
1900}
1901
David Neto862b7d82018-06-14 18:48:37 -04001902spv::StorageClass
1903SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1904 switch (arg_kind) {
1905 case clspv::ArgKind::Buffer:
1906 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001907 case clspv::ArgKind::BufferUBO:
1908 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001909 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001910 return spv::StorageClassStorageBuffer;
1911 case clspv::ArgKind::PodUBO:
1912 return spv::StorageClassUniform;
1913 case clspv::ArgKind::PodPushConstant:
1914 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001915 case clspv::ArgKind::Local:
1916 return spv::StorageClassWorkgroup;
1917 case clspv::ArgKind::ReadOnlyImage:
1918 case clspv::ArgKind::WriteOnlyImage:
1919 case clspv::ArgKind::Sampler:
1920 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001921 default:
1922 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001923 }
1924}
1925
David Neto22f144c2017-06-12 14:26:21 -04001926spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1927 return StringSwitch<spv::BuiltIn>(Name)
1928 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1929 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1930 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1931 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1932 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001933 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001934 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001935 .Default(spv::BuiltInMax);
1936}
1937
1938void SPIRVProducerPass::GenerateExtInstImport() {
SJW69939d52020-04-16 07:29:07 -05001939 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kImports);
David Neto22f144c2017-06-12 14:26:21 -04001940 uint32_t &ExtInstImportID = getOpExtInstImportID();
1941
1942 //
1943 // Generate OpExtInstImport.
1944 //
1945 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001946 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001947 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1948 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001949}
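// Illustrative result (placeholder ID): the module gets a single import that
// later OpExtInst instructions reference, e.g.
//
//   %glsl_import = OpExtInstImport "GLSL.std.450"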
1950
SJW77b87ad2020-04-21 14:37:52 -05001951void SPIRVProducerPass::GenerateSPIRVTypes() {
SJW69939d52020-04-16 07:29:07 -05001952 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kTypes);
David Neto22f144c2017-06-12 14:26:21 -04001953 ValueMapType &VMap = getValueMap();
1954 ValueMapType &AllocatedVMap = getAllocatedValueMap();
SJW77b87ad2020-04-21 14:37:52 -05001955 const auto &DL = module->getDataLayout();
1956 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04001957
1958  // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
1959  // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1960 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1961
1962 for (Type *Ty : getTypeList()) {
1963 // Update TypeMap with nextID for reference later.
1964 TypeMap[Ty] = nextID;
1965
1966 switch (Ty->getTypeID()) {
1967 default: {
1968 Ty->print(errs());
1969 llvm_unreachable("Unsupported type???");
1970 break;
1971 }
1972 case Type::MetadataTyID:
1973 case Type::LabelTyID: {
1974 // Ignore these types.
1975 break;
1976 }
1977 case Type::PointerTyID: {
1978 PointerType *PTy = cast<PointerType>(Ty);
1979 unsigned AddrSpace = PTy->getAddressSpace();
1980
1981 // For the purposes of our Vulkan SPIR-V type system, constant and global
1982 // are conflated.
1983 bool UseExistingOpTypePointer = false;
1984 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001985 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1986 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001987 // Check to see if we already created this type (for instance, if we
1988 // had a constant <type>* and a global <type>*, the type would be
1989 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001990 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1991 if (0 < TypeMap.count(GlobalTy)) {
1992 TypeMap[PTy] = TypeMap[GlobalTy];
1993 UseExistingOpTypePointer = true;
1994 break;
1995 }
David Neto22f144c2017-06-12 14:26:21 -04001996 }
1997 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001998 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1999 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04002000
alan-bakerb6b09dc2018-11-08 16:59:28 -05002001 // Check to see if we already created this type (for instance, if we
2002 // had a constant <type>* and a global <type>*, the type would be
2003 // created by one of these types, and shared by both).
2004 auto ConstantTy =
2005 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04002006 if (0 < TypeMap.count(ConstantTy)) {
2007 TypeMap[PTy] = TypeMap[ConstantTy];
2008 UseExistingOpTypePointer = true;
2009 }
David Neto22f144c2017-06-12 14:26:21 -04002010 }
2011 }
2012
David Neto862b7d82018-06-14 18:48:37 -04002013 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04002014
David Neto862b7d82018-06-14 18:48:37 -04002015 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04002016 //
2017 // Generate OpTypePointer.
2018 //
2019
2020 // OpTypePointer
2021 // Ops[0] = Storage Class
2022 // Ops[1] = Element Type ID
2023 SPIRVOperandList Ops;
2024
David Neto257c3892018-04-11 13:19:45 -04002025 Ops << MkNum(GetStorageClass(AddrSpace))
2026 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04002027
David Neto87846742018-04-11 17:36:22 -04002028 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002029 SPIRVInstList.push_back(Inst);
2030 }
David Neto22f144c2017-06-12 14:26:21 -04002031 break;
2032 }
2033 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002034 StructType *STy = cast<StructType>(Ty);
2035
2036 // Handle sampler type.
2037 if (STy->isOpaque()) {
2038 if (STy->getName().equals("opencl.sampler_t")) {
2039 //
2040 // Generate OpTypeSampler
2041 //
2042 // Empty Ops.
2043 SPIRVOperandList Ops;
2044
David Neto87846742018-04-11 17:36:22 -04002045 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002046 SPIRVInstList.push_back(Inst);
2047 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05002048 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2049 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002050 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2051 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002052 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002053 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002054 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2055 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002056 STy->getName().startswith("opencl.image3d_ro_t") ||
2057 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002058 //
2059 // Generate OpTypeImage
2060 //
2061 // Ops[0] = Sampled Type ID
2062 // Ops[1] = Dim ID
2063 // Ops[2] = Depth (Literal Number)
2064 // Ops[3] = Arrayed (Literal Number)
2065 // Ops[4] = MS (Literal Number)
2066 // Ops[5] = Sampled (Literal Number)
2067 // Ops[6] = Image Format ID
2068 //
2069 SPIRVOperandList Ops;
2070
alan-bakerf67468c2019-11-25 15:51:49 -05002071 uint32_t ImageTyID = nextID++;
2072 uint32_t SampledTyID = 0;
2073 if (STy->getName().contains(".float")) {
2074 SampledTyID = lookupType(Type::getFloatTy(Context));
2075 } else if (STy->getName().contains(".uint")) {
2076 SampledTyID = lookupType(Type::getInt32Ty(Context));
2077 } else if (STy->getName().contains(".int")) {
2078 // Generate a signed 32-bit integer if necessary.
2079 if (int32ID == 0) {
2080 int32ID = nextID++;
2081 SPIRVOperandList intOps;
2082 intOps << MkNum(32);
2083 intOps << MkNum(1);
2084 auto signed_int =
2085 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2086 SPIRVInstList.push_back(signed_int);
2087 }
2088 SampledTyID = int32ID;
2089
2090 // Generate a vec4 of the signed int if necessary.
2091 if (v4int32ID == 0) {
2092 v4int32ID = nextID++;
2093 SPIRVOperandList vecOps;
2094 vecOps << MkId(int32ID);
2095 vecOps << MkNum(4);
2096 auto int_vec =
2097 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2098 SPIRVInstList.push_back(int_vec);
2099 }
2100 } else {
2101 // This was likely an UndefValue.
2102 SampledTyID = lookupType(Type::getFloatTy(Context));
2103 }
David Neto257c3892018-04-11 13:19:45 -04002104 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002105
2106 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002107 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002108 STy->getName().startswith("opencl.image1d_wo_t") ||
2109 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2110 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002111 DimID = spv::Dim1D;
2112 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2113 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002114 DimID = spv::Dim3D;
2115 }
David Neto257c3892018-04-11 13:19:45 -04002116 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002117
2118 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002119 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002120
alan-baker7150a1d2020-02-25 08:31:06 -05002121 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2122 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002123
2124 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002125 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002126
alan-baker7150a1d2020-02-25 08:31:06 -05002127 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002128 //
2129 // From Spec
2130 //
2131 // 0 indicates this is only known at run time, not at compile time
2132 // 1 indicates will be used with sampler
2133 // 2 indicates will be used without a sampler (a storage image)
2134 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002135 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002136 Sampled = 2;
2137 }
David Neto257c3892018-04-11 13:19:45 -04002138 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002139
2140 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002141 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002142
alan-bakerf67468c2019-11-25 15:51:49 -05002143 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002144 SPIRVInstList.push_back(Inst);
2145 break;
2146 }
2147 }
2148
2149 //
2150 // Generate OpTypeStruct
2151 //
2152 // Ops[0] ... Ops[n] = Member IDs
2153 SPIRVOperandList Ops;
2154
2155 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002156 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002157 }
2158
David Neto22f144c2017-06-12 14:26:21 -04002159 uint32_t STyID = nextID;
2160
alan-bakerb6b09dc2018-11-08 16:59:28 -05002161 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002162 SPIRVInstList.push_back(Inst);
2163
2164 // Generate OpMemberDecorate.
Kévin Petitbbbda972020-03-03 19:16:31 +00002165 if (TypesNeedingLayout.idFor(STy)) {
2166 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2167 MemberIdx++) {
2168 // Ops[0] = Structure Type ID
2169 // Ops[1] = Member Index(Literal Number)
2170 // Ops[2] = Decoration (Offset)
2171 // Ops[3] = Byte Offset (Literal Number)
2172 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002173
Kévin Petitbbbda972020-03-03 19:16:31 +00002174 Ops << MkId(STyID) << MkNum(MemberIdx)
2175 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002176
Kévin Petitbbbda972020-03-03 19:16:31 +00002177 const auto ByteOffset =
2178 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002179
Kévin Petitbbbda972020-03-03 19:16:31 +00002180 Ops << MkNum(ByteOffset);
2181
2182 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002183 getSPIRVInstList(kAnnotations).push_back(DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002184 }
David Neto22f144c2017-06-12 14:26:21 -04002185 }
2186
2187 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002188 if (StructTypesNeedingBlock.idFor(STy)) {
2189 Ops.clear();
2190 // Use Block decorations with StorageBuffer storage class.
2191 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002192
David Neto862b7d82018-06-14 18:48:37 -04002193 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002194 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002195 }
2196 break;
2197 }
2198 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002199 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002200
2201 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002202 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002203 SPIRVInstList.push_back(Inst);
2204 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002205 if (!clspv::Option::Int8Support()) {
2206 // i8 is added to TypeMap as i32.
2207 // No matter what LLVM type is requested first, always alias the
2208 // second one's SPIR-V type to be the same as the one we generated
2209 // first.
2210 unsigned aliasToWidth = 0;
2211 if (BitWidth == 8) {
2212 aliasToWidth = 32;
2213 BitWidth = 32;
2214 } else if (BitWidth == 32) {
2215 aliasToWidth = 8;
2216 }
2217 if (aliasToWidth) {
2218 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2219 auto where = TypeMap.find(otherType);
2220 if (where == TypeMap.end()) {
2221 // Go ahead and make it, but also map the other type to it.
2222 TypeMap[otherType] = nextID;
2223 } else {
2224 // Alias this SPIR-V type the existing type.
2225              // Alias this SPIR-V type to the existing type.
2226 break;
2227 }
David Neto391aeb12017-08-26 15:51:58 -04002228 }
David Neto22f144c2017-06-12 14:26:21 -04002229 }
2230
David Neto257c3892018-04-11 13:19:45 -04002231 SPIRVOperandList Ops;
2232 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002233
2234 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002235 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002236 }
2237 break;
2238 }
2239 case Type::HalfTyID:
2240 case Type::FloatTyID:
2241 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002242 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002243 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002244
2245 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002246 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002247 break;
2248 }
2249 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002250 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002251 const uint64_t Length = ArrTy->getArrayNumElements();
2252 if (Length == 0) {
2253 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002254
David Neto862b7d82018-06-14 18:48:37 -04002255 // Only generate the type once.
2256 // TODO(dneto): Can it ever be generated more than once?
2257 // Doesn't LLVM type uniqueness guarantee we'll only see this
2258 // once?
2259 Type *EleTy = ArrTy->getArrayElementType();
2260 if (OpRuntimeTyMap.count(EleTy) == 0) {
2261 uint32_t OpTypeRuntimeArrayID = nextID;
2262 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002263
David Neto862b7d82018-06-14 18:48:37 -04002264 //
2265 // Generate OpTypeRuntimeArray.
2266 //
David Neto22f144c2017-06-12 14:26:21 -04002267
David Neto862b7d82018-06-14 18:48:37 -04002268 // OpTypeRuntimeArray
2269 // Ops[0] = Element Type ID
2270 SPIRVOperandList Ops;
2271 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002272
David Neto862b7d82018-06-14 18:48:37 -04002273 SPIRVInstList.push_back(
2274 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002275
David Neto862b7d82018-06-14 18:48:37 -04002276 if (Hack_generate_runtime_array_stride_early) {
2277 // Generate OpDecorate.
David Neto22f144c2017-06-12 14:26:21 -04002278
David Neto862b7d82018-06-14 18:48:37 -04002279 // Ops[0] = Target ID
2280 // Ops[1] = Decoration (ArrayStride)
2281 // Ops[2] = Stride Number(Literal Number)
2282 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002283
David Neto862b7d82018-06-14 18:48:37 -04002284 Ops << MkId(OpTypeRuntimeArrayID)
2285 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002286 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002287
David Neto862b7d82018-06-14 18:48:37 -04002288 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002289 getSPIRVInstList(kAnnotations).push_back(DecoInst);
David Neto862b7d82018-06-14 18:48:37 -04002290 }
2291 }
David Neto22f144c2017-06-12 14:26:21 -04002292
David Neto862b7d82018-06-14 18:48:37 -04002293 } else {
David Neto22f144c2017-06-12 14:26:21 -04002294
David Neto862b7d82018-06-14 18:48:37 -04002295 //
2296 // Generate OpConstant and OpTypeArray.
2297 //
2298
2299 //
2300 // Generate OpConstant for array length.
2301 //
2302 // Ops[0] = Result Type ID
2303 // Ops[1] .. Ops[n] = Values LiteralNumber
2304 SPIRVOperandList Ops;
2305
2306 Type *LengthTy = Type::getInt32Ty(Context);
2307 uint32_t ResTyID = lookupType(LengthTy);
2308 Ops << MkId(ResTyID);
2309
2310 assert(Length < UINT32_MAX);
2311 Ops << MkNum(static_cast<uint32_t>(Length));
2312
2313 // Add constant for length to constant list.
2314 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2315 AllocatedVMap[CstLength] = nextID;
2316 VMap[CstLength] = nextID;
2317 uint32_t LengthID = nextID;
2318
2319 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2320 SPIRVInstList.push_back(CstInst);
2321
2322 // Remember to generate ArrayStride later
2323 getTypesNeedingArrayStride().insert(Ty);
2324
2325 //
2326 // Generate OpTypeArray.
2327 //
2328 // Ops[0] = Element Type ID
2329 // Ops[1] = Array Length Constant ID
2330 Ops.clear();
2331
2332 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2333 Ops << MkId(EleTyID) << MkId(LengthID);
2334
2335 // Update TypeMap with nextID.
2336 TypeMap[Ty] = nextID;
2337
2338 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2339 SPIRVInstList.push_back(ArrayInst);
2340 }
David Neto22f144c2017-06-12 14:26:21 -04002341 break;
2342 }
James Price59a1c752020-04-23 23:06:16 -04002343 case Type::FixedVectorTyID: {
James Pricecf53df42020-04-20 14:41:24 -04002344 auto VecTy = cast<VectorType>(Ty);
alan-bakerb39c8262019-03-08 14:03:37 -05002345 // <4 x i8> is changed to i32 if i8 is not generally supported.
2346 if (!clspv::Option::Int8Support() &&
James Pricecf53df42020-04-20 14:41:24 -04002347 VecTy->getElementType() == Type::getInt8Ty(Context)) {
2348 if (VecTy->getNumElements() == 4) {
2349 TypeMap[Ty] = lookupType(VecTy->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002350 break;
2351 } else {
2352 Ty->print(errs());
2353 llvm_unreachable("Support above i8 vector type");
2354 }
2355 }
2356
2357 // Ops[0] = Component Type ID
2358 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002359 SPIRVOperandList Ops;
James Pricecf53df42020-04-20 14:41:24 -04002360 Ops << MkId(lookupType(VecTy->getElementType()))
2361 << MkNum(VecTy->getNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002362
alan-bakerb6b09dc2018-11-08 16:59:28 -05002363 SPIRVInstruction *inst =
2364 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002365 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002366 break;
2367 }
2368 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002369 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002370 SPIRVInstList.push_back(Inst);
2371 break;
2372 }
2373 case Type::FunctionTyID: {
2374 // Generate SPIRV instruction for function type.
2375 FunctionType *FTy = cast<FunctionType>(Ty);
2376
2377 // Ops[0] = Return Type ID
2378 // Ops[1] ... Ops[n] = Parameter Type IDs
2379 SPIRVOperandList Ops;
2380
2381 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002382 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002383
2384 // Find SPIRV instructions for parameter types
2385 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2386 // Find SPIRV instruction for parameter type.
2387 auto ParamTy = FTy->getParamType(k);
2388 if (ParamTy->isPointerTy()) {
2389 auto PointeeTy = ParamTy->getPointerElementType();
2390 if (PointeeTy->isStructTy() &&
2391 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2392 ParamTy = PointeeTy;
2393 }
2394 }
2395
David Netoc6f3ab22018-04-06 18:02:31 -04002396 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002397 }
2398
David Neto87846742018-04-11 17:36:22 -04002399 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002400 SPIRVInstList.push_back(Inst);
2401 break;
2402 }
2403 }
2404 }
2405
2406 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002407 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002408 //
2409 // Generate OpTypeSampledImage.
2410 //
2411 // Ops[0] = Image Type ID
2412 //
2413 SPIRVOperandList Ops;
2414
David Netoc6f3ab22018-04-06 18:02:31 -04002415 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002416
alan-bakerabd82722019-12-03 17:14:51 -05002417 // Update the image type map.
2418 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002419
David Neto87846742018-04-11 17:36:22 -04002420 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002421 SPIRVInstList.push_back(Inst);
2422 }
David Netoc6f3ab22018-04-06 18:02:31 -04002423
2424 // Generate types for pointer-to-local arguments.
SJW77b87ad2020-04-21 14:37:52 -05002425 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04002426 auto kind = pair.first;
2427 auto spec_id = pair.second;
2428
2429 if (kind != SpecConstant::kLocalMemorySize)
2430 continue;
2431
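    // For each local memory size spec constant, emit the OpSpecConstant that
    // holds the array length, an OpTypeArray of that length, and an
    // OpTypePointer to Workgroup storage for it.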
alan-bakerb6b09dc2018-11-08 16:59:28 -05002432 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002433
2434 // Generate the spec constant.
2435 SPIRVOperandList Ops;
2436 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002437 SPIRVInstList.push_back(
2438 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002439
2440 // Generate the array type.
2441 Ops.clear();
2442 // The element type must have been created.
2443 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2444 assert(elem_ty_id);
2445 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2446
2447 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002448 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002449
2450 Ops.clear();
2451 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002452 SPIRVInstList.push_back(new SPIRVInstruction(
2453 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002454 }
David Neto22f144c2017-06-12 14:26:21 -04002455}
2456
2457void SPIRVProducerPass::GenerateSPIRVConstants() {
SJW69939d52020-04-16 07:29:07 -05002458 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kConstants);
David Neto22f144c2017-06-12 14:26:21 -04002459 ValueMapType &VMap = getValueMap();
2460 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2461 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002462 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002463
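  // Emit an OpConstant* (or OpUndef) instruction for each constant in the
  // constant list that was not already emitted during type generation.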
2464 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002465 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002466 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002467
2468 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002469 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002470 continue;
2471 }
2472
David Netofb9a7972017-08-25 17:08:24 -04002473 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002474 VMap[Cst] = nextID;
2475
2476 //
2477 // Generate OpConstant.
2478 //
2479
2480 // Ops[0] = Result Type ID
2481 // Ops[1] .. Ops[n] = Values LiteralNumber
2482 SPIRVOperandList Ops;
2483
David Neto257c3892018-04-11 13:19:45 -04002484 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002485
2486 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002487 spv::Op Opcode = spv::OpNop;
2488
2489 if (isa<UndefValue>(Cst)) {
2490 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002491 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002492 if (hack_undef && IsTypeNullable(Cst->getType())) {
2493 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002494 }
David Neto22f144c2017-06-12 14:26:21 -04002495 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2496 unsigned BitWidth = CI->getBitWidth();
2497 if (BitWidth == 1) {
2498 // If the bitwidth of constant is 1, generate OpConstantTrue or
2499 // OpConstantFalse.
2500 if (CI->getZExtValue()) {
2501 // Ops[0] = Result Type ID
2502 Opcode = spv::OpConstantTrue;
2503 } else {
2504 // Ops[0] = Result Type ID
2505 Opcode = spv::OpConstantFalse;
2506 }
David Neto22f144c2017-06-12 14:26:21 -04002507 } else {
2508 auto V = CI->getZExtValue();
2509 LiteralNum.push_back(V & 0xFFFFFFFF);
2510
2511 if (BitWidth > 32) {
2512 LiteralNum.push_back(V >> 32);
2513 }
2514
2515 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002516
David Neto257c3892018-04-11 13:19:45 -04002517 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002518 }
2519 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2520 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2521 Type *CFPTy = CFP->getType();
2522 if (CFPTy->isFloatTy()) {
2523 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002524 } else if (CFPTy->isDoubleTy()) {
2525 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2526 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002527 } else if (CFPTy->isHalfTy()) {
2528 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002529 } else {
2530 CFPTy->print(errs());
2531 llvm_unreachable("Implement this ConstantFP Type");
2532 }
2533
2534 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002535
David Neto257c3892018-04-11 13:19:45 -04002536 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002537 } else if (isa<ConstantDataSequential>(Cst) &&
2538 cast<ConstantDataSequential>(Cst)->isString()) {
2539 Cst->print(errs());
2540 llvm_unreachable("Implement this Constant");
2541
2542 } else if (const ConstantDataSequential *CDS =
2543 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002544 // Let's convert <4 x i8> constant to int constant specially.
2545 // This case occurs when all the values are specified as constant
2546 // ints.
2547 Type *CstTy = Cst->getType();
2548 if (is4xi8vec(CstTy)) {
2549 LLVMContext &Context = CstTy->getContext();
2550
2551 //
2552 // Generate OpConstant with OpTypeInt 32 0.
2553 //
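        // Pack the four i8 elements into a single 32-bit literal, element 0
        // in the most significant byte: e.g. <i8 1, i8 2, i8 3, i8 4>
        // becomes 0x01020304.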
Neil Henning39672102017-09-29 14:33:13 +01002554 uint32_t IntValue = 0;
2555 for (unsigned k = 0; k < 4; k++) {
2556 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002557 IntValue = (IntValue << 8) | (Val & 0xffu);
2558 }
2559
2560 Type *i32 = Type::getInt32Ty(Context);
2561 Constant *CstInt = ConstantInt::get(i32, IntValue);
2562 // If this constant is already registered on VMap, use it.
2563 if (VMap.count(CstInt)) {
2564 uint32_t CstID = VMap[CstInt];
2565 VMap[Cst] = CstID;
2566 continue;
2567 }
2568
David Neto257c3892018-04-11 13:19:45 -04002569 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002570
David Neto87846742018-04-11 17:36:22 -04002571 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002572 SPIRVInstList.push_back(CstInst);
2573
2574 continue;
2575 }
2576
2577 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002578 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2579 Constant *EleCst = CDS->getElementAsConstant(k);
2580 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002581 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002582 }
2583
2584 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002585 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2586 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002587 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002588 Type *CstTy = Cst->getType();
2589 if (is4xi8vec(CstTy)) {
2590 LLVMContext &Context = CstTy->getContext();
2591
2592 //
2593 // Generate OpConstant with OpTypeInt 32 0.
2594 //
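        // Same packing as above; undef elements contribute 0x00.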
Neil Henning39672102017-09-29 14:33:13 +01002595 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002596 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2597 I != E; ++I) {
2598 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002599 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002600 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2601 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002602 }
David Neto49351ac2017-08-26 17:32:20 -04002603 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002604 }
2605
David Neto49351ac2017-08-26 17:32:20 -04002606 Type *i32 = Type::getInt32Ty(Context);
2607 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002608 // If this constant is already registered on VMap, use it.
2609 if (VMap.count(CstInt)) {
2610 uint32_t CstID = VMap[CstInt];
2611 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002612 continue;
David Neto22f144c2017-06-12 14:26:21 -04002613 }
2614
David Neto257c3892018-04-11 13:19:45 -04002615 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002616
David Neto87846742018-04-11 17:36:22 -04002617 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002618 SPIRVInstList.push_back(CstInst);
2619
David Neto19a1bad2017-08-25 15:01:41 -04002620 continue;
David Neto22f144c2017-06-12 14:26:21 -04002621 }
2622
2623 // We use a constant composite in SPIR-V for our constant aggregate in
2624 // LLVM.
2625 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002626
2627 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2628 // Look up the ID of the element of this aggregate (which we will
2629 // previously have created a constant for).
2630 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2631
2632 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002633 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002634 }
2635 } else if (Cst->isNullValue()) {
2636 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002637 } else {
2638 Cst->print(errs());
2639 llvm_unreachable("Unsupported Constant???");
2640 }
2641
alan-baker5b86ed72019-02-15 08:26:50 -05002642 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2643 // Null pointer requires variable pointers.
2644 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2645 }
2646
David Neto87846742018-04-11 17:36:22 -04002647 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002648 SPIRVInstList.push_back(CstInst);
2649 }
2650}
2651
SJW77b87ad2020-04-21 14:37:52 -05002652void SPIRVProducerPass::GenerateSamplers() {
SJW69939d52020-04-16 07:29:07 -05002653 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04002654
alan-bakerb6b09dc2018-11-08 16:59:28 -05002655 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002656 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002657 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2658 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002659
David Neto862b7d82018-06-14 18:48:37 -04002660 // We might have samplers in the sampler map that are not used
2661 // in the translation unit. We need to allocate variables
2662 // for them and bindings too.
2663 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002664
SJW77b87ad2020-04-21 14:37:52 -05002665 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002666 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002667 if (!var_fn)
2668 return;
alan-baker09cb9802019-12-10 13:16:27 -05002669
David Neto862b7d82018-06-14 18:48:37 -04002670 for (auto user : var_fn->users()) {
2671 // Populate SamplerLiteralToDescriptorSetMap and
2672 // SamplerLiteralToBindingMap.
2673 //
2674 // Look for calls like
2675 // call %opencl.sampler_t addrspace(2)*
2676 // @clspv.sampler.var.literal(
2677 // i32 descriptor,
2678 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002679 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002680 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002681 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002682 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002683 auto sampler_value = third_param;
2684 if (clspv::Option::UseSamplerMap()) {
2685 if (third_param >= sampler_map.size()) {
2686 errs() << "Out of bounds index to sampler map: " << third_param;
2687 llvm_unreachable("bad sampler init: out of bounds");
2688 }
2689 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002690 }
2691
David Neto862b7d82018-06-14 18:48:37 -04002692 const auto descriptor_set = static_cast<unsigned>(
2693 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2694 const auto binding = static_cast<unsigned>(
2695 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2696
2697 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2698 SamplerLiteralToBindingMap[sampler_value] = binding;
2699 used_bindings.insert(binding);
2700 }
2701 }
2702
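  // Allocate one OpVariable per distinct sampler literal (or sampler map
  // index) and decorate it with its descriptor set and binding.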
alan-baker09cb9802019-12-10 13:16:27 -05002703 DenseSet<size_t> seen;
2704 for (auto user : var_fn->users()) {
2705 if (!isa<CallInst>(user))
2706 continue;
2707
2708 auto call = cast<CallInst>(user);
2709 const unsigned third_param = static_cast<unsigned>(
2710 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2711
2712 // Already allocated a variable for this value.
2713 if (!seen.insert(third_param).second)
2714 continue;
2715
2716 auto sampler_value = third_param;
2717 if (clspv::Option::UseSamplerMap()) {
2718 sampler_value = sampler_map[third_param].first;
2719 }
2720
David Neto22f144c2017-06-12 14:26:21 -04002721 // Generate OpVariable.
2722 //
2723 // GIDOps[0] : Result Type ID
2724 // GIDOps[1] : Storage Class
2725 SPIRVOperandList Ops;
2726
David Neto257c3892018-04-11 13:19:45 -04002727 Ops << MkId(lookupType(SamplerTy))
2728 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002729
David Neto862b7d82018-06-14 18:48:37 -04002730 auto sampler_var_id = nextID++;
2731 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002732 SPIRVInstList.push_back(Inst);
2733
alan-baker09cb9802019-12-10 13:16:27 -05002734 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002735
David Neto862b7d82018-06-14 18:48:37 -04002736 unsigned descriptor_set;
2737 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002738 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002739 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002740 // This sampler is not actually used. Find the next one.
2741 for (binding = 0; used_bindings.count(binding); binding++)
2742 ;
2743 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2744 used_bindings.insert(binding);
2745 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002746 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2747 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002748
alan-baker09cb9802019-12-10 13:16:27 -05002749 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002750 descriptorMapEntries->emplace_back(std::move(sampler_data),
2751 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002752 }
2753
SJW69939d52020-04-16 07:29:07 -05002754 // Ops[0] = Target ID
2755 // Ops[1] = Decoration (DescriptorSet)
2756 // Ops[2] = LiteralNumber according to Decoration
2757 Ops.clear();
2758
David Neto862b7d82018-06-14 18:48:37 -04002759 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2760 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002761
David Neto87846742018-04-11 17:36:22 -04002762 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002763 getSPIRVInstList(kAnnotations).push_back(DescDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002764
2765 // Ops[0] = Target ID
2766 // Ops[1] = Decoration (Binding)
2767 // Ops[2] = LiteralNumber according to Decoration
2768 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002769 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2770 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002771
David Neto87846742018-04-11 17:36:22 -04002772 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002773 getSPIRVInstList(kAnnotations).push_back(BindDecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002774 }
David Neto862b7d82018-06-14 18:48:37 -04002775}
David Neto22f144c2017-06-12 14:26:21 -04002776
SJW77b87ad2020-04-21 14:37:52 -05002777void SPIRVProducerPass::GenerateResourceVars() {
SJW69939d52020-04-16 07:29:07 -05002778 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto862b7d82018-06-14 18:48:37 -04002779 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002780
David Neto862b7d82018-06-14 18:48:37 -04002781 // Generate variables. Make one for each resource var info object.
2782 for (auto *info : ModuleOrderedResourceVars) {
2783 Type *type = info->var_fn->getReturnType();
2784 // Remap the address space for opaque types.
2785 switch (info->arg_kind) {
2786 case clspv::ArgKind::Sampler:
2787 case clspv::ArgKind::ReadOnlyImage:
2788 case clspv::ArgKind::WriteOnlyImage:
2789 type = PointerType::get(type->getPointerElementType(),
2790 clspv::AddressSpace::UniformConstant);
2791 break;
2792 default:
2793 break;
2794 }
David Neto22f144c2017-06-12 14:26:21 -04002795
David Neto862b7d82018-06-14 18:48:37 -04002796 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002797
David Neto862b7d82018-06-14 18:48:37 -04002798 const auto type_id = lookupType(type);
2799 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2800 SPIRVOperandList Ops;
2801 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002802
David Neto862b7d82018-06-14 18:48:37 -04002803 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2804 SPIRVInstList.push_back(Inst);
2805
2806 // Map calls to the variable-builtin-function.
2807 for (auto &U : info->var_fn->uses()) {
2808 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2809 const auto set = unsigned(
2810 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2811 const auto binding = unsigned(
2812 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2813 if (set == info->descriptor_set && binding == info->binding) {
2814 switch (info->arg_kind) {
2815 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002816 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002817 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002818 case clspv::ArgKind::PodUBO:
2819 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002820 // The call maps to the variable directly.
2821 VMap[call] = info->var_id;
2822 break;
2823 case clspv::ArgKind::Sampler:
2824 case clspv::ArgKind::ReadOnlyImage:
2825 case clspv::ArgKind::WriteOnlyImage:
2826 // The call maps to a load we generate later.
2827 ResourceVarDeferredLoadCalls[call] = info->var_id;
2828 break;
2829 default:
2830 llvm_unreachable("Unhandled arg kind");
2831 }
2832 }
David Neto22f144c2017-06-12 14:26:21 -04002833 }
David Neto862b7d82018-06-14 18:48:37 -04002834 }
2835 }
David Neto22f144c2017-06-12 14:26:21 -04002836
David Neto862b7d82018-06-14 18:48:37 -04002837 // Generate associated decorations.
SJW69939d52020-04-16 07:29:07 -05002838 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
David Neto862b7d82018-06-14 18:48:37 -04002839
2840 SPIRVOperandList Ops;
2841 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002842 // Push constants don't need descriptor set or binding decorations.
2843 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2844 continue;
2845
David Neto862b7d82018-06-14 18:48:37 -04002846 // Decorate with DescriptorSet and Binding.
2847 Ops.clear();
2848 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2849 << MkNum(info->descriptor_set);
SJW69939d52020-04-16 07:29:07 -05002850 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002851
2852 Ops.clear();
2853 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2854 << MkNum(info->binding);
SJW69939d52020-04-16 07:29:07 -05002855 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002856
alan-bakere9308012019-03-15 10:25:13 -04002857 if (info->coherent) {
2858 // Decorate with Coherent if required for the variable.
2859 Ops.clear();
2860 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05002861 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
alan-bakere9308012019-03-15 10:25:13 -04002862 }
2863
David Neto862b7d82018-06-14 18:48:37 -04002864 // Generate NonWritable and NonReadable
2865 switch (info->arg_kind) {
2866 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002867 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002868 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2869 clspv::AddressSpace::Constant) {
2870 Ops.clear();
2871 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
SJW69939d52020-04-16 07:29:07 -05002872 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002873 }
David Neto862b7d82018-06-14 18:48:37 -04002874 break;
David Neto862b7d82018-06-14 18:48:37 -04002875 case clspv::ArgKind::WriteOnlyImage:
2876 Ops.clear();
2877 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
SJW69939d52020-04-16 07:29:07 -05002878 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto862b7d82018-06-14 18:48:37 -04002879 break;
2880 default:
2881 break;
David Neto22f144c2017-06-12 14:26:21 -04002882 }
2883 }
2884}
2885
SJW77b87ad2020-04-21 14:37:52 -05002886void SPIRVProducerPass::GeneratePushConstantDescriptorMapEntries() {
Kévin Petitbbbda972020-03-03 19:16:31 +00002887
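  // Each member of the push constant block gets a descriptor map entry
  // recording its PushConstant kind, byte offset, and byte size.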
SJW77b87ad2020-04-21 14:37:52 -05002888 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
2889 auto const &DL = module->getDataLayout();
Kévin Petitbbbda972020-03-03 19:16:31 +00002890 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
2891 auto STy = cast<StructType>(GV->getValueType());
2892
2893 for (unsigned i = 0; i < STy->getNumElements(); i++) {
2894 auto pc = static_cast<clspv::PushConstant>(
2895 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
2896 auto memberType = STy->getElementType(i);
2897 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
2898 unsigned previousOffset = 0;
2899 if (i > 0) {
2900 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
2901 }
2902 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
SJW77b87ad2020-04-21 14:37:52 -05002903 assert(isValidExplicitLayout(*module, STy, i,
2904 spv::StorageClassPushConstant, offset,
2905 previousOffset));
Kévin Petitbbbda972020-03-03 19:16:31 +00002906 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
2907 descriptorMapEntries->emplace_back(std::move(data));
2908 }
2909 }
2910}
2911
SJW77b87ad2020-04-21 14:37:52 -05002912void SPIRVProducerPass::GenerateSpecConstantDescriptorMapEntries() {
2913 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04002914 auto kind = pair.first;
2915 auto id = pair.second;
2916
2917 // Local memory size is only used for kernel arguments.
2918 if (kind == SpecConstant::kLocalMemorySize)
2919 continue;
2920
2921 version0::DescriptorMapEntry::SpecConstantData data = {kind, id};
2922 descriptorMapEntries->emplace_back(std::move(data));
2923 }
2924}
2925
David Neto22f144c2017-06-12 14:26:21 -04002926void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
SJW69939d52020-04-16 07:29:07 -05002927 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
David Neto22f144c2017-06-12 14:26:21 -04002928 ValueMapType &VMap = getValueMap();
2929 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002930 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002931
2932 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2933 Type *Ty = GV.getType();
2934 PointerType *PTy = cast<PointerType>(Ty);
2935
2936 uint32_t InitializerID = 0;
2937
2938 // Workgroup size is handled differently (it goes into a constant)
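  // If every kernel declares the same reqd_work_group_size, emit its value as
  // an OpConstantComposite; otherwise (or when non-uniform NDRanges are
  // supported) emit OpSpecConstants so the size can be specialized at
  // pipeline creation time.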
2939 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2940 std::vector<bool> HasMDVec;
2941 uint32_t PrevXDimCst = 0xFFFFFFFF;
2942 uint32_t PrevYDimCst = 0xFFFFFFFF;
2943 uint32_t PrevZDimCst = 0xFFFFFFFF;
2944 for (Function &Func : *GV.getParent()) {
2945 if (Func.isDeclaration()) {
2946 continue;
2947 }
2948
2949 // We only need to check kernels.
2950 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2951 continue;
2952 }
2953
2954 if (const MDNode *MD =
2955 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2956 uint32_t CurXDimCst = static_cast<uint32_t>(
2957 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2958 uint32_t CurYDimCst = static_cast<uint32_t>(
2959 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2960 uint32_t CurZDimCst = static_cast<uint32_t>(
2961 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2962
2963 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2964 PrevZDimCst == 0xFFFFFFFF) {
2965 PrevXDimCst = CurXDimCst;
2966 PrevYDimCst = CurYDimCst;
2967 PrevZDimCst = CurZDimCst;
2968 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2969 CurZDimCst != PrevZDimCst) {
2970 llvm_unreachable(
2971 "reqd_work_group_size must be the same across all kernels");
2972 } else {
2973 continue;
2974 }
2975
2976 //
2977 // Generate OpConstantComposite.
2978 //
2979 // Ops[0] : Result Type ID
2980 // Ops[1] : Constant size for x dimension.
2981 // Ops[2] : Constant size for y dimension.
2982 // Ops[3] : Constant size for z dimension.
2983 SPIRVOperandList Ops;
2984
2985 uint32_t XDimCstID =
2986 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2987 uint32_t YDimCstID =
2988 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2989 uint32_t ZDimCstID =
2990 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2991
2992 InitializerID = nextID;
2993
David Neto257c3892018-04-11 13:19:45 -04002994 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2995 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002996
David Neto87846742018-04-11 17:36:22 -04002997 auto *Inst =
2998 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
alan-bakera1be3322020-04-20 12:48:18 -04002999 getSPIRVInstList(kConstants).push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003000
3001 HasMDVec.push_back(true);
3002 } else {
3003 HasMDVec.push_back(false);
3004 }
3005 }
3006
3007 // Check all kernels have same definitions for work_group_size.
3008 bool HasMD = false;
3009 if (!HasMDVec.empty()) {
3010 HasMD = HasMDVec[0];
3011 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3012 if (HasMD != HasMDVec[i]) {
3013 llvm_unreachable(
3014 "Kernels should have consistent work group size definition");
3015 }
3016 }
3017 }
3018
3019 // If the kernels do not have reqd_work_group_size metadata (or non-uniform
3020 // NDRanges are supported), generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01003021 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04003022 //
3023 // Generate OpSpecConstants for x/y/z dimension.
3024 //
3025 // Ops[0] : Result Type ID
3026 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3027 uint32_t XDimCstID = 0;
3028 uint32_t YDimCstID = 0;
3029 uint32_t ZDimCstID = 0;
3030
alan-bakera1be3322020-04-20 12:48:18 -04003031 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05003032 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04003033
David Neto22f144c2017-06-12 14:26:21 -04003034 SPIRVOperandList Ops;
James Pricecf53df42020-04-20 14:41:24 -04003035 uint32_t result_type_id = lookupType(
3036 cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04003037
David Neto257c3892018-04-11 13:19:45 -04003038 // X Dimension
3039 Ops << MkId(result_type_id) << MkNum(1);
3040 XDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003041 getSPIRVInstList(kConstants)
3042 .push_back(new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003043
3044 // Y Dimension
3045 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003046 Ops << MkId(result_type_id) << MkNum(1);
3047 YDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003048 getSPIRVInstList(kConstants)
3049 .push_back(new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003050
3051 // Z Dimension
3052 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003053 Ops << MkId(result_type_id) << MkNum(1);
3054 ZDimCstID = nextID++;
alan-bakera1be3322020-04-20 12:48:18 -04003055 getSPIRVInstList(kConstants)
3056 .push_back(new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003057
David Neto257c3892018-04-11 13:19:45 -04003058 BuiltinDimVec.push_back(XDimCstID);
3059 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003060 BuiltinDimVec.push_back(ZDimCstID);
3061
David Neto22f144c2017-06-12 14:26:21 -04003062 //
3063 // Generate OpSpecConstantComposite.
3064 //
3065 // Ops[0] : Result Type ID
3066 // Ops[1] : Constant size for x dimension.
3067 // Ops[2] : Constant size for y dimension.
3068 // Ops[3] : Constant size for z dimension.
3069 InitializerID = nextID;
3070
3071 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003072 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3073 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003074
David Neto87846742018-04-11 17:36:22 -04003075 auto *Inst =
3076 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
alan-bakera1be3322020-04-20 12:48:18 -04003077 getSPIRVInstList(kConstants).push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003078 }
alan-bakerbed3a882020-04-21 14:42:41 -04003079 } else if (BuiltinType == spv::BuiltInWorkDim) {
3080 // 1. Generate a specialization constant with a default of 3.
3081 // 2. Allocate and annotate a SpecId for the constant.
3082 // 3. Use the spec constant as the initializer for the variable.
3083 SPIRVOperandList Ops;
3084
3085 //
3086 // Generate OpSpecConstant.
3087 //
3088 // Ops[0] : Result Type ID
3089 // Ops[1] : Default literal value
3090 InitializerID = nextID++;
3091
3092 Ops << MkId(lookupType(IntegerType::get(GV.getContext(), 32))) << MkNum(3);
3093
3094 auto *Inst = new SPIRVInstruction(spv::OpSpecConstant, InitializerID, Ops);
3095 getSPIRVInstList(kConstants).push_back(Inst);
3096
3097 //
3098 // Generate SpecId decoration.
3099 //
3100 // Ops[0] : target
3101 // Ops[1] : decoration
3102 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04003103 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04003104 Ops.clear();
3105 Ops << MkId(InitializerID) << MkNum(spv::DecorationSpecId)
3106 << MkNum(spec_id);
3107
3108 Inst = new SPIRVInstruction(spv::OpDecorate, Ops);
3109 getSPIRVInstList(kAnnotations).push_back(Inst);
alan-bakere1996972020-05-04 08:38:12 -04003110 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
3111 // 1. Generate a spec constant with a default of {0, 0, 0}.
3112 // 2. Allocate and annotate SpecIds for the constants.
3113 // 3. Use the spec constant as the initializer for the variable.
3114 SPIRVOperandList Ops;
3115
3116 //
3117 // Generate OpSpecConstant for each dimension.
3118 //
3119 // Ops[0] : Result Type ID
3120 // Ops[1] : Default literal value
3121 //
3122 uint32_t x_id = nextID++;
3123 Ops << MkId(lookupType(IntegerType::get(GV.getContext(), 32))) << MkNum(0);
3124 auto *Inst = new SPIRVInstruction(spv::OpSpecConstant, x_id, Ops);
3125 getSPIRVInstList(kConstants).push_back(Inst);
3126
3127 uint32_t y_id = nextID++;
3128 Ops.clear();
3129 Ops << MkId(lookupType(IntegerType::get(GV.getContext(), 32))) << MkNum(0);
3130 Inst = new SPIRVInstruction(spv::OpSpecConstant, y_id, Ops);
3131 getSPIRVInstList(kConstants).push_back(Inst);
3132
3133 uint32_t z_id = nextID++;
3134 Ops.clear();
3135 Ops << MkId(lookupType(IntegerType::get(GV.getContext(), 32))) << MkNum(0);
3136 Inst = new SPIRVInstruction(spv::OpSpecConstant, z_id, Ops);
3137 getSPIRVInstList(kConstants).push_back(Inst);
3138
3139 //
3140 // Generate SpecId decoration for each dimension.
3141 //
3142 // Ops[0] : target
3143 // Ops[1] : decoration
3144 // Ops[2] : SpecId
3145 //
3146 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
3147 Ops.clear();
3148 Ops << MkId(x_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
3149 Inst = new SPIRVInstruction(spv::OpDecorate, Ops);
3150 getSPIRVInstList(kAnnotations).push_back(Inst);
3151
3152 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
3153 Ops.clear();
3154 Ops << MkId(y_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
3155 Inst = new SPIRVInstruction(spv::OpDecorate, Ops);
3156 getSPIRVInstList(kAnnotations).push_back(Inst);
3157
3158 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
3159 Ops.clear();
3160 Ops << MkId(z_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
3161 Inst = new SPIRVInstruction(spv::OpDecorate, Ops);
3162 getSPIRVInstList(kAnnotations).push_back(Inst);
3163
3164 //
3165 // Generate OpSpecConstantComposite.
3166 //
3167 // Ops[0] : type id
3168 // Ops[1..n-1] : elements
3169 //
3170 InitializerID = nextID++;
3171 Ops.clear();
3172 Ops << MkId(lookupType(GV.getType()->getPointerElementType())) << MkId(x_id)
3173 << MkId(y_id) << MkId(z_id);
3174 Inst =
3175 new SPIRVInstruction(spv::OpSpecConstantComposite, InitializerID, Ops);
3176 getSPIRVInstList(kConstants).push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003177 }
3178
David Neto22f144c2017-06-12 14:26:21 -04003179 VMap[&GV] = nextID;
3180
3181 //
3182 // Generate OpVariable.
3183 //
3184 // GIDOps[0] : Result Type ID
3185 // GIDOps[1] : Storage Class
3186 SPIRVOperandList Ops;
3187
David Neto85082642018-03-24 06:55:20 -07003188 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003189 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003190
David Neto85082642018-03-24 06:55:20 -07003191 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003192 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003193 clspv::Option::ModuleConstantsInStorageBuffer();
3194
Kévin Petit23d5f182019-08-13 16:21:29 +01003195 if (GV.hasInitializer()) {
3196 auto GVInit = GV.getInitializer();
3197 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3198 assert(VMap.count(GVInit) == 1);
3199 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003200 }
3201 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003202
3203 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003204 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003205 Ops << MkId(InitializerID);
3206 }
David Neto85082642018-03-24 06:55:20 -07003207 const uint32_t var_id = nextID++;
3208
David Neto87846742018-04-11 17:36:22 -04003209 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003210 SPIRVInstList.push_back(Inst);
3211
alan-bakere1996972020-05-04 08:38:12 -04003212 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
3213 return builtin == spv::BuiltInWorkDim ||
3214 builtin == spv::BuiltInGlobalOffset;
3215 };
3216
SJW69939d52020-04-16 07:29:07 -05003217 SPIRVInstructionList &Annotations = getSPIRVInstList(kAnnotations);
alan-bakere1996972020-05-04 08:38:12 -04003218 // If we have a builtin (not an OpenCL builtin).
3219 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04003220 //
3221 // Generate OpDecorate.
3222 //
3223 // DOps[0] = Target ID
3224 // DOps[1] = Decoration (Builtin)
3225 // DOps[2] = BuiltIn ID
3226 uint32_t ResultID;
3227
3228 // WorkgroupSize is different, we decorate the constant composite that has
3229 // its value, rather than the variable that we use to access the value.
3230 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3231 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003232 // Save both the value and variable IDs for later.
3233 WorkgroupSizeValueID = InitializerID;
3234 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003235 } else {
3236 ResultID = VMap[&GV];
3237 }
3238
3239 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003240 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3241 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003242
David Neto87846742018-04-11 17:36:22 -04003243 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
SJW69939d52020-04-16 07:29:07 -05003244 Annotations.push_back(DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003245 } else if (module_scope_constant_external_init) {
3246 // This module scope constant is initialized from a storage buffer with data
3247 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05003248 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07003249
David Neto862b7d82018-06-14 18:48:37 -04003250 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003251 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3252 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003253 std::string hexbytes;
3254 llvm::raw_string_ostream str(hexbytes);
3255 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003256 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3257 str.str()};
3258 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3259 0);
David Neto85082642018-03-24 06:55:20 -07003260
David Neto85082642018-03-24 06:55:20 -07003261 SPIRVOperandList DOps;
David Neto85082642018-03-24 06:55:20 -07003262
3263 // OpDecorate %var DescriptorSet <descriptor_set>
David Neto257c3892018-04-11 13:19:45 -04003264 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3265 << MkNum(descriptor_set);
SJW69939d52020-04-16 07:29:07 -05003266 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
3267
3268 // OpDecorate %var Binding <binding>
3269 DOps.clear();
3270 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3271 Annotations.push_back(new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003272 }
3273}
3274
SJW77b87ad2020-04-21 14:37:52 -05003275void SPIRVProducerPass::GenerateWorkgroupVars() {
SJW69939d52020-04-16 07:29:07 -05003276 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kGlobalVariables);
SJW77b87ad2020-04-21 14:37:52 -05003277 auto spec_constant_md =
3278 module->getNamedMetadata(clspv::SpecConstantMetadataName());
alan-bakera1be3322020-04-20 12:48:18 -04003279 if (!spec_constant_md)
3280 return;
3281
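  // Emit one Workgroup-storage-class variable per pointer-to-local kernel
  // argument, using the pointer-to-array type created during type generation.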
SJW77b87ad2020-04-21 14:37:52 -05003282 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04003283 auto kind = pair.first;
3284 auto spec_id = pair.second;
3285
3286 if (kind != SpecConstant::kLocalMemorySize)
3287 continue;
3288
alan-bakerb6b09dc2018-11-08 16:59:28 -05003289 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003290
3291 // Generate OpVariable.
3292 //
3293 // GIDOps[0] : Result Type ID
3294 // GIDOps[1] : Storage Class
3295 SPIRVOperandList Ops;
3296 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3297
3298 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003299 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003300 }
3301}
3302
SJW77b87ad2020-04-21 14:37:52 -05003303void SPIRVProducerPass::GenerateDescriptorMapInfo(Function &F) {
3304 const auto &DL = module->getDataLayout();
David Netoc5fb5242018-07-30 13:28:31 -04003305 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3306 return;
3307 }
Kévin Petit717f8572020-04-06 17:31:53 +01003308 // Add an entry for each kernel declaration.
3309 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
3310 F.getName().str()};
3311 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
3312
David Neto862b7d82018-06-14 18:48:37 -04003313 // Gather the list of resources that are used by this function's arguments.
3314 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3315
David Neto862b7d82018-06-14 18:48:37 -04003316 auto *fty = F.getType()->getPointerElementType();
3317 auto *func_ty = dyn_cast<FunctionType>(fty);
3318
alan-baker038e9242019-04-19 22:14:41 -04003319 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003320 // If an argument maps to a resource variable, then get descriptor set and
3321 // binding from the resoure variable. Other info comes from the metadata.
alan-bakerff6c9292020-05-04 08:32:09 -04003322 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
3323 auto local_spec_id_md =
3324 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
David Neto862b7d82018-06-14 18:48:37 -04003325 if (arg_map) {
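    // Each metadata node holds (name, ordinal, remapped index, offset, size,
    // argument kind).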
3326 for (const auto &arg : arg_map->operands()) {
3327 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
alan-bakerff6c9292020-05-04 08:32:09 -04003328 assert(arg_node->getNumOperands() == 6);
David Neto862b7d82018-06-14 18:48:37 -04003329 const auto name =
3330 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3331 const auto old_index =
3332 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3333 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003334 const size_t new_index = static_cast<size_t>(
3335 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003336 const auto offset =
3337 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003338 const auto arg_size =
3339 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
alan-bakerc4579bb2020-04-29 14:15:50 -04003340 const auto argKind = clspv::GetArgKindFromName(
3341 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
alan-bakerf5e5f692018-11-27 08:33:24 -05003342
alan-bakerff6c9292020-05-04 08:32:09 -04003343 // If this is a local memory argument, find the right spec id for this
3344 // argument.
3345 int64_t spec_id = -1;
3346 if (argKind == clspv::ArgKind::Local) {
3347 for (auto spec_id_arg : local_spec_id_md->operands()) {
3348 if ((&F == dyn_cast<Function>(
3349 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
3350 ->getValue())) &&
3351 (new_index ==
3352 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
3353 ->getZExtValue())) {
3354 spec_id = mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
3355 ->getSExtValue();
3356 break;
3357 }
3358 }
3359 }
alan-bakerf5e5f692018-11-27 08:33:24 -05003360 uint32_t descriptor_set = 0;
3361 uint32_t binding = 0;
3362 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003363 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3364 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003365 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003366 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003367 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003368 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3369 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3370 DL));
David Neto862b7d82018-06-14 18:48:37 -04003371 } else {
3372 auto *info = resource_var_at_index[new_index];
3373 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003374 descriptor_set = info->descriptor_set;
3375 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003376 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003377 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3378 binding);
David Neto862b7d82018-06-14 18:48:37 -04003379 }
3380 } else {
3381 // There is no argument map.
3382 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003383 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003384
3385 SmallVector<Argument *, 4> arguments;
3386 for (auto &arg : F.args()) {
3387 arguments.push_back(&arg);
3388 }
3389
3390 unsigned arg_index = 0;
3391 for (auto *info : resource_var_at_index) {
3392 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003393 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003394 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04003395 if (info->arg_kind == clspv::ArgKind::Pod ||
3396 info->arg_kind == clspv::ArgKind::PodUBO ||
3397 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003398 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003399 }
3400
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003401 // Local pointer arguments are unused in this case. Offset is always
3402 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003403 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003404 F.getName().str(),
3405 arg->getName().str(),
3406 arg_index,
alan-bakerc4579bb2020-04-29 14:15:50 -04003407 info->arg_kind,
alan-baker21574d32020-01-29 16:00:31 -05003408 0,
3409 0,
3410 0,
3411 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003412 descriptorMapEntries->emplace_back(std::move(kernel_data),
3413 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003414 }
3415 arg_index++;
3416 }
3417 // Generate mappings for pointer-to-local arguments.
3418 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3419 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003420 auto where = LocalArgSpecIds.find(arg);
3421 if (where != LocalArgSpecIds.end()) {
3422 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003423 // POD argument members are unused in this case.
3424 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003425 F.getName().str(),
3426 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003427 arg_index,
3428 ArgKind::Local,
3429 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003430 static_cast<uint32_t>(
3431 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003432 0,
3433 0};
3434 // Pointer-to-local arguments do not utilize descriptor set and binding.
3435 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003436 }
3437 }
3438 }
3439}
3440
David Neto22f144c2017-06-12 14:26:21 -04003441void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
SJW69939d52020-04-16 07:29:07 -05003442 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003443 ValueMapType &VMap = getValueMap();
3444 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003445 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3446 auto &GlobalConstArgSet = getGlobalConstArgSet();
3447
3448 FunctionType *FTy = F.getFunctionType();
3449
3450 //
David Neto22f144c2017-06-12 14:26:21 -04003451 // Generate OpFunction.
3452 //
3453
3454 // FOps[0] : Result Type ID
3455 // FOps[1] : Function Control
3456 // FOps[2] : Function Type ID
3457 SPIRVOperandList FOps;
3458
3459 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003460 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003461
3462 // Check function attributes for SPIRV Function Control.
3463 uint32_t FuncControl = spv::FunctionControlMaskNone;
3464 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3465 FuncControl |= spv::FunctionControlInlineMask;
3466 }
3467 if (F.hasFnAttribute(Attribute::NoInline)) {
3468 FuncControl |= spv::FunctionControlDontInlineMask;
3469 }
3470 // TODO: Check llvm attribute for Function Control Pure.
3471 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3472 FuncControl |= spv::FunctionControlPureMask;
3473 }
3474 // TODO: Check llvm attribute for Function Control Const.
3475 if (F.hasFnAttribute(Attribute::ReadNone)) {
3476 FuncControl |= spv::FunctionControlConstMask;
3477 }
3478
David Neto257c3892018-04-11 13:19:45 -04003479 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003480
3481 uint32_t FTyID;
3482 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3483 SmallVector<Type *, 4> NewFuncParamTys;
3484 FunctionType *NewFTy =
3485 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3486 FTyID = lookupType(NewFTy);
3487 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003488 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003489 if (GlobalConstFuncTyMap.count(FTy)) {
3490 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3491 } else {
3492 FTyID = lookupType(FTy);
3493 }
3494 }
3495
David Neto257c3892018-04-11 13:19:45 -04003496 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003497
3498 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3499 EntryPoints.push_back(std::make_pair(&F, nextID));
3500 }
3501
3502 VMap[&F] = nextID;
3503
David Neto482550a2018-03-24 05:21:07 -07003504 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003505 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3506 }
David Neto22f144c2017-06-12 14:26:21 -04003507 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003508 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003509 SPIRVInstList.push_back(FuncInst);
3510
3511 //
3512 // Generate OpFunctionParameter for Normal function.
3513 //
3514
3515 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003516
David Neto22f144c2017-06-12 14:26:21 -04003517 // Iterate Argument for name instead of param type from function type.
3518 unsigned ArgIdx = 0;
3519 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003520 uint32_t param_id = nextID++;
3521 VMap[&Arg] = param_id;
3522
3523 if (CalledWithCoherentResource(Arg)) {
3524 // If the arg is passed a coherent resource ever, then decorate this
3525 // parameter with Coherent too.
3526 SPIRVOperandList decoration_ops;
3527 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
SJW69939d52020-04-16 07:29:07 -05003528 getSPIRVInstList(kAnnotations)
3529 .push_back(new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003530 }
David Neto22f144c2017-06-12 14:26:21 -04003531
3532 // ParamOps[0] : Result Type ID
3533 SPIRVOperandList ParamOps;
3534
3535 // Find SPIRV instruction for parameter type.
3536 uint32_t ParamTyID = lookupType(Arg.getType());
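      // If this argument is the global-constant parameter recorded in
      // GlobalConstFuncTyMap, it takes a ModuleScopePrivate pointer type
      // instead of its original address space.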
3537 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3538 if (GlobalConstFuncTyMap.count(FTy)) {
3539 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3540 Type *EleTy = PTy->getPointerElementType();
3541 Type *ArgTy =
3542 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3543 ParamTyID = lookupType(ArgTy);
3544 GlobalConstArgSet.insert(&Arg);
3545 }
3546 }
3547 }
David Neto257c3892018-04-11 13:19:45 -04003548 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003549
3550 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003551 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003552 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003553 SPIRVInstList.push_back(ParamInst);
3554
3555 ArgIdx++;
3556 }
3557 }
3558}
3559
SJW77b87ad2020-04-21 14:37:52 -05003560void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04003561 EntryPointVecType &EntryPoints = getEntryPointVec();
3562 ValueMapType &VMap = getValueMap();
3563 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
David Neto22f144c2017-06-12 14:26:21 -04003564 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3565
SJW69939d52020-04-16 07:29:07 -05003566 SPIRVInstructionList &SPIRVCapabilities = getSPIRVInstList(kCapabilities);
David Neto22f144c2017-06-12 14:26:21 -04003567 //
3568 // Generate OpCapability
3569 //
3570 // TODO: Which llvm information is mapped to SPIRV Capability?
3571
3572 // Ops[0] = Capability
3573 SPIRVOperandList Ops;
3574
David Neto87846742018-04-11 17:36:22 -04003575 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003576 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
SJW69939d52020-04-16 07:29:07 -05003577 SPIRVCapabilities.push_back(CapInst);
David Neto22f144c2017-06-12 14:26:21 -04003578
alan-bakerf906d2b2019-12-10 11:26:23 -05003579 bool write_without_format = false;
3580 bool sampled_1d = false;
3581 bool image_1d = false;
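  // Scan every type used by the module and emit the matching capabilities
  // (Int8/Int16/Int64/Float16/Float64 and the image-related capabilities).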
David Neto22f144c2017-06-12 14:26:21 -04003582 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003583 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3584 // Generate OpCapability for i8 type.
SJW69939d52020-04-16 07:29:07 -05003585 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003586 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003587 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003588 // Generate OpCapability for i16 type.
SJW69939d52020-04-16 07:29:07 -05003589 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003590 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003591 } else if (Ty->isIntegerTy(64)) {
3592 // Generate OpCapability for i64 type.
SJW69939d52020-04-16 07:29:07 -05003593 SPIRVCapabilities.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05003594 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003595 } else if (Ty->isHalfTy()) {
3596 // Generate OpCapability for half type.
SJW69939d52020-04-16 07:29:07 -05003597 SPIRVCapabilities.push_back(new SPIRVInstruction(
3598 spv::OpCapability, MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003599 } else if (Ty->isDoubleTy()) {
3600 // Generate OpCapability for double type.
SJW69939d52020-04-16 07:29:07 -05003601 SPIRVCapabilities.push_back(new SPIRVInstruction(
3602 spv::OpCapability, MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003603 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3604 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003605 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003606 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003607 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003608 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003609 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003610 write_without_format = true;
3611 }
3612 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003613 STy->getName().startswith("opencl.image1d_wo_t") ||
3614 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3615 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003616 if (STy->getName().contains(".sampled"))
3617 sampled_1d = true;
3618 else
3619 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003620 }
3621 }
3622 }
3623 }
3624
alan-bakerf906d2b2019-12-10 11:26:23 -05003625 if (write_without_format) {
3626 // Generate OpCapability for write only image type.
SJW69939d52020-04-16 07:29:07 -05003627 SPIRVCapabilities.push_back(new SPIRVInstruction(
3628 spv::OpCapability,
3629 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003630 }
3631 if (image_1d) {
3632 // Generate OpCapability for unsampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003633 SPIRVCapabilities.push_back(new SPIRVInstruction(
3634 spv::OpCapability, {MkNum(spv::CapabilityImage1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003635 } else if (sampled_1d) {
3636 // Generate OpCapability for sampled 1D image type.
SJW69939d52020-04-16 07:29:07 -05003637 SPIRVCapabilities.push_back(new SPIRVInstruction(
3638 spv::OpCapability, {MkNum(spv::CapabilitySampled1D)}));
alan-bakerf906d2b2019-12-10 11:26:23 -05003639 }
3640
David Neto5c22a252018-03-15 16:07:41 -04003641 { // OpCapability ImageQuery
3642 bool hasImageQuery = false;
SJW77b87ad2020-04-21 14:37:52 -05003643 for (const auto &SymVal : module->getValueSymbolTable()) {
alan-bakerf67468c2019-11-25 15:51:49 -05003644 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
SJW173c7e92020-03-16 08:44:47 -05003645 if (IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003646 hasImageQuery = true;
3647 break;
3648 }
David Neto5c22a252018-03-15 16:07:41 -04003649 }
3650 }
alan-bakerf67468c2019-11-25 15:51:49 -05003651
David Neto5c22a252018-03-15 16:07:41 -04003652 if (hasImageQuery) {
SJW69939d52020-04-16 07:29:07 -05003653 SPIRVCapabilities.push_back(new SPIRVInstruction(
3654 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)}));
David Neto5c22a252018-03-15 16:07:41 -04003655 }
3656 }
3657
David Neto22f144c2017-06-12 14:26:21 -04003658 if (hasVariablePointers()) {
3659 //
David Neto22f144c2017-06-12 14:26:21 -04003660 // Generate OpCapability.
3661 //
3662 // Ops[0] = Capability
3663 //
3664 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003665 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003666
SJW69939d52020-04-16 07:29:07 -05003667 SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003668 } else if (hasVariablePointersStorageBuffer()) {
3669 //
3670 // Generate OpCapability.
3671 //
3672 // Ops[0] = Capability
3673 //
3674 Ops.clear();
3675 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003676
SJW69939d52020-04-16 07:29:07 -05003677 SPIRVCapabilities.push_back(new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003678 }
3679
SJW69939d52020-04-16 07:29:07 -05003680 SPIRVInstructionList &SPIRVExtensions = getSPIRVInstList(kExtensions);
alan-baker5b86ed72019-02-15 08:26:50 -05003681 // Always add the storage buffer extension
3682 {
David Neto22f144c2017-06-12 14:26:21 -04003683 //
3684 // Generate OpExtension.
3685 //
3686 // Ops[0] = Name (Literal String)
3687 //
alan-baker5b86ed72019-02-15 08:26:50 -05003688 auto *ExtensionInst = new SPIRVInstruction(
3689 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
SJW69939d52020-04-16 07:29:07 -05003690 SPIRVExtensions.push_back(ExtensionInst);
alan-baker5b86ed72019-02-15 08:26:50 -05003691 }
David Neto22f144c2017-06-12 14:26:21 -04003692
alan-baker5b86ed72019-02-15 08:26:50 -05003693 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3694 //
3695 // Generate OpExtension.
3696 //
3697 // Ops[0] = Name (Literal String)
3698 //
3699 auto *ExtensionInst = new SPIRVInstruction(
3700 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
SJW69939d52020-04-16 07:29:07 -05003701 SPIRVExtensions.push_back(ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003702 }
3703
3704 //
3705 // Generate OpMemoryModel
3706 //
3707 // Memory model for Vulkan will always be GLSL450.
3708
3709 // Ops[0] = Addressing Model
3710 // Ops[1] = Memory Model
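// The emitted instruction is therefore always:
//   OpMemoryModel Logical GLSL450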
3711 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003712 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003713
David Neto87846742018-04-11 17:36:22 -04003714 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
SJW69939d52020-04-16 07:29:07 -05003715 getSPIRVInstList(kMemoryModel).push_back(MemModelInst);
David Neto22f144c2017-06-12 14:26:21 -04003716
SJW69939d52020-04-16 07:29:07 -05003717 SPIRVInstructionList &SPIRVEntryPoints = getSPIRVInstList(kEntryPoints);
David Neto22f144c2017-06-12 14:26:21 -04003718 //
3719 // Generate OpEntryPoint
3720 //
3721 for (auto EntryPoint : EntryPoints) {
3722 // Ops[0] = Execution Model
3723 // Ops[1] = EntryPoint ID
3724 // Ops[2] = Name (Literal String)
3725 // ...
3726 //
3727 // TODO: Do we need to consider Interface ID for forward references???
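// Rough shape of the result for a kernel named "foo" (the %var ids are
// placeholders for whatever globals were recorded in EntryPointInterfaces):
//   OpEntryPoint GLCompute %foo "foo" %var0 %var1 ...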
3728 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003729 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003730 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3731 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003732
David Neto22f144c2017-06-12 14:26:21 -04003733 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003734 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003735 }
3736
David Neto87846742018-04-11 17:36:22 -04003737 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
SJW69939d52020-04-16 07:29:07 -05003738 SPIRVEntryPoints.push_back(EntryPointInst);
David Neto22f144c2017-06-12 14:26:21 -04003739 }
3740
SJW69939d52020-04-16 07:29:07 -05003741 SPIRVInstructionList &SPIRVExecutionModes = getSPIRVInstList(kExecutionModes);
David Neto22f144c2017-06-12 14:26:21 -04003742 for (auto EntryPoint : EntryPoints) {
Kévin Petit21c23c62020-04-29 01:38:28 +01003743 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3744 ->getMetadata("reqd_work_group_size");
3745 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04003746
3747 if (!BuiltinDimVec.empty()) {
3748 llvm_unreachable(
3749 "Kernels should have consistent work group size definition");
3750 }
3751
3752 //
3753 // Generate OpExecutionMode
3754 //
3755
3756 // Ops[0] = Entry Point ID
3757 // Ops[1] = Execution Mode
3758 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
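// Example (assumed source attribute): a kernel declared with
//   __attribute__((reqd_work_group_size(8, 4, 1)))
// should yield
//   OpExecutionMode %kernel LocalSize 8 4 1
// where %kernel stands for the entry point's ID.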
3759 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003760 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003761
3762 uint32_t XDim = static_cast<uint32_t>(
3763 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3764 uint32_t YDim = static_cast<uint32_t>(
3765 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3766 uint32_t ZDim = static_cast<uint32_t>(
3767 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3768
David Neto257c3892018-04-11 13:19:45 -04003769 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003770
David Neto87846742018-04-11 17:36:22 -04003771 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
SJW69939d52020-04-16 07:29:07 -05003772 SPIRVExecutionModes.push_back(ExecModeInst);
David Neto22f144c2017-06-12 14:26:21 -04003773 }
3774 }
3775
3776 //
3777 // Generate OpSource.
3778 //
3779 // Ops[0] = SourceLanguage ID
3780 // Ops[1] = Version (LiteralNum)
3781 //
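// For example, when the source language is OpenCL C 1.2 this emits:
//   OpSource OpenCL_C 120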
3782 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003783 switch (clspv::Option::Language()) {
3784 case clspv::Option::SourceLanguage::OpenCL_C_10:
3785 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3786 break;
3787 case clspv::Option::SourceLanguage::OpenCL_C_11:
3788 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3789 break;
3790 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003791 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003792 break;
3793 case clspv::Option::SourceLanguage::OpenCL_C_20:
3794 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3795 break;
3796 case clspv::Option::SourceLanguage::OpenCL_CPP:
3797 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3798 break;
3799 default:
3800 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3801 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003802 }
David Neto22f144c2017-06-12 14:26:21 -04003803
David Neto87846742018-04-11 17:36:22 -04003804 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
SJW69939d52020-04-16 07:29:07 -05003805 getSPIRVInstList(kDebug).push_back(OpenSourceInst);
David Neto22f144c2017-06-12 14:26:21 -04003806
3807 if (!BuiltinDimVec.empty()) {
SJW69939d52020-04-16 07:29:07 -05003808 SPIRVInstructionList &SPIRVAnnotations = getSPIRVInstList(kAnnotations);
David Neto22f144c2017-06-12 14:26:21 -04003809 //
3810 // Generate OpDecorates for x/y/z dimension.
3811 //
3812 // Ops[0] = Target ID
3813 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003814 // Ops[2] = Specialization Constant ID (Literal Number)
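// Sketch of the three decorations (the %ids are the workgroup-size spec
// constants recorded in BuiltinDimVec; names illustrative):
//   OpDecorate %x_dim SpecId 0
//   OpDecorate %y_dim SpecId 1
//   OpDecorate %z_dim SpecId 2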
David Neto22f144c2017-06-12 14:26:21 -04003815
3816 // X Dimension
3817 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003818 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
SJW69939d52020-04-16 07:29:07 -05003819 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003820
3821 // Y Dimension
3822 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003823 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
SJW69939d52020-04-16 07:29:07 -05003824 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003825
3826 // Z Dimension
3827 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003828 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
SJW69939d52020-04-16 07:29:07 -05003829 SPIRVAnnotations.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003830 }
3831}
3832
David Netob6e2e062018-04-25 10:32:06 -04003833void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3834 // Work around a driver bug. Initializers on Private variables might not
3835 // work. So the start of the kernel should store the initializer value to the
3836 // variables. Yes, *every* entry point pays this cost if *any* entry point
3837 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3838 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003839 // TODO(dneto): Remove this at some point once fixed drivers are widely
3840 // available.
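// Sketch of the store added at the top of each kernel when the workaround
// applies (names illustrative):
//   OpStore %workgroup_size_var %workgroup_size_initializer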
David Netob6e2e062018-04-25 10:32:06 -04003841 if (WorkgroupSizeVarID) {
3842 assert(WorkgroupSizeValueID);
3843
3844 SPIRVOperandList Ops;
3845 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3846
3847 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
SJW69939d52020-04-16 07:29:07 -05003848 getSPIRVInstList(kFunctions).push_back(Inst);
David Netob6e2e062018-04-25 10:32:06 -04003849 }
3850}
3851
David Neto22f144c2017-06-12 14:26:21 -04003852void SPIRVProducerPass::GenerateFuncBody(Function &F) {
SJW69939d52020-04-16 07:29:07 -05003853 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003854 ValueMapType &VMap = getValueMap();
3855
David Netob6e2e062018-04-25 10:32:06 -04003856 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003857
3858 for (BasicBlock &BB : F) {
3859 // Register BasicBlock to ValueMap.
3860 VMap[&BB] = nextID;
3861
3862 //
3863 // Generate OpLabel for Basic Block.
3864 //
3865 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003866 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003867 SPIRVInstList.push_back(Inst);
3868
David Neto6dcd4712017-06-23 11:06:47 -04003869 // OpVariable instructions must come first.
3870 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003871 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3872 // Allocating a pointer requires variable pointers.
3873 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003874 setVariablePointersCapabilities(
3875 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003876 }
David Neto6dcd4712017-06-23 11:06:47 -04003877 GenerateInstruction(I);
3878 }
3879 }
3880
David Neto22f144c2017-06-12 14:26:21 -04003881 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003882 if (clspv::Option::HackInitializers()) {
3883 GenerateEntryPointInitialStores();
3884 }
David Neto22f144c2017-06-12 14:26:21 -04003885 }
3886
3887 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003888 if (!isa<AllocaInst>(I)) {
3889 GenerateInstruction(I);
3890 }
David Neto22f144c2017-06-12 14:26:21 -04003891 }
3892 }
3893}
3894
3895spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3896 const std::map<CmpInst::Predicate, spv::Op> Map = {
3897 {CmpInst::ICMP_EQ, spv::OpIEqual},
3898 {CmpInst::ICMP_NE, spv::OpINotEqual},
3899 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3900 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3901 {CmpInst::ICMP_ULT, spv::OpULessThan},
3902 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3903 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3904 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3905 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3906 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3907 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3908 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3909 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3910 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3911 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3912 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3913 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3914 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3915 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3916 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3917 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3918 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3919
3920 assert(0 != Map.count(I->getPredicate()));
3921
3922 return Map.at(I->getPredicate());
3923}
3924
3925spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3926 const std::map<unsigned, spv::Op> Map{
3927 {Instruction::Trunc, spv::OpUConvert},
3928 {Instruction::ZExt, spv::OpUConvert},
3929 {Instruction::SExt, spv::OpSConvert},
3930 {Instruction::FPToUI, spv::OpConvertFToU},
3931 {Instruction::FPToSI, spv::OpConvertFToS},
3932 {Instruction::UIToFP, spv::OpConvertUToF},
3933 {Instruction::SIToFP, spv::OpConvertSToF},
3934 {Instruction::FPTrunc, spv::OpFConvert},
3935 {Instruction::FPExt, spv::OpFConvert},
3936 {Instruction::BitCast, spv::OpBitcast}};
3937
3938 assert(0 != Map.count(I.getOpcode()));
3939
3940 return Map.at(I.getOpcode());
3941}
3942
3943spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003944 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003945 switch (I.getOpcode()) {
3946 default:
3947 break;
3948 case Instruction::Or:
3949 return spv::OpLogicalOr;
3950 case Instruction::And:
3951 return spv::OpLogicalAnd;
3952 case Instruction::Xor:
3953 return spv::OpLogicalNotEqual;
3954 }
3955 }
3956
alan-bakerb6b09dc2018-11-08 16:59:28 -05003957 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003958 {Instruction::Add, spv::OpIAdd},
3959 {Instruction::FAdd, spv::OpFAdd},
3960 {Instruction::Sub, spv::OpISub},
3961 {Instruction::FSub, spv::OpFSub},
3962 {Instruction::Mul, spv::OpIMul},
3963 {Instruction::FMul, spv::OpFMul},
3964 {Instruction::UDiv, spv::OpUDiv},
3965 {Instruction::SDiv, spv::OpSDiv},
3966 {Instruction::FDiv, spv::OpFDiv},
3967 {Instruction::URem, spv::OpUMod},
3968 {Instruction::SRem, spv::OpSRem},
3969 {Instruction::FRem, spv::OpFRem},
3970 {Instruction::Or, spv::OpBitwiseOr},
3971 {Instruction::Xor, spv::OpBitwiseXor},
3972 {Instruction::And, spv::OpBitwiseAnd},
3973 {Instruction::Shl, spv::OpShiftLeftLogical},
3974 {Instruction::LShr, spv::OpShiftRightLogical},
3975 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3976
3977 assert(0 != Map.count(I.getOpcode()));
3978
3979 return Map.at(I.getOpcode());
3980}
3981
3982void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
SJW69939d52020-04-16 07:29:07 -05003983 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003984 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003985 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3986 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3987
3988 // Register Instruction to ValueMap.
3989 if (0 == VMap[&I]) {
3990 VMap[&I] = nextID;
3991 }
3992
3993 switch (I.getOpcode()) {
3994 default: {
3995 if (Instruction::isCast(I.getOpcode())) {
3996 //
3997 // Generate SPIRV instructions for cast operators.
3998 //
3999
David Netod2de94a2017-08-28 17:27:47 -04004000 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004001 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04004002 auto toI8 = Ty == Type::getInt8Ty(Context);
4003 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04004004 // Handle zext, sext and uitofp with i1 type specially.
4005 if ((I.getOpcode() == Instruction::ZExt ||
4006 I.getOpcode() == Instruction::SExt ||
4007 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05004008 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004009 //
4010 // Generate OpSelect.
4011 //
4012
4013 // Ops[0] = Result Type ID
4014 // Ops[1] = Condition ID
4015 // Ops[2] = True Constant ID
4016 // Ops[3] = False Constant ID
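// Illustrative mapping (IDs made up): for
//   %r = zext i1 %c to i32
// this produces roughly
//   %r = OpSelect %uint %c %uint_1 %uint_0
// with -1 as the true value for sext, and 1.0f/0.0f for uitofp.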
4017 SPIRVOperandList Ops;
4018
David Neto257c3892018-04-11 13:19:45 -04004019 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004020
David Neto22f144c2017-06-12 14:26:21 -04004021 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004022 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04004023
4024 uint32_t TrueID = 0;
4025 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004026 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04004027 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004028 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04004029 } else {
4030 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
4031 }
David Neto257c3892018-04-11 13:19:45 -04004032 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04004033
4034 uint32_t FalseID = 0;
4035 if (I.getOpcode() == Instruction::ZExt) {
4036 FalseID = VMap[Constant::getNullValue(I.getType())];
4037 } else if (I.getOpcode() == Instruction::SExt) {
4038 FalseID = VMap[Constant::getNullValue(I.getType())];
4039 } else {
4040 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
4041 }
David Neto257c3892018-04-11 13:19:45 -04004042 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04004043
David Neto87846742018-04-11 17:36:22 -04004044 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004045 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05004046 } else if (!clspv::Option::Int8Support() &&
4047 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004048 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4049 // 8 bits.
4050 // Before:
4051 // %result = trunc i32 %a to i8
4052 // After
4053 // %result = OpBitwiseAnd %uint %a %uint_255
4054
4055 SPIRVOperandList Ops;
4056
David Neto257c3892018-04-11 13:19:45 -04004057 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04004058
4059 Type *UintTy = Type::getInt32Ty(Context);
4060 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04004061 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04004062
David Neto87846742018-04-11 17:36:22 -04004063 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04004064 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004065 } else {
4066 // Ops[0] = Result Type ID
4067 // Ops[1] = Source Value ID
4068 SPIRVOperandList Ops;
4069
David Neto257c3892018-04-11 13:19:45 -04004070 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004071
David Neto87846742018-04-11 17:36:22 -04004072 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004073 SPIRVInstList.push_back(Inst);
4074 }
4075 } else if (isa<BinaryOperator>(I)) {
4076 //
4077 // Generate SPIRV instructions for binary operators.
4078 //
4079
4080 // Handle xor with i1 type specially.
4081 if (I.getOpcode() == Instruction::Xor &&
4082 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004083 ((isa<ConstantInt>(I.getOperand(0)) &&
4084 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4085 (isa<ConstantInt>(I.getOperand(1)) &&
4086 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004087 //
4088 // Generate OpLogicalNot.
4089 //
4090 // Ops[0] = Result Type ID
4091 // Ops[1] = Operand
4092 SPIRVOperandList Ops;
4093
David Neto257c3892018-04-11 13:19:45 -04004094 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004095
4096 Value *CondV = I.getOperand(0);
4097 if (isa<Constant>(I.getOperand(0))) {
4098 CondV = I.getOperand(1);
4099 }
David Neto257c3892018-04-11 13:19:45 -04004100 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04004101
David Neto87846742018-04-11 17:36:22 -04004102 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004103 SPIRVInstList.push_back(Inst);
4104 } else {
4105 // Ops[0] = Result Type ID
4106 // Ops[1] = Operand 0
4107 // Ops[2] = Operand 1
4108 SPIRVOperandList Ops;
4109
David Neto257c3892018-04-11 13:19:45 -04004110 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4111 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004112
David Neto87846742018-04-11 17:36:22 -04004113 auto *Inst =
4114 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004115 SPIRVInstList.push_back(Inst);
4116 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004117 } else if (I.getOpcode() == Instruction::FNeg) {
4118 // The only unary operator.
4119 //
4120 // Ops[0] = Result Type ID
4121 // Ops[1] = Operand 0
4122 SPIRVOperandList ops;
4123
4124 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
4125 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
4126 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004127 } else {
4128 I.print(errs());
4129 llvm_unreachable("Unsupported instruction???");
4130 }
4131 break;
4132 }
4133 case Instruction::GetElementPtr: {
4134 auto &GlobalConstArgSet = getGlobalConstArgSet();
4135
4136 //
4137 // Generate OpAccessChain.
4138 //
4139 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4140
4144
4145 // Ops[0] = Result Type ID
4146 // Ops[1] = Base ID
4147 // Ops[2] ... Ops[n] = Indexes ID
4148 SPIRVOperandList Ops;
4149
alan-bakerb6b09dc2018-11-08 16:59:28 -05004150 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004151 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4152 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4153 // Use pointer type with private address space for global constant.
4154 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004155 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004156 }
David Neto257c3892018-04-11 13:19:45 -04004157
4158 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004159
David Neto862b7d82018-06-14 18:48:37 -04004160 // Generate the base pointer.
4161 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004162
David Neto862b7d82018-06-14 18:48:37 -04004163 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004164
4165 //
4166 // The following rules are applied to the gep:
4167 //
David Neto862b7d82018-06-14 18:48:37 -04004168 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
4169 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004170 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4171 // first index.
4172 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4173 // use gep's first index.
4174 // 4. Otherwise (none of cases 1-3 apply), generate OpAccessChain and use
4175 // gep's first index.
4176 //
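// Illustrative examples (result ids and result types elided):
//   getelementptr %p, i32 0, i32 2  -> OpAccessChain %p %c2          (rule 1)
//   getelementptr %p, i32 3, i32 2  -> OpPtrAccessChain %p %c3 %c2   (rule 2)
//   getelementptr %p, i32 %n        -> OpPtrAccessChain %p %n        (rule 3)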
4177 spv::Op Opcode = spv::OpAccessChain;
4178 unsigned offset = 0;
4179 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004180 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004181 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004182 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004183 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004184 }
David Neto862b7d82018-06-14 18:48:37 -04004185 } else {
David Neto22f144c2017-06-12 14:26:21 -04004186 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004187 }
4188
4189 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004190 // Do we need to generate ArrayStride? Check against the GEP result type
4191 // rather than the pointer type of the base because when indexing into
4192 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4193 // for something else in the SPIR-V.
4194 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004195 auto address_space = ResultType->getAddressSpace();
4196 setVariablePointersCapabilities(address_space);
4197 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004198 case spv::StorageClassStorageBuffer:
4199 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004200 // Save the need to generate an ArrayStride decoration. But defer
4201 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004202 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004203 break;
4204 default:
4205 break;
David Neto1a1a0582017-07-07 12:01:44 -04004206 }
David Neto22f144c2017-06-12 14:26:21 -04004207 }
4208
4209 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004210 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004211 }
4212
David Neto87846742018-04-11 17:36:22 -04004213 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004214 SPIRVInstList.push_back(Inst);
4215 break;
4216 }
4217 case Instruction::ExtractValue: {
4218 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4219 // Ops[0] = Result Type ID
4220 // Ops[1] = Composite ID
4221 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4222 SPIRVOperandList Ops;
4223
David Neto257c3892018-04-11 13:19:45 -04004224 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004225
4226 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004227 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004228
4229 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004230 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004231 }
4232
David Neto87846742018-04-11 17:36:22 -04004233 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004234 SPIRVInstList.push_back(Inst);
4235 break;
4236 }
4237 case Instruction::InsertValue: {
4238 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4239 // Ops[0] = Result Type ID
4240 // Ops[1] = Object ID
4241 // Ops[2] = Composite ID
4242 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4243 SPIRVOperandList Ops;
4244
4245 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004246 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004247
4248 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004249 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004250
4251 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004252 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004253
4254 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004255 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004256 }
4257
David Neto87846742018-04-11 17:36:22 -04004258 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004259 SPIRVInstList.push_back(Inst);
4260 break;
4261 }
4262 case Instruction::Select: {
4263 //
4264 // Generate OpSelect.
4265 //
4266
4267 // Ops[0] = Result Type ID
4268 // Ops[1] = Condition ID
4269 // Ops[2] = True Constant ID
4270 // Ops[3] = False Constant ID
4271 SPIRVOperandList Ops;
4272
4273 // Find SPIRV instruction for parameter type.
4274 auto Ty = I.getType();
4275 if (Ty->isPointerTy()) {
4276 auto PointeeTy = Ty->getPointerElementType();
4277 if (PointeeTy->isStructTy() &&
4278 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4279 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004280 } else {
4281 // Selecting between pointers requires variable pointers.
4282 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4283 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4284 setVariablePointers(true);
4285 }
David Neto22f144c2017-06-12 14:26:21 -04004286 }
4287 }
4288
David Neto257c3892018-04-11 13:19:45 -04004289 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4290 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004291
David Neto87846742018-04-11 17:36:22 -04004292 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004293 SPIRVInstList.push_back(Inst);
4294 break;
4295 }
4296 case Instruction::ExtractElement: {
4297 // Handle <4 x i8> type manually.
4298 Type *CompositeTy = I.getOperand(0)->getType();
4299 if (is4xi8vec(CompositeTy)) {
4300 //
4301 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4302 // <4 x i8>.
4303 //
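// Sketch for a constant index i (IDs made up): the <4 x i8> value is packed
// into a 32-bit word, so element i is recovered as
//   %shifted = OpShiftRightLogical %uint %vec %uint_(8*i)
//   %elem    = OpBitwiseAnd %uint %shifted %uint_255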
4304
4305 //
4306 // Generate OpShiftRightLogical
4307 //
4308 // Ops[0] = Result Type ID
4309 // Ops[1] = Operand 0
4310 // Ops[2] = Operand 1
4311 //
4312 SPIRVOperandList Ops;
4313
David Neto257c3892018-04-11 13:19:45 -04004314 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004315
4316 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004317 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004318
4319 uint32_t Op1ID = 0;
4320 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4321 // Handle constant index.
4322 uint64_t Idx = CI->getZExtValue();
4323 Value *ShiftAmount =
4324 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4325 Op1ID = VMap[ShiftAmount];
4326 } else {
4327 // Handle variable index.
4328 SPIRVOperandList TmpOps;
4329
David Neto257c3892018-04-11 13:19:45 -04004330 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4331 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004332
4333 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004334 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004335
4336 Op1ID = nextID;
4337
David Neto87846742018-04-11 17:36:22 -04004338 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004339 SPIRVInstList.push_back(TmpInst);
4340 }
David Neto257c3892018-04-11 13:19:45 -04004341 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004342
4343 uint32_t ShiftID = nextID;
4344
David Neto87846742018-04-11 17:36:22 -04004345 auto *Inst =
4346 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004347 SPIRVInstList.push_back(Inst);
4348
4349 //
4350 // Generate OpBitwiseAnd
4351 //
4352 // Ops[0] = Result Type ID
4353 // Ops[1] = Operand 0
4354 // Ops[2] = Operand 1
4355 //
4356 Ops.clear();
4357
David Neto257c3892018-04-11 13:19:45 -04004358 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004359
4360 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004361 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004362
David Neto9b2d6252017-09-06 15:47:37 -04004363 // Reset mapping for this value to the result of the bitwise and.
4364 VMap[&I] = nextID;
4365
David Neto87846742018-04-11 17:36:22 -04004366 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004367 SPIRVInstList.push_back(Inst);
4368 break;
4369 }
4370
4371 // Ops[0] = Result Type ID
4372 // Ops[1] = Composite ID
4373 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4374 SPIRVOperandList Ops;
4375
David Neto257c3892018-04-11 13:19:45 -04004376 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004377
4378 spv::Op Opcode = spv::OpCompositeExtract;
4379 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004380 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004381 } else {
David Neto257c3892018-04-11 13:19:45 -04004382 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004383 Opcode = spv::OpVectorExtractDynamic;
4384 }
4385
David Neto87846742018-04-11 17:36:22 -04004386 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004387 SPIRVInstList.push_back(Inst);
4388 break;
4389 }
4390 case Instruction::InsertElement: {
4391 // Handle <4 x i8> type manually.
4392 Type *CompositeTy = I.getOperand(0)->getType();
4393 if (is4xi8vec(CompositeTy)) {
4394 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4395 uint32_t CstFFID = VMap[CstFF];
4396
4397 uint32_t ShiftAmountID = 0;
4398 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4399 // Handle constant index.
4400 uint64_t Idx = CI->getZExtValue();
4401 Value *ShiftAmount =
4402 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4403 ShiftAmountID = VMap[ShiftAmount];
4404 } else {
4405 // Handle variable index.
4406 SPIRVOperandList TmpOps;
4407
David Neto257c3892018-04-11 13:19:45 -04004408 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4409 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004410
4411 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004412 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004413
4414 ShiftAmountID = nextID;
4415
David Neto87846742018-04-11 17:36:22 -04004416 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004417 SPIRVInstList.push_back(TmpInst);
4418 }
4419
4420 //
4421 // Generate mask operations.
4422 //
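// Sketch of the whole sequence for inserting byte %v at index i
// (IDs made up):
//   %mask    = OpShiftLeftLogical %uint %uint_255 %shift   ; 0xFF << (8*i)
//   %invmask = OpNot %uint %mask
//   %cleared = OpBitwiseAnd %uint %vec %invmask
//   %shifted = OpShiftLeftLogical %uint %v %shift
//   %result  = OpBitwiseOr %uint %cleared %shifted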
4423
4424 // ShiftLeft mask according to index of insertelement.
4425 SPIRVOperandList Ops;
4426
David Neto257c3892018-04-11 13:19:45 -04004427 const uint32_t ResTyID = lookupType(CompositeTy);
4428 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004429
4430 uint32_t MaskID = nextID;
4431
David Neto87846742018-04-11 17:36:22 -04004432 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004433 SPIRVInstList.push_back(Inst);
4434
4435 // Inverse mask.
4436 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004437 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004438
4439 uint32_t InvMaskID = nextID;
4440
David Neto87846742018-04-11 17:36:22 -04004441 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004442 SPIRVInstList.push_back(Inst);
4443
4444 // Apply mask.
4445 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004446 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004447
4448 uint32_t OrgValID = nextID;
4449
David Neto87846742018-04-11 17:36:22 -04004450 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004451 SPIRVInstList.push_back(Inst);
4452
4453 // Create correct value according to index of insertelement.
4454 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004455 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4456 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004457
4458 uint32_t InsertValID = nextID;
4459
David Neto87846742018-04-11 17:36:22 -04004460 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004461 SPIRVInstList.push_back(Inst);
4462
4463 // Insert value to original value.
4464 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004465 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004466
David Netoa394f392017-08-26 20:45:29 -04004467 VMap[&I] = nextID;
4468
David Neto87846742018-04-11 17:36:22 -04004469 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004470 SPIRVInstList.push_back(Inst);
4471
4472 break;
4473 }
4474
David Neto22f144c2017-06-12 14:26:21 -04004475 SPIRVOperandList Ops;
4476
James Priced26efea2018-06-09 23:28:32 +01004477 // Ops[0] = Result Type ID
4478 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004479
4480 spv::Op Opcode = spv::OpCompositeInsert;
4481 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004482 const auto value = CI->getZExtValue();
4483 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004484 // Ops[1] = Object ID
4485 // Ops[2] = Composite ID
4486 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004487 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004488 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004489 } else {
James Priced26efea2018-06-09 23:28:32 +01004490 // Ops[1] = Composite ID
4491 // Ops[2] = Object ID
4492 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004493 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004494 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004495 Opcode = spv::OpVectorInsertDynamic;
4496 }
4497
David Neto87846742018-04-11 17:36:22 -04004498 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004499 SPIRVInstList.push_back(Inst);
4500 break;
4501 }
4502 case Instruction::ShuffleVector: {
4503 // Ops[0] = Result Type ID
4504 // Ops[1] = Vector 1 ID
4505 // Ops[2] = Vector 2 ID
4506 // Ops[3] ... Ops[n] = Components (Literal Number)
4507 SPIRVOperandList Ops;
4508
David Neto257c3892018-04-11 13:19:45 -04004509 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4510 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004511
alan-bakerc9666712020-04-01 16:31:21 -04004512 auto shuffle = cast<ShuffleVectorInst>(&I);
4513 SmallVector<int, 4> mask;
4514 shuffle->getShuffleMask(mask);
4515 for (auto i : mask) {
4516 if (i == UndefMaskElem) {
4517 if (clspv::Option::HackUndef())
4518 // Use 0 instead of undef.
David Neto257c3892018-04-11 13:19:45 -04004519 Ops << MkNum(0);
alan-bakerc9666712020-04-01 16:31:21 -04004520 else
4521 // Undef for shuffle in SPIR-V.
4522 Ops << MkNum(0xffffffff);
David Neto22f144c2017-06-12 14:26:21 -04004523 } else {
alan-bakerc9666712020-04-01 16:31:21 -04004524 Ops << MkNum(i);
David Neto22f144c2017-06-12 14:26:21 -04004525 }
4526 }
4527
David Neto87846742018-04-11 17:36:22 -04004528 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004529 SPIRVInstList.push_back(Inst);
4530 break;
4531 }
4532 case Instruction::ICmp:
4533 case Instruction::FCmp: {
4534 CmpInst *CmpI = cast<CmpInst>(&I);
4535
David Netod4ca2e62017-07-06 18:47:35 -04004536 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004537 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004538 if (isa<PointerType>(ArgTy)) {
4539 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004540 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004541 errs()
4542 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4543 << "in function " << name << "\n";
4544 llvm_unreachable("Pointer equality check is invalid");
4545 break;
4546 }
4547
David Neto257c3892018-04-11 13:19:45 -04004548 // Ops[0] = Result Type ID
4549 // Ops[1] = Operand 1 ID
4550 // Ops[2] = Operand 2 ID
4551 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004552
David Neto257c3892018-04-11 13:19:45 -04004553 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4554 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004555
4556 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004557 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004558 SPIRVInstList.push_back(Inst);
4559 break;
4560 }
4561 case Instruction::Br: {
4562 // The branch instruction is deferred because it needs the label's ID. Record
4563 // the slot's location in SPIRVInstructionList.
4564 DeferredInsts.push_back(
4565 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4566 break;
4567 }
4568 case Instruction::Switch: {
4569 I.print(errs());
4570 llvm_unreachable("Unsupported instruction???");
4571 break;
4572 }
4573 case Instruction::IndirectBr: {
4574 I.print(errs());
4575 llvm_unreachable("Unsupported instruction???");
4576 break;
4577 }
4578 case Instruction::PHI: {
4579 // The PHI instruction is deferred because it needs the IDs of its incoming
4580 // values and labels. Record the slot's location in SPIRVInstructionList.
4581 DeferredInsts.push_back(
4582 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4583 break;
4584 }
4585 case Instruction::Alloca: {
4586 //
4587 // Generate OpVariable.
4588 //
4589 // Ops[0] : Result Type ID
4590 // Ops[1] : Storage Class
4591 SPIRVOperandList Ops;
4592
David Neto257c3892018-04-11 13:19:45 -04004593 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004594
David Neto87846742018-04-11 17:36:22 -04004595 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004596 SPIRVInstList.push_back(Inst);
4597 break;
4598 }
4599 case Instruction::Load: {
4600 LoadInst *LD = cast<LoadInst>(&I);
4601 //
4602 // Generate OpLoad.
4603 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004604
alan-baker5b86ed72019-02-15 08:26:50 -05004605 if (LD->getType()->isPointerTy()) {
4606 // Loading a pointer requires variable pointers.
4607 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4608 }
David Neto22f144c2017-06-12 14:26:21 -04004609
David Neto0a2f98d2017-09-15 19:38:40 -04004610 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004611 uint32_t PointerID = VMap[LD->getPointerOperand()];
4612
4613 // This is a hack to work around what looks like a driver bug.
4614 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004615 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4616 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004617 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004618 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004619 // Generate a bitwise-and of the original value with itself.
4620 // We should have been able to get away with just an OpCopyObject,
4621 // but we need something more complex to get past certain driver bugs.
4622 // This is ridiculous, but necessary.
4623 // TODO(dneto): Revisit this once drivers fix their bugs.
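// i.e. instead of an OpLoad from the variable, emit (IDs illustrative):
//   %val = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value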
4624
4625 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004626 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4627 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004628
David Neto87846742018-04-11 17:36:22 -04004629 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004630 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004631 break;
4632 }
4633
4634 // This is the normal path. Generate a load.
4635
David Neto22f144c2017-06-12 14:26:21 -04004636 // Ops[0] = Result Type ID
4637 // Ops[1] = Pointer ID
4638 // Ops[2] ... Ops[n] = Optional Memory Access
4639 //
4640 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004641
David Neto22f144c2017-06-12 14:26:21 -04004642 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004643 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004644
David Neto87846742018-04-11 17:36:22 -04004645 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004646 SPIRVInstList.push_back(Inst);
4647 break;
4648 }
4649 case Instruction::Store: {
4650 StoreInst *ST = cast<StoreInst>(&I);
4651 //
4652 // Generate OpStore.
4653 //
4654
alan-baker5b86ed72019-02-15 08:26:50 -05004655 if (ST->getValueOperand()->getType()->isPointerTy()) {
4656 // Storing a pointer requires variable pointers.
4657 setVariablePointersCapabilities(
4658 ST->getValueOperand()->getType()->getPointerAddressSpace());
4659 }
4660
David Neto22f144c2017-06-12 14:26:21 -04004661 // Ops[0] = Pointer ID
4662 // Ops[1] = Object ID
4663 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4664 //
4665 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004666 SPIRVOperandList Ops;
4667 Ops << MkId(VMap[ST->getPointerOperand()])
4668 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004669
David Neto87846742018-04-11 17:36:22 -04004670 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004671 SPIRVInstList.push_back(Inst);
4672 break;
4673 }
4674 case Instruction::AtomicCmpXchg: {
4675 I.print(errs());
4676 llvm_unreachable("Unsupported instruction???");
4677 break;
4678 }
4679 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004680 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4681
4682 spv::Op opcode;
4683
4684 switch (AtomicRMW->getOperation()) {
4685 default:
4686 I.print(errs());
4687 llvm_unreachable("Unsupported instruction???");
4688 case llvm::AtomicRMWInst::Add:
4689 opcode = spv::OpAtomicIAdd;
4690 break;
4691 case llvm::AtomicRMWInst::Sub:
4692 opcode = spv::OpAtomicISub;
4693 break;
4694 case llvm::AtomicRMWInst::Xchg:
4695 opcode = spv::OpAtomicExchange;
4696 break;
4697 case llvm::AtomicRMWInst::Min:
4698 opcode = spv::OpAtomicSMin;
4699 break;
4700 case llvm::AtomicRMWInst::Max:
4701 opcode = spv::OpAtomicSMax;
4702 break;
4703 case llvm::AtomicRMWInst::UMin:
4704 opcode = spv::OpAtomicUMin;
4705 break;
4706 case llvm::AtomicRMWInst::UMax:
4707 opcode = spv::OpAtomicUMax;
4708 break;
4709 case llvm::AtomicRMWInst::And:
4710 opcode = spv::OpAtomicAnd;
4711 break;
4712 case llvm::AtomicRMWInst::Or:
4713 opcode = spv::OpAtomicOr;
4714 break;
4715 case llvm::AtomicRMWInst::Xor:
4716 opcode = spv::OpAtomicXor;
4717 break;
4718 }
4719
4720 //
4721 // Generate OpAtomic*.
4722 //
4723 SPIRVOperandList Ops;
4724
David Neto257c3892018-04-11 13:19:45 -04004725 Ops << MkId(lookupType(I.getType()))
4726 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004727
4728 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004729 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004730 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004731
4732 const auto ConstantMemorySemantics = ConstantInt::get(
4733 IntTy, spv::MemorySemanticsUniformMemoryMask |
4734 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004735 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004736
David Neto257c3892018-04-11 13:19:45 -04004737 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004738
4739 VMap[&I] = nextID;
4740
David Neto87846742018-04-11 17:36:22 -04004741 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004742 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004743 break;
4744 }
4745 case Instruction::Fence: {
4746 I.print(errs());
4747 llvm_unreachable("Unsupported instruction???");
4748 break;
4749 }
4750 case Instruction::Call: {
4751 CallInst *Call = dyn_cast<CallInst>(&I);
4752 Function *Callee = Call->getCalledFunction();
4753
Alan Baker202c8c72018-08-13 13:47:44 -04004754 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004755 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4756 // Generate an OpLoad
4757 SPIRVOperandList Ops;
4758 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004759
David Neto862b7d82018-06-14 18:48:37 -04004760 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4761 << MkId(ResourceVarDeferredLoadCalls[Call]);
4762
4763 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4764 SPIRVInstList.push_back(Inst);
4765 VMap[Call] = load_id;
4766 break;
4767
4768 } else {
4769 // This maps to an OpVariable we've already generated.
4770 // No code is generated for the call.
4771 }
4772 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004773 } else if (Callee->getName().startswith(
4774 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004775 // Don't codegen an instruction here, but instead map this call directly
4776 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004777 int spec_id = static_cast<int>(
4778 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004779 const auto &info = LocalSpecIdInfoMap[spec_id];
4780 VMap[Call] = info.variable_id;
4781 break;
David Neto862b7d82018-06-14 18:48:37 -04004782 }
4783
4784 // Sampler initializers become a load of the corresponding sampler.
4785
Kévin Petitdf71de32019-04-09 14:09:50 +01004786 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004787 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004788 const auto third_param = static_cast<unsigned>(
4789 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4790 auto sampler_value = third_param;
4791 if (clspv::Option::UseSamplerMap()) {
4792 sampler_value = getSamplerMap()[third_param].first;
4793 }
David Neto862b7d82018-06-14 18:48:37 -04004794
4795 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004796 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004797 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004798
David Neto257c3892018-04-11 13:19:45 -04004799 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004800 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004801
David Neto862b7d82018-06-14 18:48:37 -04004802 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004803 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004804 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004805 break;
4806 }
4807
Kévin Petit349c9502019-03-28 17:24:14 +00004808 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004809 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4810 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4811 .Default(spv::OpNop);
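// e.g. a call to a function named "spirv.atomic_xor" is emitted directly as
// an OpAtomicXor whose operands are the call's arguments (a result id is
// assigned only if the call returns a value).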
David Neto22f144c2017-06-12 14:26:21 -04004812
Kévin Petit617a76d2019-04-04 13:54:16 +01004813 // If the switch above didn't have an entry maybe the intrinsic
4814 // is using the name mangling logic.
4815 bool usesMangler = false;
4816 if (opcode == spv::OpNop) {
4817 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4818 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4819 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4820 usesMangler = true;
4821 }
4822 }
4823
Kévin Petit349c9502019-03-28 17:24:14 +00004824 if (opcode != spv::OpNop) {
4825
David Neto22f144c2017-06-12 14:26:21 -04004826 SPIRVOperandList Ops;
4827
Kévin Petit349c9502019-03-28 17:24:14 +00004828 if (!I.getType()->isVoidTy()) {
4829 Ops << MkId(lookupType(I.getType()));
4830 }
David Neto22f144c2017-06-12 14:26:21 -04004831
Kévin Petit617a76d2019-04-04 13:54:16 +01004832 unsigned firstOperand = usesMangler ? 1 : 0;
4833 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004834 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004835 }
4836
Kévin Petit349c9502019-03-28 17:24:14 +00004837 if (!I.getType()->isVoidTy()) {
4838 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004839 }
4840
Kévin Petit349c9502019-03-28 17:24:14 +00004841 SPIRVInstruction *Inst;
4842 if (!I.getType()->isVoidTy()) {
4843 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4844 } else {
4845 Inst = new SPIRVInstruction(opcode, Ops);
4846 }
Kévin Petit8a560882019-03-21 15:24:34 +00004847 SPIRVInstList.push_back(Inst);
4848 break;
4849 }
4850
David Neto22f144c2017-06-12 14:26:21 -04004851    // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4852 if (Callee->getName().startswith("spirv.copy_memory")) {
4853 //
4854 // Generate OpCopyMemory.
4855 //
4856
4857 // Ops[0] = Dst ID
4858 // Ops[1] = Src ID
4859 // Ops[2] = Memory Access
4860 // Ops[3] = Alignment
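      // e.g. (illustrative IDs): OpCopyMemory %dst %src Volatile|Aligned 4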
4861
4862 auto IsVolatile =
4863 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4864
4865 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4866 : spv::MemoryAccessMaskNone;
4867
4868 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4869
4870 auto Alignment =
4871 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4872
David Neto257c3892018-04-11 13:19:45 -04004873 SPIRVOperandList Ops;
4874 Ops << MkId(VMap[Call->getArgOperand(0)])
4875 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4876 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004877
David Neto87846742018-04-11 17:36:22 -04004878 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004879
4880 SPIRVInstList.push_back(Inst);
4881
4882 break;
4883 }
4884
SJW2c317da2020-03-23 07:39:13 -05004885 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4886 // Additionally, OpTypeSampledImage is generated.
SJW173c7e92020-03-16 08:44:47 -05004887 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004888 //
4889 // Generate OpSampledImage.
4890 //
4891 // Ops[0] = Result Type ID
4892 // Ops[1] = Image ID
4893 // Ops[2] = Sampler ID
4894 //
4895 SPIRVOperandList Ops;
4896
4897 Value *Image = Call->getArgOperand(0);
4898 Value *Sampler = Call->getArgOperand(1);
4899 Value *Coordinate = Call->getArgOperand(2);
4900
4901 TypeMapType &OpImageTypeMap = getImageTypeMap();
4902 Type *ImageTy = Image->getType()->getPointerElementType();
4903 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004904 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004905 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004906
4907 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004908
4909 uint32_t SampledImageID = nextID;
4910
David Neto87846742018-04-11 17:36:22 -04004911 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004912 SPIRVInstList.push_back(Inst);
4913
4914 //
4915 // Generate OpImageSampleExplicitLod.
4916 //
4917 // Ops[0] = Result Type ID
4918 // Ops[1] = Sampled Image ID
4919 // Ops[2] = Coordinate ID
4920      // Ops[3] = Image Operands (Literal Number)
4921      // Ops[4] ... Ops[n] = Image Operand IDs (here, the Lod value)
4922 //
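      // For example (illustrative IDs), a sampled read lowers roughly to:
      //   %si = OpSampledImage %sampled_image_ty %image %sampler
      //   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
      // with a trailing OpBitcast to the call's type for integer images.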
4923 Ops.clear();
4924
alan-bakerf67468c2019-11-25 15:51:49 -05004925 const bool is_int_image = IsIntImageType(Image->getType());
4926 uint32_t result_type = 0;
4927 if (is_int_image) {
4928 result_type = v4int32ID;
4929 } else {
4930 result_type = lookupType(Call->getType());
4931 }
4932
4933 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4934 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004935
4936 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004937 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004938
alan-bakerf67468c2019-11-25 15:51:49 -05004939 uint32_t final_id = nextID++;
4940 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004941
alan-bakerf67468c2019-11-25 15:51:49 -05004942 uint32_t image_id = final_id;
4943 if (is_int_image) {
4944 // Int image requires a bitcast from v4int to v4uint.
4945 image_id = nextID++;
4946 }
4947
4948 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004949 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004950
4951 if (is_int_image) {
4952 // Generate the bitcast.
4953 Ops.clear();
4954 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4955 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4956 SPIRVInstList.push_back(Inst);
4957 }
David Neto22f144c2017-06-12 14:26:21 -04004958 break;
4959 }
4960
alan-baker75090e42020-02-20 11:21:04 -05004961 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05004962 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05004963 Value *Image = Call->getArgOperand(0);
4964 Value *Coordinate = Call->getArgOperand(1);
4965
4966 //
4967 // Generate OpImageFetch
4968 //
4969 // Ops[0] = Result Type ID
4970 // Ops[1] = Image ID
4971 // Ops[2] = Coordinate ID
4972      // Ops[3] = Image Operands (Lod)
4973      // Ops[4] = Lod value (constant 0)
4974 //
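      // e.g. (illustrative IDs): %px = OpImageFetch %v4float %image %coord Lod %int_0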
4975 SPIRVOperandList Ops;
4976
4977 const bool is_int_image = IsIntImageType(Image->getType());
4978 uint32_t result_type = 0;
4979 if (is_int_image) {
4980 result_type = v4int32ID;
4981 } else {
4982 result_type = lookupType(Call->getType());
4983 }
4984
4985 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
4986 << MkNum(spv::ImageOperandsLodMask);
4987
4988 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4989 Ops << MkId(VMap[CstInt0]);
4990
4991 uint32_t final_id = nextID++;
4992 VMap[&I] = final_id;
4993
4994 uint32_t image_id = final_id;
4995 if (is_int_image) {
4996 // Int image requires a bitcast from v4int to v4uint.
4997 image_id = nextID++;
4998 }
4999
5000 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5001 SPIRVInstList.push_back(Inst);
5002
5003 if (is_int_image) {
5004 // Generate the bitcast.
5005 Ops.clear();
5006 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5007 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5008 SPIRVInstList.push_back(Inst);
5009 }
5010 break;
5011 }
5012
alan-bakerf67468c2019-11-25 15:51:49 -05005013 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05005014 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005015 //
5016 // Generate OpImageWrite.
5017 //
5018 // Ops[0] = Image ID
5019 // Ops[1] = Coordinate ID
5020 // Ops[2] = Texel ID
5021 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5022 // Ops[4] ... Ops[n] = (Optional) Operands ID
5023 //
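      // e.g. (illustrative IDs): OpImageWrite %image %coord %texel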
5024 SPIRVOperandList Ops;
5025
5026 Value *Image = Call->getArgOperand(0);
5027 Value *Coordinate = Call->getArgOperand(1);
5028 Value *Texel = Call->getArgOperand(2);
5029
5030 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005031 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005032 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005033
5034 const bool is_int_image = IsIntImageType(Image->getType());
5035 if (is_int_image) {
5036 // Generate a bitcast to v4int and use it as the texel value.
5037 uint32_t castID = nextID++;
5038 Ops << MkId(v4int32ID) << MkId(TexelID);
5039 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5040 SPIRVInstList.push_back(cast);
5041 Ops.clear();
5042 TexelID = castID;
5043 }
David Neto257c3892018-04-11 13:19:45 -04005044 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005045
David Neto87846742018-04-11 17:36:22 -04005046 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005047 SPIRVInstList.push_back(Inst);
5048 break;
5049 }
5050
alan-bakerce179f12019-12-06 19:02:22 -05005051 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05005052 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005053 //
alan-bakerce179f12019-12-06 19:02:22 -05005054 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005055 //
5056 // Ops[0] = Image ID
5057 //
alan-bakerce179f12019-12-06 19:02:22 -05005058 // Result type has components equal to the dimensionality of the image,
5059 // plus 1 if the image is arrayed.
5060 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005061 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005062 SPIRVOperandList Ops;
5063
5064 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005065 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5066 uint32_t SizesTypeID = 0;
5067
David Neto5c22a252018-03-15 16:07:41 -04005068 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005069 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005070 const uint32_t components =
5071 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005072 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005073 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5074 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005075 SizesTypeID =
5076 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005077 }
David Neto5c22a252018-03-15 16:07:41 -04005078 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005079 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005080 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05005081 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05005082 query_opcode = spv::OpImageQuerySizeLod;
5083 // Need explicit 0 for Lod operand.
5084 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5085 Ops << MkId(VMap[CstInt0]);
5086 }
David Neto5c22a252018-03-15 16:07:41 -04005087
5088 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005089 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005090 SPIRVInstList.push_back(QueryInst);
5091
alan-bakerce179f12019-12-06 19:02:22 -05005092 // May require an extra instruction to create the appropriate result of
5093 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05005094 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05005095 if (dim == 3) {
5096 // get_image_dim returns an int4 for 3D images.
5097 //
5098 // Reset value map entry since we generated an intermediate
5099 // instruction.
5100 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005101
alan-bakerce179f12019-12-06 19:02:22 -05005102 // Implement:
5103 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5104 Ops.clear();
5105 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5106 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005107
alan-bakerce179f12019-12-06 19:02:22 -05005108 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5109 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005110
alan-bakerce179f12019-12-06 19:02:22 -05005111 auto *Inst =
5112 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5113 SPIRVInstList.push_back(Inst);
5114 } else if (dim != components) {
5115        // get_image_dim returns an int2 regardless of the arrayedness of the
5116        // image. If the image is arrayed, an element must be dropped from the
5117 // query result.
5118 //
5119 // Reset value map entry since we generated an intermediate
5120 // instruction.
5121 VMap[&I] = nextID;
5122
5123 // Implement:
5124 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5125 Ops.clear();
5126 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5127 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5128
5129 auto *Inst =
5130 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5131 SPIRVInstList.push_back(Inst);
5132 }
5133 } else if (components > 1) {
5134 // Reset value map entry since we generated an intermediate instruction.
5135 VMap[&I] = nextID;
5136
5137 // Implement:
5138 // %result = OpCompositeExtract %uint %sizes <component number>
5139 Ops.clear();
5140 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5141
5142 uint32_t component = 0;
5143 if (IsGetImageHeight(Callee))
5144 component = 1;
5145 else if (IsGetImageDepth(Callee))
5146 component = 2;
5147 Ops << MkNum(component);
5148
5149 auto *Inst =
5150 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5151 SPIRVInstList.push_back(Inst);
5152 }
David Neto5c22a252018-03-15 16:07:41 -04005153 break;
5154 }
5155
David Neto22f144c2017-06-12 14:26:21 -04005156    // A call instruction is deferred because it needs the callee's ID. Record
5157    // the slot's location in the SPIRVInstructionList.
5158 DeferredInsts.push_back(
5159 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5160
David Neto3fbb4072017-10-16 11:28:14 -04005161 // Check whether the implementation of this call uses an extended
5162 // instruction plus one more value-producing instruction. If so, then
5163 // reserve the id for the extra value-producing slot.
5164 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5165 if (EInst != kGlslExtInstBad) {
5166 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005167 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005168 VMap[&I] = nextID;
5169 nextID++;
5170 }
5171 break;
5172 }
5173 case Instruction::Ret: {
5174 unsigned NumOps = I.getNumOperands();
5175 if (NumOps == 0) {
5176 //
5177 // Generate OpReturn.
5178 //
David Netoef5ba2b2019-12-20 08:35:54 -05005179 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005180 } else {
5181 //
5182 // Generate OpReturnValue.
5183 //
5184
5185 // Ops[0] = Return Value ID
5186 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005187
5188 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005189
David Neto87846742018-04-11 17:36:22 -04005190 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005191 SPIRVInstList.push_back(Inst);
5192 break;
5193 }
5194 break;
5195 }
5196 }
5197}
5198
5199void SPIRVProducerPass::GenerateFuncEpilogue() {
SJW69939d52020-04-16 07:29:07 -05005200 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005201
5202 //
5203 // Generate OpFunctionEnd
5204 //
5205
David Netoef5ba2b2019-12-20 08:35:54 -05005206 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005207 SPIRVInstList.push_back(Inst);
5208}
5209
5210bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005211 // Don't specialize <4 x i8> if i8 is generally supported.
5212 if (clspv::Option::Int8Support())
5213 return false;
5214
David Neto22f144c2017-06-12 14:26:21 -04005215 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04005216 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
5217 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
5218 VecTy->getNumElements() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04005219 return true;
5220 }
5221 }
5222
5223 return false;
5224}
5225
5226void SPIRVProducerPass::HandleDeferredInstruction() {
SJW69939d52020-04-16 07:29:07 -05005227 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04005228 ValueMapType &VMap = getValueMap();
5229 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5230
5231 for (auto DeferredInst = DeferredInsts.rbegin();
5232 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5233 Value *Inst = std::get<0>(*DeferredInst);
5234 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5235 if (InsertPoint != SPIRVInstList.end()) {
5236 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5237 ++InsertPoint;
5238 }
5239 }
5240
5241 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005242      // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005243 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005244 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005245 //
5246 // Generate OpLoopMerge.
5247 //
5248 // Ops[0] = Merge Block ID
5249 // Ops[1] = Continue Target ID
5250        // Ops[2] = Loop Control
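        // e.g. (illustrative IDs): OpLoopMerge %merge_bb %continue_bb None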
5251 SPIRVOperandList Ops;
5252
alan-baker06cad652019-12-03 17:56:47 -05005253 auto MergeBB = MergeBlocks[BrBB];
5254 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005255 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005256 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005257 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005258 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005259
David Neto87846742018-04-11 17:36:22 -04005260 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005261 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005262 } else if (MergeBlocks.count(BrBB)) {
5263 //
5264 // Generate OpSelectionMerge.
5265 //
5266 // Ops[0] = Merge Block ID
5267 // Ops[1] = Selection Control
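        // e.g. (illustrative IDs): OpSelectionMerge %merge_bb None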
5268 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005269
alan-baker06cad652019-12-03 17:56:47 -05005270 auto MergeBB = MergeBlocks[BrBB];
5271 uint32_t MergeBBID = VMap[MergeBB];
5272 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005273
alan-baker06cad652019-12-03 17:56:47 -05005274 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5275 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005276 }
5277
5278 if (Br->isConditional()) {
5279 //
5280 // Generate OpBranchConditional.
5281 //
5282 // Ops[0] = Condition ID
5283 // Ops[1] = True Label ID
5284 // Ops[2] = False Label ID
5285 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
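        // e.g. (illustrative IDs): OpBranchConditional %cond %true_bb %false_bb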
5286 SPIRVOperandList Ops;
5287
5288 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005289 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005290 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005291
5292 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005293
David Neto87846742018-04-11 17:36:22 -04005294 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005295 SPIRVInstList.insert(InsertPoint, BrInst);
5296 } else {
5297 //
5298 // Generate OpBranch.
5299 //
5300 // Ops[0] = Target Label ID
5301 SPIRVOperandList Ops;
5302
5303 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005304 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005305
David Neto87846742018-04-11 17:36:22 -04005306 SPIRVInstList.insert(InsertPoint,
5307 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005308 }
5309 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04005310 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
5311 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05005312 // OpPhi on pointers requires variable pointers.
5313 setVariablePointersCapabilities(
5314 PHI->getType()->getPointerAddressSpace());
5315 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5316 setVariablePointers(true);
5317 }
5318 }
5319
David Neto22f144c2017-06-12 14:26:21 -04005320 //
5321 // Generate OpPhi.
5322 //
5323 // Ops[0] = Result Type ID
5324 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
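      // e.g. (illustrative IDs): %p = OpPhi %ty %val0 %bb0 %val1 %bb1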
5325 SPIRVOperandList Ops;
5326
David Neto257c3892018-04-11 13:19:45 -04005327 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005328
David Neto22f144c2017-06-12 14:26:21 -04005329 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5330 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005331 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005332 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005333 }
5334
5335 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005336 InsertPoint,
5337 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005338 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5339 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05005340 LLVMContext &Context = Callee->getContext();
5341 auto IntTy = Type::getInt32Ty(Context);
5342 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04005343 auto callee_name = Callee->getName();
5344 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005345
5346 if (EInst) {
5347 uint32_t &ExtInstImportID = getOpExtInstImportID();
5348
5349 //
5350 // Generate OpExtInst.
5351 //
5352
5353 // Ops[0] = Result Type ID
5354 // Ops[1] = Set ID (OpExtInstImport ID)
5355 // Ops[2] = Instruction Number (Literal Number)
5356 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
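        // e.g. (illustrative IDs): %r = OpExtInst %float %glsl_ext_import Sqrt %x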
5357 SPIRVOperandList Ops;
5358
David Neto862b7d82018-06-14 18:48:37 -04005359 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5360 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005361
David Neto22f144c2017-06-12 14:26:21 -04005362 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5363 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005364 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005365 }
5366
David Neto87846742018-04-11 17:36:22 -04005367 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5368 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005369 SPIRVInstList.insert(InsertPoint, ExtInst);
5370
David Neto3fbb4072017-10-16 11:28:14 -04005371 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5372 if (IndirectExtInst != kGlslExtInstBad) {
5373 // Generate one more instruction that uses the result of the extended
5374 // instruction. Its result id is one more than the id of the
5375 // extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04005376 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5377 &VMap, &SPIRVInstList, &InsertPoint](
5378 spv::Op opcode, Constant *constant) {
5379 //
5380 // Generate instruction like:
5381 // result = opcode constant <extinst-result>
5382 //
5383 // Ops[0] = Result Type ID
5384 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5385 // Ops[2] = Operand 1 ;; the result of the extended instruction
5386 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005387
David Neto3fbb4072017-10-16 11:28:14 -04005388 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005389 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005390
5391 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5392 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005393 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5394 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005395 }
David Neto257c3892018-04-11 13:19:45 -04005396 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005397
5398 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005399 InsertPoint, new SPIRVInstruction(
5400 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005401 };
5402
5403 switch (IndirectExtInst) {
5404 case glsl::ExtInstFindUMsb: // Implementing clz
SJW2c317da2020-03-23 07:39:13 -05005405 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04005406 break;
5407 case glsl::ExtInstAcos: // Implementing acospi
5408 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005409 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005410 case glsl::ExtInstAtan2: // Implementing atan2pi
5411 generate_extra_inst(
5412 spv::OpFMul,
5413 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5414 break;
5415
5416 default:
5417 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005418 }
David Neto22f144c2017-06-12 14:26:21 -04005419 }
David Neto3fbb4072017-10-16 11:28:14 -04005420
SJW2c317da2020-03-23 07:39:13 -05005421 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04005422 //
5423 // Generate OpBitCount
5424 //
5425 // Ops[0] = Result Type ID
5426 // Ops[1] = Base ID
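        // e.g. (illustrative IDs): %r = OpBitCount %uint %x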
David Neto257c3892018-04-11 13:19:45 -04005427 SPIRVOperandList Ops;
5428 Ops << MkId(lookupType(Call->getType()))
5429 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005430
5431 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005432 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005433 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005434
David Neto862b7d82018-06-14 18:48:37 -04005435 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005436
5437 // Generate an OpCompositeConstruct
5438 SPIRVOperandList Ops;
5439
5440 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005441 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005442
5443 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005444 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005445 }
5446
5447 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005448 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5449 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005450
Alan Baker202c8c72018-08-13 13:47:44 -04005451 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5452
5453 // We have already mapped the call's result value to an ID.
5454 // Don't generate any code now.
5455
5456 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005457
5458 // We have already mapped the call's result value to an ID.
5459 // Don't generate any code now.
5460
David Neto22f144c2017-06-12 14:26:21 -04005461 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005462 if (Call->getType()->isPointerTy()) {
5463 // Functions returning pointers require variable pointers.
5464 setVariablePointersCapabilities(
5465 Call->getType()->getPointerAddressSpace());
5466 }
5467
David Neto22f144c2017-06-12 14:26:21 -04005468 //
5469 // Generate OpFunctionCall.
5470 //
5471
5472 // Ops[0] = Result Type ID
5473 // Ops[1] = Callee Function ID
5474 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
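        // e.g. (illustrative IDs): %r = OpFunctionCall %ret_ty %callee %arg0 %arg1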
5475 SPIRVOperandList Ops;
5476
David Neto862b7d82018-06-14 18:48:37 -04005477 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005478
5479 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005480 if (CalleeID == 0) {
5481 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005482 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005483 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5484 // causes an infinite loop. Instead, go ahead and generate
5485 // the bad function call. A validator will catch the 0-Id.
5486 // llvm_unreachable("Can't translate function call");
5487 }
David Neto22f144c2017-06-12 14:26:21 -04005488
David Neto257c3892018-04-11 13:19:45 -04005489 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005490
David Neto22f144c2017-06-12 14:26:21 -04005491 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5492 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005493 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005494 auto *operand_type = operand->getType();
5495 // Images and samplers can be passed as function parameters without
5496 // variable pointers.
5497 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5498 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005499 auto sc =
5500 GetStorageClass(operand->getType()->getPointerAddressSpace());
5501 if (sc == spv::StorageClassStorageBuffer) {
5502 // Passing SSBO by reference requires variable pointers storage
5503 // buffer.
5504 setVariablePointersStorageBuffer(true);
5505 } else if (sc == spv::StorageClassWorkgroup) {
5506 // Workgroup references require variable pointers if they are not
5507 // memory object declarations.
5508 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5509 // Workgroup accessor represents a variable reference.
5510 if (!operand_call->getCalledFunction()->getName().startswith(
5511 clspv::WorkgroupAccessorFunction()))
5512 setVariablePointers(true);
5513 } else {
5514 // Arguments are function parameters.
5515 if (!isa<Argument>(operand))
5516 setVariablePointers(true);
5517 }
5518 }
5519 }
5520 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005521 }
5522
David Neto87846742018-04-11 17:36:22 -04005523 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5524 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005525 SPIRVInstList.insert(InsertPoint, CallInst);
5526 }
5527 }
5528 }
5529}
5530
SJW77b87ad2020-04-21 14:37:52 -05005531void SPIRVProducerPass::HandleDeferredDecorations() {
5532 const auto &DL = module->getDataLayout();
Alan Baker202c8c72018-08-13 13:47:44 -04005533 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005534 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005535 }
David Neto1a1a0582017-07-07 12:01:44 -04005536
SJW69939d52020-04-16 07:29:07 -05005537 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kAnnotations);
David Neto1a1a0582017-07-07 12:01:44 -04005538
David Netoc6f3ab22018-04-06 18:02:31 -04005539 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5540 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005541 for (auto *type : getTypesNeedingArrayStride()) {
5542 Type *elemTy = nullptr;
5543 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5544 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005545 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04005546 elemTy = arrayTy->getElementType();
5547 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
5548 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07005549 } else {
5550 errs() << "Unhandled strided type " << *type << "\n";
5551 llvm_unreachable("Unhandled strided type");
5552 }
David Neto1a1a0582017-07-07 12:01:44 -04005553
5554 // Ops[0] = Target ID
5555 // Ops[1] = Decoration (ArrayStride)
5556 // Ops[2] = Stride number (Literal Number)
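    // e.g. (illustrative IDs): OpDecorate %ptr_ty ArrayStride 16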
5557 SPIRVOperandList Ops;
5558
David Neto85082642018-03-24 06:55:20 -07005559 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005560 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005561
5562 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5563 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005564
David Neto87846742018-04-11 17:36:22 -04005565 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05005566 SPIRVInstList.push_back(DecoInst);
David Neto1a1a0582017-07-07 12:01:44 -04005567 }
David Netoc6f3ab22018-04-06 18:02:31 -04005568
5569 // Emit SpecId decorations targeting the array size value.
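  // e.g. (illustrative IDs; the SpecId literal comes from arg_info.spec_id):
  //   OpDecorate %array_size_id SpecId 3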
SJW77b87ad2020-04-21 14:37:52 -05005570 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04005571 auto kind = pair.first;
5572 auto spec_id = pair.second;
5573
5574 if (kind != SpecConstant::kLocalMemorySize)
5575 continue;
5576
alan-bakerb6b09dc2018-11-08 16:59:28 -05005577 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005578 SPIRVOperandList Ops;
5579 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5580 << MkNum(arg_info.spec_id);
SJW69939d52020-04-16 07:29:07 -05005581 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005582 }
David Neto1a1a0582017-07-07 12:01:44 -04005583}
5584
David Neto22f144c2017-06-12 14:26:21 -04005585glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005586
5587 const auto &fi = Builtins::Lookup(Name);
5588 switch (fi) {
5589 case Builtins::kClamp: {
5590 auto param_type = fi.getParameter(0);
5591 if (param_type.type_id == Type::FloatTyID) {
5592 return glsl::ExtInst::ExtInstFClamp;
5593 }
5594 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5595 : glsl::ExtInst::ExtInstUClamp;
5596 }
5597 case Builtins::kMax: {
5598 auto param_type = fi.getParameter(0);
5599 if (param_type.type_id == Type::FloatTyID) {
5600 return glsl::ExtInst::ExtInstFMax;
5601 }
5602 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5603 : glsl::ExtInst::ExtInstUMax;
5604 }
5605 case Builtins::kMin: {
5606 auto param_type = fi.getParameter(0);
5607 if (param_type.type_id == Type::FloatTyID) {
5608 return glsl::ExtInst::ExtInstFMin;
5609 }
5610 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5611 : glsl::ExtInst::ExtInstUMin;
5612 }
5613 case Builtins::kAbs:
5614 return glsl::ExtInst::ExtInstSAbs;
5615 case Builtins::kFmax:
5616 return glsl::ExtInst::ExtInstFMax;
5617 case Builtins::kFmin:
5618 return glsl::ExtInst::ExtInstFMin;
5619 case Builtins::kDegrees:
5620 return glsl::ExtInst::ExtInstDegrees;
5621 case Builtins::kRadians:
5622 return glsl::ExtInst::ExtInstRadians;
5623 case Builtins::kMix:
5624 return glsl::ExtInst::ExtInstFMix;
5625 case Builtins::kAcos:
5626 case Builtins::kAcospi:
5627 return glsl::ExtInst::ExtInstAcos;
5628 case Builtins::kAcosh:
5629 return glsl::ExtInst::ExtInstAcosh;
5630 case Builtins::kAsin:
5631 case Builtins::kAsinpi:
5632 return glsl::ExtInst::ExtInstAsin;
5633 case Builtins::kAsinh:
5634 return glsl::ExtInst::ExtInstAsinh;
5635 case Builtins::kAtan:
5636 case Builtins::kAtanpi:
5637 return glsl::ExtInst::ExtInstAtan;
5638 case Builtins::kAtanh:
5639 return glsl::ExtInst::ExtInstAtanh;
5640 case Builtins::kAtan2:
5641 case Builtins::kAtan2pi:
5642 return glsl::ExtInst::ExtInstAtan2;
5643 case Builtins::kCeil:
5644 return glsl::ExtInst::ExtInstCeil;
5645 case Builtins::kSin:
5646 case Builtins::kHalfSin:
5647 case Builtins::kNativeSin:
5648 return glsl::ExtInst::ExtInstSin;
5649 case Builtins::kSinh:
5650 return glsl::ExtInst::ExtInstSinh;
5651 case Builtins::kCos:
5652 case Builtins::kHalfCos:
5653 case Builtins::kNativeCos:
5654 return glsl::ExtInst::ExtInstCos;
5655 case Builtins::kCosh:
5656 return glsl::ExtInst::ExtInstCosh;
5657 case Builtins::kTan:
5658 case Builtins::kHalfTan:
5659 case Builtins::kNativeTan:
5660 return glsl::ExtInst::ExtInstTan;
5661 case Builtins::kTanh:
5662 return glsl::ExtInst::ExtInstTanh;
5663 case Builtins::kExp:
5664 case Builtins::kHalfExp:
5665 case Builtins::kNativeExp:
5666 return glsl::ExtInst::ExtInstExp;
5667 case Builtins::kExp2:
5668 case Builtins::kHalfExp2:
5669 case Builtins::kNativeExp2:
5670 return glsl::ExtInst::ExtInstExp2;
5671 case Builtins::kLog:
5672 case Builtins::kHalfLog:
5673 case Builtins::kNativeLog:
5674 return glsl::ExtInst::ExtInstLog;
5675 case Builtins::kLog2:
5676 case Builtins::kHalfLog2:
5677 case Builtins::kNativeLog2:
5678 return glsl::ExtInst::ExtInstLog2;
5679 case Builtins::kFabs:
5680 return glsl::ExtInst::ExtInstFAbs;
5681 case Builtins::kFma:
5682 return glsl::ExtInst::ExtInstFma;
5683 case Builtins::kFloor:
5684 return glsl::ExtInst::ExtInstFloor;
5685 case Builtins::kLdexp:
5686 return glsl::ExtInst::ExtInstLdexp;
5687 case Builtins::kPow:
5688 case Builtins::kPowr:
5689 case Builtins::kHalfPowr:
5690 case Builtins::kNativePowr:
5691 return glsl::ExtInst::ExtInstPow;
5692 case Builtins::kRound:
5693 return glsl::ExtInst::ExtInstRound;
5694 case Builtins::kSqrt:
5695 case Builtins::kHalfSqrt:
5696 case Builtins::kNativeSqrt:
5697 return glsl::ExtInst::ExtInstSqrt;
5698 case Builtins::kRsqrt:
5699 case Builtins::kHalfRsqrt:
5700 case Builtins::kNativeRsqrt:
5701 return glsl::ExtInst::ExtInstInverseSqrt;
5702 case Builtins::kTrunc:
5703 return glsl::ExtInst::ExtInstTrunc;
5704 case Builtins::kFrexp:
5705 return glsl::ExtInst::ExtInstFrexp;
5706 case Builtins::kFract:
5707 return glsl::ExtInst::ExtInstFract;
5708 case Builtins::kSign:
5709 return glsl::ExtInst::ExtInstFSign;
5710 case Builtins::kLength:
5711 case Builtins::kFastLength:
5712 return glsl::ExtInst::ExtInstLength;
5713 case Builtins::kDistance:
5714 case Builtins::kFastDistance:
5715 return glsl::ExtInst::ExtInstDistance;
5716 case Builtins::kStep:
5717 return glsl::ExtInst::ExtInstStep;
5718 case Builtins::kSmoothstep:
5719 return glsl::ExtInst::ExtInstSmoothStep;
5720 case Builtins::kCross:
5721 return glsl::ExtInst::ExtInstCross;
5722 case Builtins::kNormalize:
5723 case Builtins::kFastNormalize:
5724 return glsl::ExtInst::ExtInstNormalize;
5725 default:
5726 break;
5727 }
5728
David Neto22f144c2017-06-12 14:26:21 -04005729 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005730 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5731 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5732 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005733 .Default(kGlslExtInstBad);
5734}
5735
5736glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005737 switch (Builtins::Lookup(Name)) {
5738 case Builtins::kClz:
5739 return glsl::ExtInst::ExtInstFindUMsb;
5740 case Builtins::kAcospi:
5741 return glsl::ExtInst::ExtInstAcos;
5742 case Builtins::kAsinpi:
5743 return glsl::ExtInst::ExtInstAsin;
5744 case Builtins::kAtanpi:
5745 return glsl::ExtInst::ExtInstAtan;
5746 case Builtins::kAtan2pi:
5747 return glsl::ExtInst::ExtInstAtan2;
5748 default:
5749 break;
5750 }
5751 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005752}
5753
alan-bakerb6b09dc2018-11-08 16:59:28 -05005754glsl::ExtInst
5755SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005756 auto direct = getExtInstEnum(Name);
5757 if (direct != kGlslExtInstBad)
5758 return direct;
5759 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005760}
5761
David Neto22f144c2017-06-12 14:26:21 -04005762void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005763 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005764}
5765
5766void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5767 WriteOneWord(Inst->getResultID());
5768}
5769
5770void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5771 // High 16 bit : Word Count
5772 // Low 16 bit : Opcode
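  // For example, a 4-word OpTypeInt (opcode 21) is encoded as 0x00040015.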
5773 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005774 const uint32_t count = Inst->getWordCount();
5775 if (count > 65535) {
5776 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5777 llvm_unreachable("Word count too high");
5778 }
David Neto22f144c2017-06-12 14:26:21 -04005779 Word |= Inst->getWordCount() << 16;
5780 WriteOneWord(Word);
5781}
5782
David Netoef5ba2b2019-12-20 08:35:54 -05005783void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005784 SPIRVOperandType OpTy = Op->getType();
5785 switch (OpTy) {
5786 default: {
5787 llvm_unreachable("Unsupported SPIRV Operand Type???");
5788 break;
5789 }
5790 case SPIRVOperandType::NUMBERID: {
5791 WriteOneWord(Op->getNumID());
5792 break;
5793 }
5794 case SPIRVOperandType::LITERAL_STRING: {
5795 std::string Str = Op->getLiteralStr();
5796 const char *Data = Str.c_str();
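    // SPIR-V literal strings are nul-terminated and padded out to a 32-bit word
    // boundary; when the length is an exact multiple of 4 the zero LastWord
    // written below supplies the required terminator.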
5797 size_t WordSize = Str.size() / 4;
5798 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5799 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5800 }
5801
5802 uint32_t Remainder = Str.size() % 4;
5803 uint32_t LastWord = 0;
5804 if (Remainder) {
5805 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5806 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5807 }
5808 }
5809
5810 WriteOneWord(LastWord);
5811 break;
5812 }
5813 case SPIRVOperandType::LITERAL_INTEGER:
5814 case SPIRVOperandType::LITERAL_FLOAT: {
5815 auto LiteralNum = Op->getLiteralNum();
5816    // TODO: Handle LiteralNum carefully.
5817 for (auto Word : LiteralNum) {
5818 WriteOneWord(Word);
5819 }
5820 break;
5821 }
5822 }
5823}
5824
5825void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005826 for (int i = 0; i < kSectionCount; ++i) {
5827 WriteSPIRVBinary(SPIRVSections[i]);
5828 }
5829}
5830
5831void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
David Neto22f144c2017-06-12 14:26:21 -04005832
5833 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005834 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005835 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5836
5837 switch (Opcode) {
5838 default: {
David Neto5c22a252018-03-15 16:07:41 -04005839 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005840 llvm_unreachable("Unsupported SPIRV instruction");
5841 break;
5842 }
5843 case spv::OpCapability:
5844 case spv::OpExtension:
5845 case spv::OpMemoryModel:
5846 case spv::OpEntryPoint:
5847 case spv::OpExecutionMode:
5848 case spv::OpSource:
5849 case spv::OpDecorate:
5850 case spv::OpMemberDecorate:
5851 case spv::OpBranch:
5852 case spv::OpBranchConditional:
5853 case spv::OpSelectionMerge:
5854 case spv::OpLoopMerge:
5855 case spv::OpStore:
5856 case spv::OpImageWrite:
5857 case spv::OpReturnValue:
5858 case spv::OpControlBarrier:
5859 case spv::OpMemoryBarrier:
5860 case spv::OpReturn:
5861 case spv::OpFunctionEnd:
5862 case spv::OpCopyMemory: {
5863 WriteWordCountAndOpcode(Inst);
5864 for (uint32_t i = 0; i < Ops.size(); i++) {
5865 WriteOperand(Ops[i]);
5866 }
5867 break;
5868 }
5869 case spv::OpTypeBool:
5870 case spv::OpTypeVoid:
5871 case spv::OpTypeSampler:
5872 case spv::OpLabel:
5873 case spv::OpExtInstImport:
5874 case spv::OpTypePointer:
5875 case spv::OpTypeRuntimeArray:
5876 case spv::OpTypeStruct:
5877 case spv::OpTypeImage:
5878 case spv::OpTypeSampledImage:
5879 case spv::OpTypeInt:
5880 case spv::OpTypeFloat:
5881 case spv::OpTypeArray:
5882 case spv::OpTypeVector:
5883 case spv::OpTypeFunction: {
5884 WriteWordCountAndOpcode(Inst);
5885 WriteResultID(Inst);
5886 for (uint32_t i = 0; i < Ops.size(); i++) {
5887 WriteOperand(Ops[i]);
5888 }
5889 break;
5890 }
5891 case spv::OpFunction:
5892 case spv::OpFunctionParameter:
5893 case spv::OpAccessChain:
5894 case spv::OpPtrAccessChain:
5895 case spv::OpInBoundsAccessChain:
5896 case spv::OpUConvert:
5897 case spv::OpSConvert:
5898 case spv::OpConvertFToU:
5899 case spv::OpConvertFToS:
5900 case spv::OpConvertUToF:
5901 case spv::OpConvertSToF:
5902 case spv::OpFConvert:
5903 case spv::OpConvertPtrToU:
5904 case spv::OpConvertUToPtr:
5905 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005906 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005907 case spv::OpIAdd:
5908 case spv::OpFAdd:
5909 case spv::OpISub:
5910 case spv::OpFSub:
5911 case spv::OpIMul:
5912 case spv::OpFMul:
5913 case spv::OpUDiv:
5914 case spv::OpSDiv:
5915 case spv::OpFDiv:
5916 case spv::OpUMod:
5917 case spv::OpSRem:
5918 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005919 case spv::OpUMulExtended:
5920 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005921 case spv::OpBitwiseOr:
5922 case spv::OpBitwiseXor:
5923 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005924 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005925 case spv::OpShiftLeftLogical:
5926 case spv::OpShiftRightLogical:
5927 case spv::OpShiftRightArithmetic:
5928 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005929 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005930 case spv::OpCompositeExtract:
5931 case spv::OpVectorExtractDynamic:
5932 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005933 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005934 case spv::OpVectorInsertDynamic:
5935 case spv::OpVectorShuffle:
5936 case spv::OpIEqual:
5937 case spv::OpINotEqual:
5938 case spv::OpUGreaterThan:
5939 case spv::OpUGreaterThanEqual:
5940 case spv::OpULessThan:
5941 case spv::OpULessThanEqual:
5942 case spv::OpSGreaterThan:
5943 case spv::OpSGreaterThanEqual:
5944 case spv::OpSLessThan:
5945 case spv::OpSLessThanEqual:
5946 case spv::OpFOrdEqual:
5947 case spv::OpFOrdGreaterThan:
5948 case spv::OpFOrdGreaterThanEqual:
5949 case spv::OpFOrdLessThan:
5950 case spv::OpFOrdLessThanEqual:
5951 case spv::OpFOrdNotEqual:
5952 case spv::OpFUnordEqual:
5953 case spv::OpFUnordGreaterThan:
5954 case spv::OpFUnordGreaterThanEqual:
5955 case spv::OpFUnordLessThan:
5956 case spv::OpFUnordLessThanEqual:
5957 case spv::OpFUnordNotEqual:
5958 case spv::OpExtInst:
5959 case spv::OpIsInf:
5960 case spv::OpIsNan:
5961 case spv::OpAny:
5962 case spv::OpAll:
5963 case spv::OpUndef:
5964 case spv::OpConstantNull:
5965 case spv::OpLogicalOr:
5966 case spv::OpLogicalAnd:
5967 case spv::OpLogicalNot:
5968 case spv::OpLogicalNotEqual:
5969 case spv::OpConstantComposite:
5970 case spv::OpSpecConstantComposite:
5971 case spv::OpConstantTrue:
5972 case spv::OpConstantFalse:
5973 case spv::OpConstant:
5974 case spv::OpSpecConstant:
5975 case spv::OpVariable:
5976 case spv::OpFunctionCall:
5977 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005978 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04005979 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005980 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005981 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005982 case spv::OpSelect:
5983 case spv::OpPhi:
5984 case spv::OpLoad:
5985 case spv::OpAtomicIAdd:
5986 case spv::OpAtomicISub:
5987 case spv::OpAtomicExchange:
5988 case spv::OpAtomicIIncrement:
5989 case spv::OpAtomicIDecrement:
5990 case spv::OpAtomicCompareExchange:
5991 case spv::OpAtomicUMin:
5992 case spv::OpAtomicSMin:
5993 case spv::OpAtomicUMax:
5994 case spv::OpAtomicSMax:
5995 case spv::OpAtomicAnd:
5996 case spv::OpAtomicOr:
5997 case spv::OpAtomicXor:
5998 case spv::OpDot: {
5999 WriteWordCountAndOpcode(Inst);
6000 WriteOperand(Ops[0]);
6001 WriteResultID(Inst);
6002 for (uint32_t i = 1; i < Ops.size(); i++) {
6003 WriteOperand(Ops[i]);
6004 }
6005 break;
6006 }
6007 }
6008 }
6009}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006010
alan-bakerb6b09dc2018-11-08 16:59:28 -05006011bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04006012 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006013 case Type::HalfTyID:
6014 case Type::FloatTyID:
6015 case Type::DoubleTyID:
6016 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04006017 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05006018 return true;
6019 case Type::PointerTyID: {
6020 const PointerType *pointer_type = cast<PointerType>(type);
6021 if (pointer_type->getPointerAddressSpace() !=
6022 AddressSpace::UniformConstant) {
6023 auto pointee_type = pointer_type->getPointerElementType();
6024 if (pointee_type->isStructTy() &&
6025 cast<StructType>(pointee_type)->isOpaque()) {
6026 // Images and samplers are not nullable.
6027 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006028 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006029 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006030 return true;
6031 }
6032 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04006033 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05006034 case Type::StructTyID: {
6035 const StructType *struct_type = cast<StructType>(type);
6036 // Images and samplers are not nullable.
6037 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006038 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006039 for (const auto element : struct_type->elements()) {
6040 if (!IsTypeNullable(element))
6041 return false;
6042 }
6043 return true;
6044 }
6045 default:
6046 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006047 }
6048}
Alan Bakerfcda9482018-10-02 17:09:59 -04006049
SJW77b87ad2020-04-21 14:37:52 -05006050void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04006051 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05006052 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04006053    // Metadata is stored as key-value pair operands. The first element of each
6054 // operand is the type and the second is a vector of offsets.
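    // e.g. (a sketch, not actual metadata output):
    //   !{ <constant of the remapped type>, !{i32 0, i32 16, i32 32} }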
6055 for (const auto *operand : offsets_md->operands()) {
6056 const auto *pair = cast<MDTuple>(operand);
6057 auto *type =
6058 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6059 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6060 std::vector<uint32_t> offsets;
6061 for (const Metadata *offset_md : offset_vector->operands()) {
6062 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006063 offsets.push_back(static_cast<uint32_t>(
6064 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006065 }
6066 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6067 }
6068 }
6069
6070 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05006071 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04006072 // Metadata is stored as key-value pair operands. The first element of each
6073 // operand is the type and the second is a triple of sizes: type size in
6074 // bits, store size and alloc size.
6075 for (const auto *operand : sizes_md->operands()) {
6076 const auto *pair = cast<MDTuple>(operand);
6077 auto *type =
6078 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6079 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6080 uint64_t type_size_in_bits =
6081 cast<ConstantInt>(
6082 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6083 ->getZExtValue();
6084 uint64_t type_store_size =
6085 cast<ConstantInt>(
6086 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6087 ->getZExtValue();
6088 uint64_t type_alloc_size =
6089 cast<ConstantInt>(
6090 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6091 ->getZExtValue();
6092 RemappedUBOTypeSizes.insert(std::make_pair(
6093 type, std::make_tuple(type_size_in_bits, type_store_size,
6094 type_alloc_size)));
6095 }
6096 }
6097}
6098
6099uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6100 const DataLayout &DL) {
6101 auto iter = RemappedUBOTypeSizes.find(type);
6102 if (iter != RemappedUBOTypeSizes.end()) {
6103 return std::get<0>(iter->second);
6104 }
6105
6106 return DL.getTypeSizeInBits(type);
6107}
6108
6109uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6110 auto iter = RemappedUBOTypeSizes.find(type);
6111 if (iter != RemappedUBOTypeSizes.end()) {
6112 return std::get<1>(iter->second);
6113 }
6114
6115 return DL.getTypeStoreSize(type);
6116}
6117
6118uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6119 auto iter = RemappedUBOTypeSizes.find(type);
6120 if (iter != RemappedUBOTypeSizes.end()) {
6121 return std::get<2>(iter->second);
6122 }
6123
6124 return DL.getTypeAllocSize(type);
6125}
alan-baker5b86ed72019-02-15 08:26:50 -05006126
Kévin Petitbbbda972020-03-03 19:16:31 +00006127uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
6128 StructType *type, unsigned member, const DataLayout &DL) {
6129 const auto StructLayout = DL.getStructLayout(type);
6130 // Search for the correct offsets if this type was remapped.
6131 std::vector<uint32_t> *offsets = nullptr;
6132 auto iter = RemappedUBOTypeOffsets.find(type);
6133 if (iter != RemappedUBOTypeOffsets.end()) {
6134 offsets = &iter->second;
6135 }
6136 auto ByteOffset =
6137 static_cast<uint32_t>(StructLayout->getElementOffset(member));
6138 if (offsets) {
6139 ByteOffset = (*offsets)[member];
6140 }
6141
6142 return ByteOffset;
6143}
6144
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006145void SPIRVProducerPass::setVariablePointersCapabilities(
6146 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006147 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6148 setVariablePointersStorageBuffer(true);
6149 } else {
6150 setVariablePointers(true);
6151 }
6152}
6153
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006154Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006155 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6156 return GetBasePointer(gep->getPointerOperand());
6157 }
6158
6159 // Conservatively return |v|.
6160 return v;
6161}
6162
6163bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6164 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6165 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6166 if (lhs_call->getCalledFunction()->getName().startswith(
6167 clspv::ResourceAccessorFunction()) &&
6168 rhs_call->getCalledFunction()->getName().startswith(
6169 clspv::ResourceAccessorFunction())) {
6170 // For resource accessors, match descriptor set and binding.
6171 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6172 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6173 return true;
6174 } else if (lhs_call->getCalledFunction()->getName().startswith(
6175 clspv::WorkgroupAccessorFunction()) &&
6176 rhs_call->getCalledFunction()->getName().startswith(
6177 clspv::WorkgroupAccessorFunction())) {
6178 // For workgroup resources, match spec id.
6179 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6180 return true;
6181 }
6182 }
6183 }
6184
6185 return false;
6186}
6187
6188bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6189 assert(inst->getType()->isPointerTy());
6190 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6191 spv::StorageClassStorageBuffer);
6192 const bool hack_undef = clspv::Option::HackUndef();
6193 if (auto *select = dyn_cast<SelectInst>(inst)) {
6194 auto *true_base = GetBasePointer(select->getTrueValue());
6195 auto *false_base = GetBasePointer(select->getFalseValue());
6196
6197 if (true_base == false_base)
6198 return true;
6199
6200 // If either the true or false operand is a null, then we satisfy the same
6201 // object constraint.
6202 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6203 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6204 return true;
6205 }
6206
6207 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6208 if (false_cst->isNullValue() ||
6209 (hack_undef && isa<UndefValue>(false_base)))
6210 return true;
6211 }
6212
6213 if (sameResource(true_base, false_base))
6214 return true;
6215 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6216 Value *value = nullptr;
6217 bool ok = true;
6218 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6219 auto *base = GetBasePointer(phi->getIncomingValue(i));
6220      // Null values satisfy the constraint of selecting from the
6221 // same object.
6222 if (!value) {
6223 if (auto *cst = dyn_cast<Constant>(base)) {
6224 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6225 value = base;
6226 } else {
6227 value = base;
6228 }
6229 } else if (base != value) {
6230 if (auto *base_cst = dyn_cast<Constant>(base)) {
6231 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6232 continue;
6233 }
6234
6235 if (sameResource(value, base))
6236 continue;
6237
6238 // Values don't represent the same base.
6239 ok = false;
6240 }
6241 }
6242
6243 return ok;
6244 }
6245
6246 // Conservatively return false.
6247 return false;
6248}
alan-bakere9308012019-03-15 10:25:13 -04006249
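// Determine whether some call site passes |Arg| a pointer derived from a
// resource accessor call whose coherent operand is set. Only pointers in the
// global (SSBO) address space are traced; anything else returns false.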
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        resource_call->getCalledFunction()->getName().startswith(
            clspv::ResourceAccessorFunction())) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables.
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}

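// Precompute the structured control flow information used when emitting merge
// instructions: each loop header is mapped to its merge block and continue
// target, and each conditional branch that is not a back-edge, break, or
// continue is mapped to its false successor as the merge block.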
void SPIRVProducerPass::PopulateStructuredCFGMaps() {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : *module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // The StructurizeCFG pass rewrites the CFG into triangle-shaped
          // regions with a single entry and exit. As a result, a loop should
          // not have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, the Continue Target must dominate the
          // back-edge block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether this block dominates the block with the back-edge.
            // The loop latch is the single block with a back-edge. If it was
            // possible, StructurizeCFG made the loop conform to this
            // requirement, otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // The StructurizeCFG pass has already manipulated the CFG, so just
            // use the false block of the branch instruction as the merge block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}