// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
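      // For example, a 5-character string plus its terminating null occupies
      // 6 bytes, which rounds up to (5 + 4) / 4 = 2 words.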
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}

struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the
  // given operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the
  // single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};

260struct SPIRVProducerPass final : public ModulePass {
David Neto22f144c2017-06-12 14:26:21 -0400261 typedef DenseMap<Type *, uint32_t> TypeMapType;
262 typedef UniqueVector<Type *> TypeList;
263 typedef DenseMap<Value *, uint32_t> ValueMapType;
David Netofb9a7972017-08-25 17:08:24 -0400264 typedef UniqueVector<Value *> ValueList;
David Neto22f144c2017-06-12 14:26:21 -0400265 typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
266 typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
David Neto87846742018-04-11 17:36:22 -0400267 // A vector of tuples, each of which is:
268 // - the LLVM instruction that we will later generate SPIR-V code for
269 // - where the SPIR-V instruction should be inserted
270 // - the result ID of the SPIR-V instruction
David Neto22f144c2017-06-12 14:26:21 -0400271 typedef std::vector<
272 std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
273 DeferredInstVecType;
274 typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
275 GlobalConstFuncMapType;
276
David Neto44795152017-07-13 15:45:28 -0400277 explicit SPIRVProducerPass(
alan-bakerf5e5f692018-11-27 08:33:24 -0500278 raw_pwrite_stream &out,
279 std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
alan-baker00e7a582019-06-07 12:54:21 -0400280 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
David Neto44795152017-07-13 15:45:28 -0400281 bool outputCInitList)
David Netoc2c368d2017-06-30 16:50:17 -0400282 : ModulePass(ID), samplerMap(samplerMap), out(out),
David Neto0676e6f2017-07-11 18:47:44 -0400283 binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
alan-baker00e7a582019-06-07 12:54:21 -0400284 descriptorMapEntries(descriptor_map_entries),
David Neto0676e6f2017-07-11 18:47:44 -0400285 outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
alan-baker5b86ed72019-02-15 08:26:50 -0500286 OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
287 HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
Kévin Petit89a525c2019-06-15 08:13:07 +0100288 WorkgroupSizeVarID(0), max_local_spec_id_(0) {}
David Neto22f144c2017-06-12 14:26:21 -0400289
James Price11010dc2019-12-19 13:53:09 -0500290 virtual ~SPIRVProducerPass() {
291 for (auto *Inst : SPIRVInsts) {
292 delete Inst;
293 }
294 }
295
David Neto22f144c2017-06-12 14:26:21 -0400296 void getAnalysisUsage(AnalysisUsage &AU) const override {
297 AU.addRequired<DominatorTreeWrapperPass>();
298 AU.addRequired<LoopInfoWrapperPass>();
299 }
300
301 virtual bool runOnModule(Module &module) override;
302
303 // output the SPIR-V header block
304 void outputHeader();
305
306 // patch the SPIR-V header block
307 void patchHeader();
308
309 uint32_t lookupType(Type *Ty) {
310 if (Ty->isPointerTy() &&
311 (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
312 auto PointeeTy = Ty->getPointerElementType();
313 if (PointeeTy->isStructTy() &&
314 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
315 Ty = PointeeTy;
316 }
317 }
318
David Neto862b7d82018-06-14 18:48:37 -0400319 auto where = TypeMap.find(Ty);
320 if (where == TypeMap.end()) {
321 if (Ty) {
322 errs() << "Unhandled type " << *Ty << "\n";
323 } else {
324 errs() << "Unhandled type (null)\n";
325 }
David Netoe439d702018-03-23 13:14:08 -0700326 llvm_unreachable("\nUnhandled type!");
David Neto22f144c2017-06-12 14:26:21 -0400327 }
328
David Neto862b7d82018-06-14 18:48:37 -0400329 return where->second;
David Neto22f144c2017-06-12 14:26:21 -0400330 }
331 TypeMapType &getImageTypeMap() { return ImageTypeMap; }
alan-bakerabd82722019-12-03 17:14:51 -0500332 TypeList &getImageTypeList() { return ImageTypeList; }
David Neto22f144c2017-06-12 14:26:21 -0400333 TypeList &getTypeList() { return Types; };
334 ValueList &getConstantList() { return Constants; };
335 ValueMapType &getValueMap() { return ValueMap; }
336 ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
337 SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
David Neto22f144c2017-06-12 14:26:21 -0400338 EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
339 DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
340 ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
341 uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
342 std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
SJW2c317da2020-03-23 07:39:13 -0500343
alan-baker5b86ed72019-02-15 08:26:50 -0500344 bool hasVariablePointersStorageBuffer() {
345 return HasVariablePointersStorageBuffer;
346 }
347 void setVariablePointersStorageBuffer(bool Val) {
348 HasVariablePointersStorageBuffer = Val;
349 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400350 bool hasVariablePointers() { return HasVariablePointers; };
David Neto22f144c2017-06-12 14:26:21 -0400351 void setVariablePointers(bool Val) { HasVariablePointers = Val; };
alan-bakerb6b09dc2018-11-08 16:59:28 -0500352 ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
353 return samplerMap;
354 }
David Neto22f144c2017-06-12 14:26:21 -0400355 GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
356 return GlobalConstFuncTypeMap;
357 }
358 SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
359 return GlobalConstArgumentSet;
360 }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500361 TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }
David Neto22f144c2017-06-12 14:26:21 -0400362
David Netoc6f3ab22018-04-06 18:02:31 -0400363 void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GeneratePushConstantDescriptormapEntries(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv
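
// Illustrative sketch (assumed caller, not part of this file): the factory
// above is typically handed to an LLVM legacy pass manager by the driver,
// along the lines of:
//   llvm::legacy::PassManager pm;
//   pm.add(clspv::createSPIRVProducerPass(out, &descriptor_map_entries,
//                                         sampler_map,
//                                         /*outputCInitList=*/false));
//   pm.run(module);
// where |out|, |descriptor_map_entries|, |sampler_map|, and |module| are
// placeholders supplied by the caller.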

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate descriptor map entries for all push constants.
  GeneratePushConstantDescriptormapEntries(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
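    // Reassemble each group of four bytes into a 32-bit word, least
    // significant byte first (the words were written in the host's byte
    // order, assumed little-endian here).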
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

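// The SPIR-V module header emitted below is five 32-bit words: the magic
// number, the version, a generator word (the vendor ID in the upper 16 bits),
// the ID bound (patched later by patchHeader), and a zero schema word.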
void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
David Neto22f144c2017-06-12 14:26:21 -0400783 // This function generates LLVM IR for function such as global variable for
784 // argument, constant and pointer type for argument access. These information
785 // is artificial one because we need Vulkan SPIR-V output. This function is
786 // executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          } else if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          } else if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_array_wo_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
David Neto862b7d82018-06-14 18:48:37 -04001061 // (set,binding) values.
1062 const bool always_distinct_sets =
1063 clspv::Option::DistinctKernelDescriptorSets();
1064 for (Function &F : M) {
1065 // Rely on the fact the resource var functions have a stable ordering
1066 // in the module.
Alan Baker202c8c72018-08-13 13:47:44 -04001067 if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001068 // Find all calls to this function with distinct set and binding pairs.
1069 // Save them in ResourceVarInfoList.
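      // Each such call carries (set, binding, arg kind, arg index, ..., coherent)
      // in its leading operands; operand 4 is not inspected here.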
1070
1071 // Determine uniqueness of the (set,binding) pairs only within this
1072 // one resource-var builtin function.
1073 using SetAndBinding = std::pair<unsigned, unsigned>;
1074 // Maps set and binding to the resource var info.
1075 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1076 bool first_use = true;
1077 for (auto &U : F.uses()) {
1078 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1079 const auto set = unsigned(
1080 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1081 const auto binding = unsigned(
1082 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1083 const auto arg_kind = clspv::ArgKind(
1084 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1085 const auto arg_index = unsigned(
1086 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001087 const auto coherent = unsigned(
1088 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001089
1090 // Find or make the resource var info for this combination.
1091 ResourceVarInfo *rv = nullptr;
1092 if (always_distinct_sets) {
1093 // Make a new resource var any time we see a different
1094 // (set,binding) pair.
1095 SetAndBinding key{set, binding};
1096 auto where = set_and_binding_map.find(key);
1097 if (where == set_and_binding_map.end()) {
1098 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001099 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001100 ResourceVarInfoList.emplace_back(rv);
1101 set_and_binding_map[key] = rv;
1102 } else {
1103 rv = where->second;
1104 }
1105 } else {
1106 // The default is to make exactly one resource for each
1107 // clspv.resource.var.* function.
1108 if (first_use) {
1109 first_use = false;
1110 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001111 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001112 ResourceVarInfoList.emplace_back(rv);
1113 } else {
1114 rv = ResourceVarInfoList.back().get();
1115 }
1116 }
1117
1118 // Now populate FunctionToResourceVarsMap.
1119 auto &mapping =
1120 FunctionToResourceVarsMap[call->getParent()->getParent()];
1121 while (mapping.size() <= arg_index) {
1122 mapping.push_back(nullptr);
1123 }
1124 mapping[arg_index] = rv;
1125 }
1126 }
1127 }
1128 }
1129
1130 // Populate ModuleOrderedResourceVars.
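  // Walk the functions in module order so resource variables are inserted
  // in a deterministic order.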
1131 for (Function &F : M) {
1132 auto where = FunctionToResourceVarsMap.find(&F);
1133 if (where != FunctionToResourceVarsMap.end()) {
1134 for (auto &rv : where->second) {
1135 if (rv != nullptr) {
1136 ModuleOrderedResourceVars.insert(rv);
1137 }
1138 }
1139 }
1140 }
1141 if (ShowResourceVars) {
1142 for (auto *info : ModuleOrderedResourceVars) {
1143 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1144 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1145 << "\n";
1146 }
1147 }
1148}
1149
David Neto22f144c2017-06-12 14:26:21 -04001150bool SPIRVProducerPass::FindExtInst(Module &M) {
1151 LLVMContext &Context = M.getContext();
1152 bool HasExtInst = false;
1153
1154 for (Function &F : M) {
1155 for (BasicBlock &BB : F) {
1156 for (Instruction &I : BB) {
1157 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1158 Function *Callee = Call->getCalledFunction();
1159 // Check whether this call maps to an extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001160 auto callee_name = Callee->getName();
1161 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1162 const glsl::ExtInst IndirectEInst =
1163 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001164
David Neto3fbb4072017-10-16 11:28:14 -04001165 HasExtInst |=
1166 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1167
1168 if (IndirectEInst) {
1169 // Register extra constants if needed.
1170
1171 // Registers a type and constant for computing the result of the
1172 // given instruction. If the result of the instruction is a vector,
1173 // then make a splat vector constant with the same number of
1174 // elements.
1175 auto register_constant = [this, &I](Constant *constant) {
1176 FindType(constant->getType());
1177 FindConstant(constant);
1178 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1179 // Register the splat vector of the value with the same
1180 // width as the result of the instruction.
1181 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001182 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001183 constant);
1184 FindConstant(vec_constant);
1185 FindType(vec_constant->getType());
1186 }
1187 };
1188 switch (IndirectEInst) {
1189 case glsl::ExtInstFindUMsb:
1190 // clz needs OpExtInst and OpISub with constant 31, or splat
1191 // vector of 31. Add it to the constant list here.
1192 register_constant(
1193 ConstantInt::get(Type::getInt32Ty(Context), 31));
1194 break;
1195 case glsl::ExtInstAcos:
1196 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001197 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001198 case glsl::ExtInstAtan2:
1199 // We need 1/pi for acospi, asinpi, atanpi and atan2pi.
1200 register_constant(
1201 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1202 break;
1203 default:
1204 assert(false && "internally inconsistent");
1205 }
David Neto22f144c2017-06-12 14:26:21 -04001206 }
1207 }
1208 }
1209 }
1210 }
1211
1212 return HasExtInst;
1213}
1214
1215void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1216 // Investigate global variable's type.
1217 FindType(GV.getType());
1218}
1219
1220void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1221 // Investigate function's type.
1222 FunctionType *FTy = F.getFunctionType();
1223
1224 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1225 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001226 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001227 if (GlobalConstFuncTyMap.count(FTy)) {
1228 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1229 SmallVector<Type *, 4> NewFuncParamTys;
1230 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1231 Type *ParamTy = FTy->getParamType(i);
1232 if (i == GVCstArgIdx) {
1233 Type *EleTy = ParamTy->getPointerElementType();
1234 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1235 }
1236
1237 NewFuncParamTys.push_back(ParamTy);
1238 }
1239
1240 FunctionType *NewFTy =
1241 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1242 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1243 FTy = NewFTy;
1244 }
1245
1246 FindType(FTy);
1247 } else {
1248 // Kernel functions take no SPIR-V parameters (arguments are handled via
1249 // resource variables), so create an empty function type and add it to the type map.
1250 SmallVector<Type *, 4> NewFuncParamTys;
1251 FunctionType *NewFTy =
1252 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1253 FindType(NewFTy);
1254 }
1255
1256 // Investigate instructions' type in function body.
1257 for (BasicBlock &BB : F) {
1258 for (Instruction &I : BB) {
1259 if (isa<ShuffleVectorInst>(I)) {
1260 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1261 // Ignore type for mask of shuffle vector instruction.
1262 if (i == 2) {
1263 continue;
1264 }
1265
1266 Value *Op = I.getOperand(i);
1267 if (!isa<MetadataAsValue>(Op)) {
1268 FindType(Op->getType());
1269 }
1270 }
1271
1272 FindType(I.getType());
1273 continue;
1274 }
1275
David Neto862b7d82018-06-14 18:48:37 -04001276 CallInst *Call = dyn_cast<CallInst>(&I);
1277
1278 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001279 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001280 // This is a fake call representing access to a resource variable.
1281 // We handle that elsewhere.
1282 continue;
1283 }
1284
Alan Baker202c8c72018-08-13 13:47:44 -04001285 if (Call && Call->getCalledFunction()->getName().startswith(
1286 clspv::WorkgroupAccessorFunction())) {
1287 // This is a fake call representing access to a workgroup variable.
1288 // We handle that elsewhere.
1289 continue;
1290 }
1291
alan-bakerf083bed2020-01-29 08:15:42 -05001292 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1293 // OpCompositeExtract, which take literal values for indices. As a result,
1294 // don't map the type of indices.
1295 if (I.getOpcode() == Instruction::ExtractValue) {
1296 FindType(I.getOperand(0)->getType());
1297 continue;
1298 }
1299 if (I.getOpcode() == Instruction::InsertValue) {
1300 FindType(I.getOperand(0)->getType());
1301 FindType(I.getOperand(1)->getType());
1302 continue;
1303 }
1304
1305 // #497: InsertElement and ExtractElement map to OpCompositeInsert and
1306 // OpCompositeExtract when the index is a constant. In such a case don't map the index type.
1307 if (I.getOpcode() == Instruction::ExtractElement) {
1308 FindType(I.getOperand(0)->getType());
1309 Value *op1 = I.getOperand(1);
1310 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1311 FindType(op1->getType());
1312 }
1313 continue;
1314 }
1315 if (I.getOpcode() == Instruction::InsertElement) {
1316 FindType(I.getOperand(0)->getType());
1317 FindType(I.getOperand(1)->getType());
1318 Value *op2 = I.getOperand(2);
1319 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1320 FindType(op2->getType());
1321 }
1322 continue;
1323 }
1324
David Neto22f144c2017-06-12 14:26:21 -04001325 // Work through the operands of the instruction.
1326 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1327 Value *const Op = I.getOperand(i);
1328 // If any of the operands is a constant, find the type!
1329 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1330 FindType(Op->getType());
1331 }
1332 }
1333
1334 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001335 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001336 // Don't check operand types for call instructions.
1337 break;
1338 }
Alan Baker202c8c72018-08-13 13:47:44 -04001339 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1340 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1341 clspv::WorkgroupAccessorFunction())) {
1342 // This is a fake call representing access to a workgroup variable.
1343 // We handle that elsewhere.
1344 continue;
1345 }
1346 }
David Neto22f144c2017-06-12 14:26:21 -04001347 if (!isa<MetadataAsValue>(&Op)) {
1348 FindType(Op->getType());
1349 continue;
1350 }
1351 }
1352
David Neto22f144c2017-06-12 14:26:21 -04001353 // We don't want to track the type of this call as we are going to replace
1354 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001355 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001356 Call->getCalledFunction()->getName())) {
1357 continue;
1358 }
1359
1360 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1361 // If the GEP's base operand is in the ModuleScopePrivate address space,
1362 // make the GEP return a pointer in that address space as well.
1363 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1364 // Add pointer type with private address space for global constant to
1365 // type list.
1366 Type *EleTy = I.getType()->getPointerElementType();
1367 Type *NewPTy =
1368 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1369
1370 FindType(NewPTy);
1371 continue;
1372 }
1373 }
1374
1375 FindType(I.getType());
1376 }
1377 }
1378}
1379
David Neto862b7d82018-06-14 18:48:37 -04001380void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1381 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001382 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001383 0 < getSamplerMap().size()) {
1384 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1385 if (!SamplerStructTy) {
1386 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1387 }
1388
1389 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1390
1391 FindType(SamplerTy);
1392 }
1393}
1394
1395void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1396 // Record types so they are generated.
1397 TypesNeedingLayout.reset();
1398 StructTypesNeedingBlock.reset();
1399
1400 // To match older clspv codegen, generate the float type first if required
1401 // for images.
1402 for (const auto *info : ModuleOrderedResourceVars) {
1403 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1404 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001405 if (IsIntImageType(info->var_fn->getReturnType())) {
1406 // Nothing for now...
1407 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1408 FindType(Type::getInt32Ty(M.getContext()));
1409 }
1410
1411 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001412 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001413 }
1414 }
1415
1416 for (const auto *info : ModuleOrderedResourceVars) {
1417 Type *type = info->var_fn->getReturnType();
1418
1419 switch (info->arg_kind) {
1420 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001421 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001422 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1423 StructTypesNeedingBlock.insert(sty);
1424 } else {
1425 errs() << *type << "\n";
1426 llvm_unreachable("Buffer arguments must map to structures!");
1427 }
1428 break;
1429 case clspv::ArgKind::Pod:
1430 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1431 StructTypesNeedingBlock.insert(sty);
1432 } else {
1433 errs() << *type << "\n";
1434 llvm_unreachable("POD arguments must map to structures!");
1435 }
1436 break;
1437 case clspv::ArgKind::ReadOnlyImage:
1438 case clspv::ArgKind::WriteOnlyImage:
1439 case clspv::ArgKind::Sampler:
1440 // Sampler and image types map to the pointee type but
1441 // in the uniform constant address space.
1442 type = PointerType::get(type->getPointerElementType(),
1443 clspv::AddressSpace::UniformConstant);
1444 break;
1445 default:
1446 break;
1447 }
1448
1449 // The converted type is the type of the OpVariable we will generate.
1450 // If the pointee type is an array of size zero, FindType will convert it
1451 // to a runtime array.
1452 FindType(type);
1453 }
1454
alan-bakerdcd97412019-09-16 15:32:30 -04001455 // If module constants are clustered in a storage buffer then that struct
1456 // needs layout decorations.
1457 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1458 for (GlobalVariable &GV : M.globals()) {
1459 PointerType *PTy = cast<PointerType>(GV.getType());
1460 const auto AS = PTy->getAddressSpace();
1461 const bool module_scope_constant_external_init =
1462 (AS == AddressSpace::Constant) && GV.hasInitializer();
1463 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1464 if (module_scope_constant_external_init &&
1465 spv::BuiltInMax == BuiltinType) {
1466 StructTypesNeedingBlock.insert(
1467 cast<StructType>(PTy->getPointerElementType()));
1468 }
1469 }
1470 }
1471
Kévin Petitbbbda972020-03-03 19:16:31 +00001472 for (const GlobalVariable &GV : M.globals()) {
1473 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1474 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1475 assert(Ty->isStructTy() && "Push constants have to be structures.");
1476 auto STy = cast<StructType>(Ty);
1477 StructTypesNeedingBlock.insert(STy);
1478 }
1479 }
1480
David Neto862b7d82018-06-14 18:48:37 -04001481 // Traverse the arrays and structures underneath each Block, and
1482 // mark them as needing layout.
1483 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1484 StructTypesNeedingBlock.end());
1485 while (!work_list.empty()) {
1486 Type *type = work_list.back();
1487 work_list.pop_back();
1488 TypesNeedingLayout.insert(type);
1489 switch (type->getTypeID()) {
1490 case Type::ArrayTyID:
1491 work_list.push_back(type->getArrayElementType());
1492 if (!Hack_generate_runtime_array_stride_early) {
1493 // Remember this array type for deferred decoration.
1494 TypesNeedingArrayStride.insert(type);
1495 }
1496 break;
1497 case Type::StructTyID:
1498 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1499 work_list.push_back(elem_ty);
1500 }
1501 default:
1502 // This type and its contained types don't get layout.
1503 break;
1504 }
1505 }
1506}
1507
Alan Baker202c8c72018-08-13 13:47:44 -04001508void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1509 // The SpecId assignment for pointer-to-local arguments is recorded in
1510 // module-level metadata. Translate that information into local argument
1511 // information.
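  // Each metadata entry is expected to look roughly like
  //   !{void (float addrspace(3)*)* @foo, i32 1, i32 3}
  // i.e. (kernel function, pointer-to-local argument index, SpecId).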
1512 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001513 if (!nmd)
1514 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001515 for (auto operand : nmd->operands()) {
1516 MDTuple *tuple = cast<MDTuple>(operand);
1517 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1518 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001519 ConstantAsMetadata *arg_index_md =
1520 cast<ConstantAsMetadata>(tuple->getOperand(1));
1521 int arg_index = static_cast<int>(
1522 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1523 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001524
1525 ConstantAsMetadata *spec_id_md =
1526 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001527 int spec_id = static_cast<int>(
1528 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001529
1530 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1531 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001532 if (LocalSpecIdInfoMap.count(spec_id))
1533 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001534
1535 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1536 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1537 nextID + 1, nextID + 2,
1538 nextID + 3, spec_id};
1539 LocalSpecIdInfoMap[spec_id] = info;
1540 nextID += 4;
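    // Note: three of the four IDs reserved here are consumed later in
    // GenerateSPIRVTypes (the array-size spec constant, the array type and
    // the Workgroup pointer type); the remaining one is presumably the
    // variable itself.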
1541
1542 // Ensure the types necessary for this argument get generated.
1543 Type *IdxTy = Type::getInt32Ty(M.getContext());
1544 FindConstant(ConstantInt::get(IdxTy, 0));
1545 FindType(IdxTy);
1546 FindType(arg->getType());
1547 }
1548}
1549
David Neto22f144c2017-06-12 14:26:21 -04001550void SPIRVProducerPass::FindType(Type *Ty) {
1551 TypeList &TyList = getTypeList();
1552
1553 if (0 != TyList.idFor(Ty)) {
1554 return;
1555 }
1556
1557 if (Ty->isPointerTy()) {
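    // Pointers to opaque structs (e.g. sampler and image types) in the
    // constant or global address spaces are really UniformConstant
    // resources, so discover that pointer type instead.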
1558 auto AddrSpace = Ty->getPointerAddressSpace();
1559 if ((AddressSpace::Constant == AddrSpace) ||
1560 (AddressSpace::Global == AddrSpace)) {
1561 auto PointeeTy = Ty->getPointerElementType();
1562
1563 if (PointeeTy->isStructTy() &&
1564 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1565 FindType(PointeeTy);
1566 auto ActualPointerTy =
1567 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1568 FindType(ActualPointerTy);
1569 return;
1570 }
1571 }
1572 }
1573
David Neto862b7d82018-06-14 18:48:37 -04001574 // By convention, an LLVM array type with 0 elements maps to
1575 // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which
1576 // has a constant number of elements. We need to support the type of
1577 // that length constant.
1578 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1579 if (arrayTy->getNumElements() > 0) {
1580 LLVMContext &Context = Ty->getContext();
1581 FindType(Type::getInt32Ty(Context));
1582 }
David Neto22f144c2017-06-12 14:26:21 -04001583 }
1584
1585 for (Type *SubTy : Ty->subtypes()) {
1586 FindType(SubTy);
1587 }
1588
1589 TyList.insert(Ty);
1590}
1591
1592void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1593 // If the global variable has a (non-undef) initializer.
1594 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001595 // Generate the constant if it's not the initializer to a module scope
1596 // constant that we will expect in a storage buffer.
1597 const bool module_scope_constant_external_init =
1598 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1599 clspv::Option::ModuleConstantsInStorageBuffer();
1600 if (!module_scope_constant_external_init) {
1601 FindConstant(GV.getInitializer());
1602 }
David Neto22f144c2017-06-12 14:26:21 -04001603 }
1604}
1605
1606void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1607 // Investigate constants in function body.
1608 for (BasicBlock &BB : F) {
1609 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001610 if (auto *call = dyn_cast<CallInst>(&I)) {
1611 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001612 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001613 // We've handled these constants elsewhere, so skip it.
1614 continue;
1615 }
Alan Baker202c8c72018-08-13 13:47:44 -04001616 if (name.startswith(clspv::ResourceAccessorFunction())) {
1617 continue;
1618 }
1619 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001620 continue;
1621 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001622 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1623 // Skip the first operand that has the SPIR-V Opcode
1624 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1625 if (isa<Constant>(I.getOperand(i)) &&
1626 !isa<GlobalValue>(I.getOperand(i))) {
1627 FindConstant(I.getOperand(i));
1628 }
1629 }
1630 continue;
1631 }
David Neto22f144c2017-06-12 14:26:21 -04001632 }
1633
1634 if (isa<AllocaInst>(I)) {
1635 // An alloca instruction has a constant for its number of elements. Ignore it.
1636 continue;
1637 } else if (isa<ShuffleVectorInst>(I)) {
1638 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1639 // Ignore constant for mask of shuffle vector instruction.
1640 if (i == 2) {
1641 continue;
1642 }
1643
1644 if (isa<Constant>(I.getOperand(i)) &&
1645 !isa<GlobalValue>(I.getOperand(i))) {
1646 FindConstant(I.getOperand(i));
1647 }
1648 }
1649
1650 continue;
1651 } else if (isa<InsertElementInst>(I)) {
1652 // Handle InsertElement with <4 x i8> specially.
1653 Type *CompositeTy = I.getOperand(0)->getType();
1654 if (is4xi8vec(CompositeTy)) {
1655 LLVMContext &Context = CompositeTy->getContext();
1656 if (isa<Constant>(I.getOperand(0))) {
1657 FindConstant(I.getOperand(0));
1658 }
1659
1660 if (isa<Constant>(I.getOperand(1))) {
1661 FindConstant(I.getOperand(1));
1662 }
1663
1664 // Add mask constant 0xFF.
1665 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1666 FindConstant(CstFF);
1667
1668 // Add shift amount constant.
1669 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1670 uint64_t Idx = CI->getZExtValue();
1671 Constant *CstShiftAmount =
1672 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1673 FindConstant(CstShiftAmount);
1674 }
1675
1676 continue;
1677 }
1678
1679 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1680 // Ignore constant for index of InsertElement instruction.
1681 if (i == 2) {
1682 continue;
1683 }
1684
1685 if (isa<Constant>(I.getOperand(i)) &&
1686 !isa<GlobalValue>(I.getOperand(i))) {
1687 FindConstant(I.getOperand(i));
1688 }
1689 }
1690
1691 continue;
1692 } else if (isa<ExtractElementInst>(I)) {
1693 // Handle ExtractElement with <4 x i8> specially.
1694 Type *CompositeTy = I.getOperand(0)->getType();
1695 if (is4xi8vec(CompositeTy)) {
1696 LLVMContext &Context = CompositeTy->getContext();
1697 if (isa<Constant>(I.getOperand(0))) {
1698 FindConstant(I.getOperand(0));
1699 }
1700
1701 // Add mask constant 0xFF.
1702 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1703 FindConstant(CstFF);
1704
1705 // Add shift amount constant.
1706 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1707 uint64_t Idx = CI->getZExtValue();
1708 Constant *CstShiftAmount =
1709 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1710 FindConstant(CstShiftAmount);
1711 } else {
1712 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1713 FindConstant(Cst8);
1714 }
1715
1716 continue;
1717 }
1718
1719 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1720 // Ignore constant for index of ExtractElement instruction.
1721 if (i == 1) {
1722 continue;
1723 }
1724
1725 if (isa<Constant>(I.getOperand(i)) &&
1726 !isa<GlobalValue>(I.getOperand(i))) {
1727 FindConstant(I.getOperand(i));
1728 }
1729 }
1730
1731 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001732 } else if ((Instruction::Xor == I.getOpcode()) &&
1733 I.getType()->isIntegerTy(1)) {
1734 // We special case for Xor where the type is i1 and one of the arguments
1735 // is a constant 1 (true), this is an OpLogicalNot in SPIR-V, and we
1736 // don't need the constant
David Neto22f144c2017-06-12 14:26:21 -04001737 bool foundConstantTrue = false;
1738 for (Use &Op : I.operands()) {
1739 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1740 auto CI = cast<ConstantInt>(Op);
1741
1742 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001743 // If we already found the true constant, we might (probably only
1744 // on -O0) have an OpLogicalNot which is taking a constant
1745 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001746 FindConstant(Op);
1747 } else {
1748 foundConstantTrue = true;
1749 }
1750 }
1751 }
1752
1753 continue;
David Netod2de94a2017-08-28 17:27:47 -04001754 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001755 // Special case if i8 is not generally handled.
1756 if (!clspv::Option::Int8Support()) {
1757 // For truncation to i8 we mask against 255.
1758 Type *ToTy = I.getType();
1759 if (8u == ToTy->getPrimitiveSizeInBits()) {
1760 LLVMContext &Context = ToTy->getContext();
1761 Constant *Cst255 =
1762 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1763 FindConstant(Cst255);
1764 }
David Netod2de94a2017-08-28 17:27:47 -04001765 }
Neil Henning39672102017-09-29 14:33:13 +01001766 } else if (isa<AtomicRMWInst>(I)) {
1767 LLVMContext &Context = I.getContext();
1768
1769 FindConstant(
1770 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1771 FindConstant(ConstantInt::get(
1772 Type::getInt32Ty(Context),
1773 spv::MemorySemanticsUniformMemoryMask |
1774 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001775 }
1776
1777 for (Use &Op : I.operands()) {
1778 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1779 FindConstant(Op);
1780 }
1781 }
1782 }
1783 }
1784}
1785
1786void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001787 ValueList &CstList = getConstantList();
1788
David Netofb9a7972017-08-25 17:08:24 -04001789 // If V is already tracked, ignore it.
1790 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001791 return;
1792 }
1793
David Neto862b7d82018-06-14 18:48:37 -04001794 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1795 return;
1796 }
1797
David Neto22f144c2017-06-12 14:26:21 -04001798 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001799 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001800
1801 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001802 if (is4xi8vec(CstTy)) {
1803 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001804 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001805 }
1806 }
1807
1808 if (Cst->getNumOperands()) {
1809 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1810 ++I) {
1811 FindConstant(*I);
1812 }
1813
David Netofb9a7972017-08-25 17:08:24 -04001814 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001815 return;
1816 } else if (const ConstantDataSequential *CDS =
1817 dyn_cast<ConstantDataSequential>(Cst)) {
1818 // Add constants for each element to constant list.
1819 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1820 Constant *EleCst = CDS->getElementAsConstant(i);
1821 FindConstant(EleCst);
1822 }
1823 }
1824
1825 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001826 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001827 }
1828}
1829
1830spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1831 switch (AddrSpace) {
1832 default:
1833 llvm_unreachable("Unsupported OpenCL address space");
1834 case AddressSpace::Private:
1835 return spv::StorageClassFunction;
1836 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001837 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001838 case AddressSpace::Constant:
1839 return clspv::Option::ConstantArgsInUniformBuffer()
1840 ? spv::StorageClassUniform
1841 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001842 case AddressSpace::Input:
1843 return spv::StorageClassInput;
1844 case AddressSpace::Local:
1845 return spv::StorageClassWorkgroup;
1846 case AddressSpace::UniformConstant:
1847 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001848 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001849 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001850 case AddressSpace::ModuleScopePrivate:
1851 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001852 case AddressSpace::PushConstant:
1853 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001854 }
1855}
1856
David Neto862b7d82018-06-14 18:48:37 -04001857spv::StorageClass
1858SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1859 switch (arg_kind) {
1860 case clspv::ArgKind::Buffer:
1861 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001862 case clspv::ArgKind::BufferUBO:
1863 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001864 case clspv::ArgKind::Pod:
1865 return clspv::Option::PodArgsInUniformBuffer()
1866 ? spv::StorageClassUniform
1867 : spv::StorageClassStorageBuffer;
1868 case clspv::ArgKind::Local:
1869 return spv::StorageClassWorkgroup;
1870 case clspv::ArgKind::ReadOnlyImage:
1871 case clspv::ArgKind::WriteOnlyImage:
1872 case clspv::ArgKind::Sampler:
1873 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001874 default:
1875 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001876 }
1877}
1878
David Neto22f144c2017-06-12 14:26:21 -04001879spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1880 return StringSwitch<spv::BuiltIn>(Name)
1881 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1882 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1883 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1884 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1885 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1886 .Default(spv::BuiltInMax);
1887}
1888
1889void SPIRVProducerPass::GenerateExtInstImport() {
1890 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1891 uint32_t &ExtInstImportID = getOpExtInstImportID();
1892
1893 //
1894 // Generate OpExtInstImport.
1895 //
1896 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001897 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001898 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1899 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001900}
1901
alan-bakerb6b09dc2018-11-08 16:59:28 -05001902void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1903 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001904 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1905 ValueMapType &VMap = getValueMap();
1906 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001907 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001908
1909 // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
1910 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1911 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1912
1913 for (Type *Ty : getTypeList()) {
1914 // Update TypeMap with nextID for reference later.
1915 TypeMap[Ty] = nextID;
1916
1917 switch (Ty->getTypeID()) {
1918 default: {
1919 Ty->print(errs());
1920 llvm_unreachable("Unsupported type???");
1921 break;
1922 }
1923 case Type::MetadataTyID:
1924 case Type::LabelTyID: {
1925 // Ignore these types.
1926 break;
1927 }
1928 case Type::PointerTyID: {
1929 PointerType *PTy = cast<PointerType>(Ty);
1930 unsigned AddrSpace = PTy->getAddressSpace();
1931
1932 // For the purposes of our Vulkan SPIR-V type system, constant and global
1933 // are conflated.
1934 bool UseExistingOpTypePointer = false;
1935 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001936 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1937 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001938 // Check to see if we already created this type (for instance, if we
1939 // had a constant <type>* and a global <type>*, the SPIR-V type would be
1940 // created for one of them and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001941 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1942 if (0 < TypeMap.count(GlobalTy)) {
1943 TypeMap[PTy] = TypeMap[GlobalTy];
1944 UseExistingOpTypePointer = true;
1945 break;
1946 }
David Neto22f144c2017-06-12 14:26:21 -04001947 }
1948 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001949 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1950 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001951
alan-bakerb6b09dc2018-11-08 16:59:28 -05001952 // Check to see if we already created this type (for instance, if we
1953 // had a constant <type>* and a global <type>*, the SPIR-V type would be
1954 // created for one of them and shared by both).
1955 auto ConstantTy =
1956 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001957 if (0 < TypeMap.count(ConstantTy)) {
1958 TypeMap[PTy] = TypeMap[ConstantTy];
1959 UseExistingOpTypePointer = true;
1960 }
David Neto22f144c2017-06-12 14:26:21 -04001961 }
1962 }
1963
David Neto862b7d82018-06-14 18:48:37 -04001964 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001965
David Neto862b7d82018-06-14 18:48:37 -04001966 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001967 //
1968 // Generate OpTypePointer.
1969 //
1970
1971 // OpTypePointer
1972 // Ops[0] = Storage Class
1973 // Ops[1] = Element Type ID
1974 SPIRVOperandList Ops;
1975
David Neto257c3892018-04-11 13:19:45 -04001976 Ops << MkNum(GetStorageClass(AddrSpace))
1977 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001978
David Neto87846742018-04-11 17:36:22 -04001979 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001980 SPIRVInstList.push_back(Inst);
1981 }
David Neto22f144c2017-06-12 14:26:21 -04001982 break;
1983 }
1984 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001985 StructType *STy = cast<StructType>(Ty);
1986
1987 // Handle sampler type.
1988 if (STy->isOpaque()) {
1989 if (STy->getName().equals("opencl.sampler_t")) {
1990 //
1991 // Generate OpTypeSampler
1992 //
1993 // Empty Ops.
1994 SPIRVOperandList Ops;
1995
David Neto87846742018-04-11 17:36:22 -04001996 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001997 SPIRVInstList.push_back(Inst);
1998 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001999 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2000 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002001 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2002 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002003 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002004 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002005 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2006 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002007 STy->getName().startswith("opencl.image3d_ro_t") ||
2008 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002009 //
2010 // Generate OpTypeImage
2011 //
2012 // Ops[0] = Sampled Type ID
2013 // Ops[1] = Dim ID
2014 // Ops[2] = Depth (Literal Number)
2015 // Ops[3] = Arrayed (Literal Number)
2016 // Ops[4] = MS (Literal Number)
2017 // Ops[5] = Sampled (Literal Number)
2018 // Ops[6] = Image Format ID
2019 //
2020 SPIRVOperandList Ops;
2021
alan-bakerf67468c2019-11-25 15:51:49 -05002022 uint32_t ImageTyID = nextID++;
2023 uint32_t SampledTyID = 0;
2024 if (STy->getName().contains(".float")) {
2025 SampledTyID = lookupType(Type::getFloatTy(Context));
2026 } else if (STy->getName().contains(".uint")) {
2027 SampledTyID = lookupType(Type::getInt32Ty(Context));
2028 } else if (STy->getName().contains(".int")) {
2029 // Generate a signed 32-bit integer if necessary.
2030 if (int32ID == 0) {
2031 int32ID = nextID++;
2032 SPIRVOperandList intOps;
2033 intOps << MkNum(32);
2034 intOps << MkNum(1);
2035 auto signed_int =
2036 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2037 SPIRVInstList.push_back(signed_int);
2038 }
2039 SampledTyID = int32ID;
2040
2041 // Generate a vec4 of the signed int if necessary.
2042 if (v4int32ID == 0) {
2043 v4int32ID = nextID++;
2044 SPIRVOperandList vecOps;
2045 vecOps << MkId(int32ID);
2046 vecOps << MkNum(4);
2047 auto int_vec =
2048 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2049 SPIRVInstList.push_back(int_vec);
2050 }
2051 } else {
2052 // This was likely an UndefValue.
2053 SampledTyID = lookupType(Type::getFloatTy(Context));
2054 }
David Neto257c3892018-04-11 13:19:45 -04002055 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002056
2057 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002058 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002059 STy->getName().startswith("opencl.image1d_wo_t") ||
2060 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2061 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002062 DimID = spv::Dim1D;
2063 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2064 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002065 DimID = spv::Dim3D;
2066 }
David Neto257c3892018-04-11 13:19:45 -04002067 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002068
2069 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002070 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002071
alan-baker7150a1d2020-02-25 08:31:06 -05002072 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2073 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002074
2075 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002076 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002077
alan-baker7150a1d2020-02-25 08:31:06 -05002078 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002079 //
2080 // From Spec
2081 //
2082 // 0 indicates this is only known at run time, not at compile time
2083 // 1 indicates will be used with sampler
2084 // 2 indicates will be used without a sampler (a storage image)
2085 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002086 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002087 Sampled = 2;
2088 }
David Neto257c3892018-04-11 13:19:45 -04002089 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002090
2091 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002092 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002093
alan-bakerf67468c2019-11-25 15:51:49 -05002094 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002095 SPIRVInstList.push_back(Inst);
2096 break;
2097 }
2098 }
2099
2100 //
2101 // Generate OpTypeStruct
2102 //
2103 // Ops[0] ... Ops[n] = Member IDs
2104 SPIRVOperandList Ops;
2105
2106 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002107 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002108 }
2109
David Neto22f144c2017-06-12 14:26:21 -04002110 uint32_t STyID = nextID;
2111
alan-bakerb6b09dc2018-11-08 16:59:28 -05002112 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002113 SPIRVInstList.push_back(Inst);
2114
2115 // Generate OpMemberDecorate.
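      // Decorations must precede types and constants in the emitted module,
      // so insert them at the end of the leading OpExtInstImport/OpDecorate
      // block rather than appending them.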
2116 auto DecoInsertPoint =
2117 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2118 [](SPIRVInstruction *Inst) -> bool {
2119 return Inst->getOpcode() != spv::OpDecorate &&
2120 Inst->getOpcode() != spv::OpMemberDecorate &&
2121 Inst->getOpcode() != spv::OpExtInstImport;
2122 });
2123
Kévin Petitbbbda972020-03-03 19:16:31 +00002124 if (TypesNeedingLayout.idFor(STy)) {
2125 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2126 MemberIdx++) {
2127 // Ops[0] = Structure Type ID
2128 // Ops[1] = Member Index(Literal Number)
2129 // Ops[2] = Decoration (Offset)
2130 // Ops[3] = Byte Offset (Literal Number)
2131 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002132
Kévin Petitbbbda972020-03-03 19:16:31 +00002133 Ops << MkId(STyID) << MkNum(MemberIdx)
2134 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002135
Kévin Petitbbbda972020-03-03 19:16:31 +00002136 const auto ByteOffset =
2137 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002138
Kévin Petitbbbda972020-03-03 19:16:31 +00002139 Ops << MkNum(ByteOffset);
2140
2141 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
2142 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002143 }
David Neto22f144c2017-06-12 14:26:21 -04002144 }
2145
2146 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002147 if (StructTypesNeedingBlock.idFor(STy)) {
2148 Ops.clear();
2149 // Use Block decorations with StorageBuffer storage class.
2150 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002151
David Neto862b7d82018-06-14 18:48:37 -04002152 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2153 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002154 }
2155 break;
2156 }
2157 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002158 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002159
2160 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002161 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002162 SPIRVInstList.push_back(Inst);
2163 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002164 if (!clspv::Option::Int8Support()) {
2165 // i8 is added to TypeMap as i32.
2166 // No matter what LLVM type is requested first, always alias the
2167 // second one's SPIR-V type to be the same as the one we generated
2168 // first.
2169 unsigned aliasToWidth = 0;
2170 if (BitWidth == 8) {
2171 aliasToWidth = 32;
2172 BitWidth = 32;
2173 } else if (BitWidth == 32) {
2174 aliasToWidth = 8;
2175 }
2176 if (aliasToWidth) {
2177 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2178 auto where = TypeMap.find(otherType);
2179 if (where == TypeMap.end()) {
2180 // Go ahead and make it, but also map the other type to it.
2181 TypeMap[otherType] = nextID;
2182 } else {
2183 // Alias this SPIR-V type to the existing type.
2184 TypeMap[Ty] = where->second;
2185 break;
2186 }
David Neto391aeb12017-08-26 15:51:58 -04002187 }
David Neto22f144c2017-06-12 14:26:21 -04002188 }
2189
David Neto257c3892018-04-11 13:19:45 -04002190 SPIRVOperandList Ops;
2191 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002192
2193 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002194 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002195 }
2196 break;
2197 }
2198 case Type::HalfTyID:
2199 case Type::FloatTyID:
2200 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002201 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002202 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002203
2204 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002205 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002206 break;
2207 }
2208 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002209 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002210 const uint64_t Length = ArrTy->getArrayNumElements();
2211 if (Length == 0) {
2212 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002213
David Neto862b7d82018-06-14 18:48:37 -04002214 // Only generate the type once.
2215 // TODO(dneto): Can it ever be generated more than once?
2216 // Doesn't LLVM type uniqueness guarantee we'll only see this
2217 // once?
2218 Type *EleTy = ArrTy->getArrayElementType();
2219 if (OpRuntimeTyMap.count(EleTy) == 0) {
2220 uint32_t OpTypeRuntimeArrayID = nextID;
2221 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002222
David Neto862b7d82018-06-14 18:48:37 -04002223 //
2224 // Generate OpTypeRuntimeArray.
2225 //
David Neto22f144c2017-06-12 14:26:21 -04002226
David Neto862b7d82018-06-14 18:48:37 -04002227 // OpTypeRuntimeArray
2228 // Ops[0] = Element Type ID
2229 SPIRVOperandList Ops;
2230 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002231
David Neto862b7d82018-06-14 18:48:37 -04002232 SPIRVInstList.push_back(
2233 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002234
David Neto862b7d82018-06-14 18:48:37 -04002235 if (Hack_generate_runtime_array_stride_early) {
2236 // Generate OpDecorate.
2237 auto DecoInsertPoint = std::find_if(
2238 SPIRVInstList.begin(), SPIRVInstList.end(),
2239 [](SPIRVInstruction *Inst) -> bool {
2240 return Inst->getOpcode() != spv::OpDecorate &&
2241 Inst->getOpcode() != spv::OpMemberDecorate &&
2242 Inst->getOpcode() != spv::OpExtInstImport;
2243 });
David Neto22f144c2017-06-12 14:26:21 -04002244
David Neto862b7d82018-06-14 18:48:37 -04002245 // Ops[0] = Target ID
2246 // Ops[1] = Decoration (ArrayStride)
2247 // Ops[2] = Stride Number(Literal Number)
2248 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002249
David Neto862b7d82018-06-14 18:48:37 -04002250 Ops << MkId(OpTypeRuntimeArrayID)
2251 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002252 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002253
David Neto862b7d82018-06-14 18:48:37 -04002254 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2255 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2256 }
2257 }
David Neto22f144c2017-06-12 14:26:21 -04002258
David Neto862b7d82018-06-14 18:48:37 -04002259 } else {
David Neto22f144c2017-06-12 14:26:21 -04002260
David Neto862b7d82018-06-14 18:48:37 -04002261 //
2262 // Generate OpConstant and OpTypeArray.
2263 //
2264
2265 //
2266 // Generate OpConstant for array length.
2267 //
2268 // Ops[0] = Result Type ID
2269 // Ops[1] .. Ops[n] = Values LiteralNumber
2270 SPIRVOperandList Ops;
2271
2272 Type *LengthTy = Type::getInt32Ty(Context);
2273 uint32_t ResTyID = lookupType(LengthTy);
2274 Ops << MkId(ResTyID);
2275
2276 assert(Length < UINT32_MAX);
2277 Ops << MkNum(static_cast<uint32_t>(Length));
2278
2279 // Add constant for length to constant list.
2280 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2281 AllocatedVMap[CstLength] = nextID;
2282 VMap[CstLength] = nextID;
2283 uint32_t LengthID = nextID;
2284
2285 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2286 SPIRVInstList.push_back(CstInst);
2287
2288 // Remember to generate ArrayStride later
2289 getTypesNeedingArrayStride().insert(Ty);
2290
2291 //
2292 // Generate OpTypeArray.
2293 //
2294 // Ops[0] = Element Type ID
2295 // Ops[1] = Array Length Constant ID
2296 Ops.clear();
2297
2298 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2299 Ops << MkId(EleTyID) << MkId(LengthID);
2300
2301 // Update TypeMap with nextID.
2302 TypeMap[Ty] = nextID;
2303
2304 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2305 SPIRVInstList.push_back(ArrayInst);
2306 }
David Neto22f144c2017-06-12 14:26:21 -04002307 break;
2308 }
2309 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002310 // <4 x i8> is changed to i32 if i8 is not generally supported.
2311 if (!clspv::Option::Int8Support() &&
2312 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002313 if (Ty->getVectorNumElements() == 4) {
2314 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2315 break;
2316 } else {
2317 Ty->print(errs());
2318 llvm_unreachable("Support above i8 vector type");
2319 }
2320 }
2321
2322 // Ops[0] = Component Type ID
2323 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002324 SPIRVOperandList Ops;
2325 Ops << MkId(lookupType(Ty->getVectorElementType()))
2326 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002327
alan-bakerb6b09dc2018-11-08 16:59:28 -05002328 SPIRVInstruction *inst =
2329 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002330 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002331 break;
2332 }
2333 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002334 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002335 SPIRVInstList.push_back(Inst);
2336 break;
2337 }
2338 case Type::FunctionTyID: {
2339 // Generate SPIRV instruction for function type.
2340 FunctionType *FTy = cast<FunctionType>(Ty);
2341
2342 // Ops[0] = Return Type ID
2343 // Ops[1] ... Ops[n] = Parameter Type IDs
2344 SPIRVOperandList Ops;
2345
2346 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002347 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002348
2349 // Find SPIRV instructions for parameter types
2350 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2351 // Find SPIRV instruction for parameter type.
2352 auto ParamTy = FTy->getParamType(k);
2353 if (ParamTy->isPointerTy()) {
2354 auto PointeeTy = ParamTy->getPointerElementType();
2355 if (PointeeTy->isStructTy() &&
2356 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2357 ParamTy = PointeeTy;
2358 }
2359 }
2360
David Netoc6f3ab22018-04-06 18:02:31 -04002361 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002362 }
2363
David Neto87846742018-04-11 17:36:22 -04002364 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002365 SPIRVInstList.push_back(Inst);
2366 break;
2367 }
2368 }
2369 }
2370
2371 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002372 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002373 //
2374 // Generate OpTypeSampledImage.
2375 //
2376 // Ops[0] = Image Type ID
2377 //
2378 SPIRVOperandList Ops;
2379
David Netoc6f3ab22018-04-06 18:02:31 -04002380 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002381
alan-bakerabd82722019-12-03 17:14:51 -05002382 // Update the image type map.
2383 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002384
David Neto87846742018-04-11 17:36:22 -04002385 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002386 SPIRVInstList.push_back(Inst);
2387 }
David Netoc6f3ab22018-04-06 18:02:31 -04002388
2389 // Generate types for pointer-to-local arguments.
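  // For each SpecId this emits roughly:
  //   %size = OpSpecConstant %uint 1   ; the SpecId decoration is added elsewhere
  //   %arr  = OpTypeArray %elem %size
  //   %ptr  = OpTypePointer Workgroup %arr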
Alan Baker202c8c72018-08-13 13:47:44 -04002390 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2391 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002392 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002393
2394 // Generate the spec constant.
2395 SPIRVOperandList Ops;
2396 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002397 SPIRVInstList.push_back(
2398 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002399
2400 // Generate the array type.
2401 Ops.clear();
2402 // The element type must have been created.
2403 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2404 assert(elem_ty_id);
2405 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2406
2407 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002408 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002409
2410 Ops.clear();
2411 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002412 SPIRVInstList.push_back(new SPIRVInstruction(
2413 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002414 }
David Neto22f144c2017-06-12 14:26:21 -04002415}
2416
2417void SPIRVProducerPass::GenerateSPIRVConstants() {
2418 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2419 ValueMapType &VMap = getValueMap();
2420 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2421 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002422 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002423
2424 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002425 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002426 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002427
2428 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002429 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002430 continue;
2431 }
2432
David Netofb9a7972017-08-25 17:08:24 -04002433 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002434 VMap[Cst] = nextID;
2435
2436 //
2437 // Generate OpConstant.
2438 //
2439
2440 // Ops[0] = Result Type ID
2441 // Ops[1] .. Ops[n] = Values LiteralNumber
2442 SPIRVOperandList Ops;
2443
David Neto257c3892018-04-11 13:19:45 -04002444 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002445
2446 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002447 spv::Op Opcode = spv::OpNop;
2448
2449 if (isa<UndefValue>(Cst)) {
2450 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002451 Opcode = spv::OpUndef;
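      // With -hack-undef, a nullable undef is emitted as OpConstantNull
      // rather than OpUndef (presumably to sidestep drivers that mishandle
      // OpUndef).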
Alan Baker9bf93fb2018-08-28 16:59:26 -04002452 if (hack_undef && IsTypeNullable(Cst->getType())) {
2453 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002454 }
David Neto22f144c2017-06-12 14:26:21 -04002455 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2456 unsigned BitWidth = CI->getBitWidth();
2457 if (BitWidth == 1) {
2458 // If the bitwidth of constant is 1, generate OpConstantTrue or
2459 // OpConstantFalse.
2460 if (CI->getZExtValue()) {
2461 // Ops[0] = Result Type ID
2462 Opcode = spv::OpConstantTrue;
2463 } else {
2464 // Ops[0] = Result Type ID
2465 Opcode = spv::OpConstantFalse;
2466 }
David Neto22f144c2017-06-12 14:26:21 -04002467 } else {
2468 auto V = CI->getZExtValue();
2469 LiteralNum.push_back(V & 0xFFFFFFFF);
2470
2471 if (BitWidth > 32) {
2472 LiteralNum.push_back(V >> 32);
2473 }
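        // For example, a 64-bit constant 0x100000002 is encoded as the two
        // 32-bit words 0x00000002 then 0x00000001 (low word first).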
2474
2475 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002476
David Neto257c3892018-04-11 13:19:45 -04002477 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002478 }
2479 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2480 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2481 Type *CFPTy = CFP->getType();
2482 if (CFPTy->isFloatTy()) {
2483 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002484 } else if (CFPTy->isDoubleTy()) {
2485 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2486 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002487 } else if (CFPTy->isHalfTy()) {
2488 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002489 } else {
2490 CFPTy->print(errs());
2491 llvm_unreachable("Implement this ConstantFP Type");
2492 }
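      // For example, the float 1.0f is encoded as the single word 0x3F800000;
      // a double is split into a low word followed by a high word, and a half
      // occupies the low 16 bits of one word.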
2493
2494 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002495
David Neto257c3892018-04-11 13:19:45 -04002496 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002497 } else if (isa<ConstantDataSequential>(Cst) &&
2498 cast<ConstantDataSequential>(Cst)->isString()) {
2499 Cst->print(errs());
2500 llvm_unreachable("Implement this Constant");
2501
2502 } else if (const ConstantDataSequential *CDS =
2503 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002504 // Let's convert <4 x i8> constant to int constant specially.
2505 // This case occurs when all the values are specified as constant
2506 // ints.
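      // For example, the constant <4 x i8> <i8 1, i8 2, i8 3, i8 4> folds into
      // the single i32 constant 0x01020304 (element 0 lands in the most
      // significant byte, per the shift order below).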
2507 Type *CstTy = Cst->getType();
2508 if (is4xi8vec(CstTy)) {
2509 LLVMContext &Context = CstTy->getContext();
2510
2511 //
2512 // Generate OpConstant with OpTypeInt 32 0.
2513 //
Neil Henning39672102017-09-29 14:33:13 +01002514 uint32_t IntValue = 0;
2515 for (unsigned k = 0; k < 4; k++) {
2516 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002517 IntValue = (IntValue << 8) | (Val & 0xffu);
2518 }
2519
2520 Type *i32 = Type::getInt32Ty(Context);
2521 Constant *CstInt = ConstantInt::get(i32, IntValue);
2522 // If this constant is already registered on VMap, use it.
2523 if (VMap.count(CstInt)) {
2524 uint32_t CstID = VMap[CstInt];
2525 VMap[Cst] = CstID;
2526 continue;
2527 }
2528
David Neto257c3892018-04-11 13:19:45 -04002529 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002530
David Neto87846742018-04-11 17:36:22 -04002531 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002532 SPIRVInstList.push_back(CstInst);
2533
2534 continue;
2535 }
2536
2537 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002538 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2539 Constant *EleCst = CDS->getElementAsConstant(k);
2540 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002541 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002542 }
2543
2544 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002545 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2546 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002547 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002548 Type *CstTy = Cst->getType();
2549 if (is4xi8vec(CstTy)) {
2550 LLVMContext &Context = CstTy->getContext();
2551
2552 //
2553 // Generate OpConstant with OpTypeInt 32 0.
2554 //
Neil Henning39672102017-09-29 14:33:13 +01002555 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002556 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2557 I != E; ++I) {
2558 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002559 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002560 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2561 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002562 }
David Neto49351ac2017-08-26 17:32:20 -04002563 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002564 }
2565
David Neto49351ac2017-08-26 17:32:20 -04002566 Type *i32 = Type::getInt32Ty(Context);
2567 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002568 // If this constant is already registered on VMap, use it.
2569 if (VMap.count(CstInt)) {
2570 uint32_t CstID = VMap[CstInt];
2571 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002572 continue;
David Neto22f144c2017-06-12 14:26:21 -04002573 }
2574
David Neto257c3892018-04-11 13:19:45 -04002575 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002576
David Neto87846742018-04-11 17:36:22 -04002577 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002578 SPIRVInstList.push_back(CstInst);
2579
David Neto19a1bad2017-08-25 15:01:41 -04002580 continue;
David Neto22f144c2017-06-12 14:26:21 -04002581 }
2582
2583 // We use a constant composite in SPIR-V for our constant aggregate in
2584 // LLVM.
2585 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002586
2587 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2588 // Look up the ID of the element of this aggregate (which we will
2589 // previously have created a constant for).
2590 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2591
2592 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002593 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002594 }
2595 } else if (Cst->isNullValue()) {
2596 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002597 } else {
2598 Cst->print(errs());
2599       llvm_unreachable("Unsupported constant type");
2600 }
2601
alan-baker5b86ed72019-02-15 08:26:50 -05002602 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2603 // Null pointer requires variable pointers.
2604 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2605 }
2606
David Neto87846742018-04-11 17:36:22 -04002607 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002608 SPIRVInstList.push_back(CstInst);
2609 }
2610}
2611
2612void SPIRVProducerPass::GenerateSamplers(Module &M) {
2613 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002614
alan-bakerb6b09dc2018-11-08 16:59:28 -05002615 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002616 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002617 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2618 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002619
David Neto862b7d82018-06-14 18:48:37 -04002620  // We might have samplers in the sampler map that are not used
2621  // in the translation unit. We still need to allocate variables
2622  // and bindings for them.
2623 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002624
Kévin Petitdf71de32019-04-09 14:09:50 +01002625 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002626 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002627 if (!var_fn)
2628 return;
alan-baker09cb9802019-12-10 13:16:27 -05002629
David Neto862b7d82018-06-14 18:48:37 -04002630 for (auto user : var_fn->users()) {
2631 // Populate SamplerLiteralToDescriptorSetMap and
2632 // SamplerLiteralToBindingMap.
2633 //
2634 // Look for calls like
2635 // call %opencl.sampler_t addrspace(2)*
2636 // @clspv.sampler.var.literal(
2637 // i32 descriptor,
2638 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002639 // i32 (index-into-sampler-map|sampler_mask))
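    //
    // When the sampler map is in use, the third operand is an index into that
    // map; otherwise it is already the literal sampler value (mask) itself.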
alan-bakerb6b09dc2018-11-08 16:59:28 -05002640 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002641 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002642 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002643 auto sampler_value = third_param;
2644 if (clspv::Option::UseSamplerMap()) {
2645 if (third_param >= sampler_map.size()) {
2646 errs() << "Out of bounds index to sampler map: " << third_param;
2647 llvm_unreachable("bad sampler init: out of bounds");
2648 }
2649 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002650 }
2651
David Neto862b7d82018-06-14 18:48:37 -04002652 const auto descriptor_set = static_cast<unsigned>(
2653 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2654 const auto binding = static_cast<unsigned>(
2655 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2656
2657 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2658 SamplerLiteralToBindingMap[sampler_value] = binding;
2659 used_bindings.insert(binding);
2660 }
2661 }
2662
alan-baker09cb9802019-12-10 13:16:27 -05002663 DenseSet<size_t> seen;
2664 for (auto user : var_fn->users()) {
2665 if (!isa<CallInst>(user))
2666 continue;
2667
2668 auto call = cast<CallInst>(user);
2669 const unsigned third_param = static_cast<unsigned>(
2670 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2671
2672 // Already allocated a variable for this value.
2673 if (!seen.insert(third_param).second)
2674 continue;
2675
2676 auto sampler_value = third_param;
2677 if (clspv::Option::UseSamplerMap()) {
2678 sampler_value = sampler_map[third_param].first;
2679 }
2680
David Neto22f144c2017-06-12 14:26:21 -04002681 // Generate OpVariable.
2682 //
2683 // GIDOps[0] : Result Type ID
2684 // GIDOps[1] : Storage Class
2685 SPIRVOperandList Ops;
2686
David Neto257c3892018-04-11 13:19:45 -04002687 Ops << MkId(lookupType(SamplerTy))
2688 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002689
David Neto862b7d82018-06-14 18:48:37 -04002690 auto sampler_var_id = nextID++;
2691 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002692 SPIRVInstList.push_back(Inst);
2693
alan-baker09cb9802019-12-10 13:16:27 -05002694 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002695
2696 // Find Insert Point for OpDecorate.
2697 auto DecoInsertPoint =
2698 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2699 [](SPIRVInstruction *Inst) -> bool {
2700 return Inst->getOpcode() != spv::OpDecorate &&
2701 Inst->getOpcode() != spv::OpMemberDecorate &&
2702 Inst->getOpcode() != spv::OpExtInstImport;
2703 });
2704
2705 // Ops[0] = Target ID
2706 // Ops[1] = Decoration (DescriptorSet)
2707 // Ops[2] = LiteralNumber according to Decoration
2708 Ops.clear();
2709
David Neto862b7d82018-06-14 18:48:37 -04002710 unsigned descriptor_set;
2711 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002712 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002713 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002714      // This sampler is not actually used. Find the next free binding.
2715 for (binding = 0; used_bindings.count(binding); binding++)
2716 ;
2717 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2718 used_bindings.insert(binding);
2719 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002720 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2721 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002722
alan-baker09cb9802019-12-10 13:16:27 -05002723 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002724 descriptorMapEntries->emplace_back(std::move(sampler_data),
2725 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002726 }
2727
2728 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2729 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002730
David Neto87846742018-04-11 17:36:22 -04002731 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002732 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2733
2734 // Ops[0] = Target ID
2735 // Ops[1] = Decoration (Binding)
2736 // Ops[2] = LiteralNumber according to Decoration
2737 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002738 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2739 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002740
David Neto87846742018-04-11 17:36:22 -04002741 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002742 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2743 }
David Neto862b7d82018-06-14 18:48:37 -04002744}
David Neto22f144c2017-06-12 14:26:21 -04002745
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002746void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002747 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2748 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002749
David Neto862b7d82018-06-14 18:48:37 -04002750  // Generate variables. Make one for each resource var info object.
2751 for (auto *info : ModuleOrderedResourceVars) {
2752 Type *type = info->var_fn->getReturnType();
2753 // Remap the address space for opaque types.
2754 switch (info->arg_kind) {
2755 case clspv::ArgKind::Sampler:
2756 case clspv::ArgKind::ReadOnlyImage:
2757 case clspv::ArgKind::WriteOnlyImage:
2758 type = PointerType::get(type->getPointerElementType(),
2759 clspv::AddressSpace::UniformConstant);
2760 break;
2761 default:
2762 break;
2763 }
David Neto22f144c2017-06-12 14:26:21 -04002764
David Neto862b7d82018-06-14 18:48:37 -04002765 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002766
David Neto862b7d82018-06-14 18:48:37 -04002767 const auto type_id = lookupType(type);
2768 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2769 SPIRVOperandList Ops;
2770 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002771
David Neto862b7d82018-06-14 18:48:37 -04002772 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2773 SPIRVInstList.push_back(Inst);
2774
2775 // Map calls to the variable-builtin-function.
2776 for (auto &U : info->var_fn->uses()) {
2777 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2778 const auto set = unsigned(
2779 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2780 const auto binding = unsigned(
2781 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2782 if (set == info->descriptor_set && binding == info->binding) {
2783 switch (info->arg_kind) {
2784 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002785 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002786 case clspv::ArgKind::Pod:
2787 // The call maps to the variable directly.
2788 VMap[call] = info->var_id;
2789 break;
2790 case clspv::ArgKind::Sampler:
2791 case clspv::ArgKind::ReadOnlyImage:
2792 case clspv::ArgKind::WriteOnlyImage:
2793 // The call maps to a load we generate later.
2794 ResourceVarDeferredLoadCalls[call] = info->var_id;
2795 break;
2796 default:
2797 llvm_unreachable("Unhandled arg kind");
2798 }
2799 }
David Neto22f144c2017-06-12 14:26:21 -04002800 }
David Neto862b7d82018-06-14 18:48:37 -04002801 }
2802 }
David Neto22f144c2017-06-12 14:26:21 -04002803
David Neto862b7d82018-06-14 18:48:37 -04002804 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002805
David Neto862b7d82018-06-14 18:48:37 -04002806 // Find Insert Point for OpDecorate.
2807 auto DecoInsertPoint =
2808 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2809 [](SPIRVInstruction *Inst) -> bool {
2810 return Inst->getOpcode() != spv::OpDecorate &&
2811 Inst->getOpcode() != spv::OpMemberDecorate &&
2812 Inst->getOpcode() != spv::OpExtInstImport;
2813 });
2814
2815 SPIRVOperandList Ops;
2816 for (auto *info : ModuleOrderedResourceVars) {
2817 // Decorate with DescriptorSet and Binding.
2818 Ops.clear();
2819 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2820 << MkNum(info->descriptor_set);
2821 SPIRVInstList.insert(DecoInsertPoint,
2822 new SPIRVInstruction(spv::OpDecorate, Ops));
2823
2824 Ops.clear();
2825 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2826 << MkNum(info->binding);
2827 SPIRVInstList.insert(DecoInsertPoint,
2828 new SPIRVInstruction(spv::OpDecorate, Ops));
2829
alan-bakere9308012019-03-15 10:25:13 -04002830 if (info->coherent) {
2831 // Decorate with Coherent if required for the variable.
2832 Ops.clear();
2833 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2834 SPIRVInstList.insert(DecoInsertPoint,
2835 new SPIRVInstruction(spv::OpDecorate, Ops));
2836 }
2837
David Neto862b7d82018-06-14 18:48:37 -04002838    // Generate NonWritable and NonReadable decorations where appropriate.
2839 switch (info->arg_kind) {
2840 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002841 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002842 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2843 clspv::AddressSpace::Constant) {
2844 Ops.clear();
2845 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2846 SPIRVInstList.insert(DecoInsertPoint,
2847 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002848 }
David Neto862b7d82018-06-14 18:48:37 -04002849 break;
David Neto862b7d82018-06-14 18:48:37 -04002850 case clspv::ArgKind::WriteOnlyImage:
2851 Ops.clear();
2852 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2853 SPIRVInstList.insert(DecoInsertPoint,
2854 new SPIRVInstruction(spv::OpDecorate, Ops));
2855 break;
2856 default:
2857 break;
David Neto22f144c2017-06-12 14:26:21 -04002858 }
2859 }
2860}
2861
Kévin Petitbbbda972020-03-03 19:16:31 +00002862namespace {
2863
2864bool isScalarType(Type *type) {
2865 return type->isIntegerTy() || type->isFloatTy();
2866}
2867
2868uint64_t structAlignment(StructType *type,
2869 std::function<uint64_t(Type *)> alignFn) {
2870 uint64_t maxAlign = 1;
2871 for (unsigned i = 0; i < type->getStructNumElements(); i++) {
2872 uint64_t align = alignFn(type->getStructElementType(i));
2873 maxAlign = std::max(align, maxAlign);
2874 }
2875 return maxAlign;
2876}
2877
2878uint64_t scalarAlignment(Type *type) {
2879 // A scalar of size N has a scalar alignment of N.
2880 if (isScalarType(type)) {
2881 return type->getScalarSizeInBits() / 8;
2882 }
2883
2884 // A vector or matrix type has a scalar alignment equal to that of its
2885 // component type.
2886 if (type->isVectorTy()) {
2887 return scalarAlignment(type->getVectorElementType());
2888 }
2889
2890 // An array type has a scalar alignment equal to that of its element type.
2891 if (type->isArrayTy()) {
2892 return scalarAlignment(type->getArrayElementType());
2893 }
2894
2895 // A structure has a scalar alignment equal to the largest scalar alignment of
2896 // any of its members.
2897 if (type->isStructTy()) {
2898 return structAlignment(cast<StructType>(type), scalarAlignment);
2899 }
2900
2901 llvm_unreachable("Unsupported type");
2902}
2903
2904uint64_t baseAlignment(Type *type) {
2905 // A scalar has a base alignment equal to its scalar alignment.
2906 if (isScalarType(type)) {
2907 return scalarAlignment(type);
2908 }
2909
2910 if (type->isVectorTy()) {
2911 unsigned numElems = type->getVectorNumElements();
2912
2913 // A two-component vector has a base alignment equal to twice its scalar
2914 // alignment.
2915 if (numElems == 2) {
2916 return 2 * scalarAlignment(type);
2917 }
2918 // A three- or four-component vector has a base alignment equal to four
2919 // times its scalar alignment.
2920 if ((numElems == 3) || (numElems == 4)) {
2921 return 4 * scalarAlignment(type);
2922 }
2923 }
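  // For example, with 4-byte floats these rules give a float2 a base alignment
  // of 8 and a float3 or float4 a base alignment of 16.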
2924
2925 // An array has a base alignment equal to the base alignment of its element
2926 // type.
2927 if (type->isArrayTy()) {
2928 return baseAlignment(type->getArrayElementType());
2929 }
2930
2931 // A structure has a base alignment equal to the largest base alignment of any
2932 // of its members.
2933 if (type->isStructTy()) {
2934 return structAlignment(cast<StructType>(type), baseAlignment);
2935 }
2936
2937 // TODO A row-major matrix of C columns has a base alignment equal to the base
2938 // alignment of a vector of C matrix components.
2939 // TODO A column-major matrix has a base alignment equal to the base alignment
2940 // of the matrix column type.
2941
2942 llvm_unreachable("Unsupported type");
2943}
2944
2945uint64_t extendedAlignment(Type *type) {
2946 // A scalar, vector or matrix type has an extended alignment equal to its base
2947 // alignment.
2948 // TODO matrix type
2949 if (isScalarType(type) || type->isVectorTy()) {
2950 return baseAlignment(type);
2951 }
2952
2953 // An array or structure type has an extended alignment equal to the largest
2954 // extended alignment of any of its members, rounded up to a multiple of 16
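  // (e.g. a struct whose only member is a float has an extended alignment of
  // 16, even though its scalar and base alignments are only 4.)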
2955 if (type->isStructTy()) {
2956 auto salign = structAlignment(cast<StructType>(type), extendedAlignment);
2957 return alignTo(salign, 16);
2958 }
2959
2960 if (type->isArrayTy()) {
2961 auto salign = extendedAlignment(type->getArrayElementType());
2962 return alignTo(salign, 16);
2963 }
2964
2965 llvm_unreachable("Unsupported type");
2966}
2967
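// Summarizes the rules applied below: scalar alignment when scalar block
// layout is enabled, scalar alignment for vectors, extended (roughly
// std140-style) alignment for Uniform members unless std430 uniform buffer
// layout is requested, and base alignment otherwise.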
2968uint64_t standardAlignment(Type *type, spv::StorageClass sclass) {
2969 // If the scalarBlockLayout feature is enabled on the device then every member
2970 // must be aligned according to its scalar alignment
2971 if (clspv::Option::ScalarBlockLayout()) {
2972 return scalarAlignment(type);
2973 }
2974
2975 // All vectors must be aligned according to their scalar alignment
2976 if (type->isVectorTy()) {
2977 return scalarAlignment(type);
2978 }
2979
2980 // If the uniformBufferStandardLayout feature is not enabled on the device,
2981 // then any member of an OpTypeStruct with a storage class of Uniform and a
2982 // decoration of Block must be aligned according to its extended alignment.
2983 if (!clspv::Option::Std430UniformBufferLayout() &&
2984 sclass == spv::StorageClassUniform) {
2985 return extendedAlignment(type);
2986 }
2987
2988 // Every other member must be aligned according to its base alignment
2989 return baseAlignment(type);
2990}
2991
2992bool improperlyStraddles(const DataLayout &DL, Type *type, unsigned offset) {
2993 assert(type->isVectorTy());
2994
2995 auto size = DL.getTypeStoreSize(type);
2996
2997 // It is a vector with total size less than or equal to 16 bytes, and has
2998 // Offset decorations placing its first byte at F and its last byte at L,
2999 // where floor(F / 16) != floor(L / 16).
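  // For example, a 16-byte vector at offset 8 spans bytes 8..23, and
  // floor(8 / 16) == 0 while floor(23 / 16) == 1, so it straddles.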
3000 if ((size <= 16) && (offset % 16 + size > 16)) {
3001 return true;
3002 }
3003
3004 // It is a vector with total size greater than 16 bytes and has its Offset
3005 // decorations placing its first byte at a non-integer multiple of 16
3006 if ((size > 16) && (offset % 16 != 0)) {
3007 return true;
3008 }
3009
3010 return false;
3011}
3012
3013// See 14.5 Shader Resource Interface in Vulkan spec
3014bool isValidExplicitLayout(Module &M, StructType *STy, unsigned Member,
3015 spv::StorageClass SClass, unsigned Offset,
3016 unsigned PreviousMemberOffset) {
3017
3018 auto MemberType = STy->getElementType(Member);
3019 auto Align = standardAlignment(MemberType, SClass);
3020 auto &DL = M.getDataLayout();
3021
3022 // The Offset decoration of any member must be a multiple of its alignment
3023 if (Offset % Align != 0) {
3024 return false;
3025 }
3026
3027 // TODO Any ArrayStride or MatrixStride decoration must be a multiple of the
3028 // alignment of the array or matrix as defined above
3029
3030 if (!clspv::Option::ScalarBlockLayout()) {
3031 // Vectors must not improperly straddle, as defined above
3032 if (MemberType->isVectorTy() &&
3033 improperlyStraddles(DL, MemberType, Offset)) {
3034        return false;
3035 }
3036
3037 // The Offset decoration of a member must not place it between the end
3038 // of a structure or an array and the next multiple of the alignment of that
3039 // structure or array
3040 if (Member > 0) {
3041 auto PType = STy->getElementType(Member - 1);
3042 if (PType->isStructTy() || PType->isArrayTy()) {
3043 auto PAlign = standardAlignment(PType, SClass);
3044 if (Offset - PreviousMemberOffset < PAlign) {
3045 return false;
3046 }
3047 }
3048 }
3049 }
3050
3051 return true;
3052}
3053
3054} // namespace
3055
3056void SPIRVProducerPass::GeneratePushConstantDescriptormapEntries(Module &M) {
3057
3058 if (auto GV = M.getGlobalVariable(clspv::PushConstantsVariableName())) {
3059 auto const &DL = M.getDataLayout();
3060 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
3061 auto STy = cast<StructType>(GV->getValueType());
3062
3063 for (unsigned i = 0; i < STy->getNumElements(); i++) {
3064 auto pc = static_cast<clspv::PushConstant>(
3065 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
3066 auto memberType = STy->getElementType(i);
3067 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
3068 unsigned previousOffset = 0;
3069 if (i > 0) {
3070 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
3071 }
3072 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
3073 assert(isValidExplicitLayout(M, STy, i, spv::StorageClassPushConstant,
3074 offset, previousOffset));
3075 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
3076 descriptorMapEntries->emplace_back(std::move(data));
3077 }
3078 }
3079}
3080
David Neto22f144c2017-06-12 14:26:21 -04003081void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003082 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04003083 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3084 ValueMapType &VMap = getValueMap();
3085 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07003086 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04003087
3088 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
3089 Type *Ty = GV.getType();
3090 PointerType *PTy = cast<PointerType>(Ty);
3091
3092 uint32_t InitializerID = 0;
3093
3094 // Workgroup size is handled differently (it goes into a constant)
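  // Sketch of the logic below: when every kernel carries the same
  // reqd_work_group_size metadata, the initializer becomes an
  // OpConstantComposite of those three constants; otherwise three
  // OpSpecConstants plus an OpSpecConstantComposite are generated so the
  // sizes can be specialized later.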
3095 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3096 std::vector<bool> HasMDVec;
3097 uint32_t PrevXDimCst = 0xFFFFFFFF;
3098 uint32_t PrevYDimCst = 0xFFFFFFFF;
3099 uint32_t PrevZDimCst = 0xFFFFFFFF;
3100 for (Function &Func : *GV.getParent()) {
3101 if (Func.isDeclaration()) {
3102 continue;
3103 }
3104
3105 // We only need to check kernels.
3106 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
3107 continue;
3108 }
3109
3110 if (const MDNode *MD =
3111 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
3112 uint32_t CurXDimCst = static_cast<uint32_t>(
3113 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3114 uint32_t CurYDimCst = static_cast<uint32_t>(
3115 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3116 uint32_t CurZDimCst = static_cast<uint32_t>(
3117 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3118
3119 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
3120 PrevZDimCst == 0xFFFFFFFF) {
3121 PrevXDimCst = CurXDimCst;
3122 PrevYDimCst = CurYDimCst;
3123 PrevZDimCst = CurZDimCst;
3124 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
3125 CurZDimCst != PrevZDimCst) {
3126 llvm_unreachable(
3127 "reqd_work_group_size must be the same across all kernels");
3128 } else {
3129 continue;
3130 }
3131
3132 //
3133 // Generate OpConstantComposite.
3134 //
3135 // Ops[0] : Result Type ID
3136 // Ops[1] : Constant size for x dimension.
3137 // Ops[2] : Constant size for y dimension.
3138 // Ops[3] : Constant size for z dimension.
3139 SPIRVOperandList Ops;
3140
3141 uint32_t XDimCstID =
3142 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
3143 uint32_t YDimCstID =
3144 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
3145 uint32_t ZDimCstID =
3146 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
3147
3148 InitializerID = nextID;
3149
David Neto257c3892018-04-11 13:19:45 -04003150 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3151 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003152
David Neto87846742018-04-11 17:36:22 -04003153 auto *Inst =
3154 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003155 SPIRVInstList.push_back(Inst);
3156
3157 HasMDVec.push_back(true);
3158 } else {
3159 HasMDVec.push_back(false);
3160 }
3161 }
3162
3163    // Check that all kernels have the same definition of the work group size.
3164 bool HasMD = false;
3165 if (!HasMDVec.empty()) {
3166 HasMD = HasMDVec[0];
3167 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3168 if (HasMD != HasMDVec[i]) {
3169 llvm_unreachable(
3170 "Kernels should have consistent work group size definition");
3171 }
3172 }
3173 }
3174
3175    // If the kernels do not carry reqd_work_group_size metadata, generate
3176    // OpSpecConstants for the x/y/z dimensions.
3177 if (!HasMD) {
3178 //
3179 // Generate OpSpecConstants for x/y/z dimension.
3180 //
3181 // Ops[0] : Result Type ID
3182 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3183 uint32_t XDimCstID = 0;
3184 uint32_t YDimCstID = 0;
3185 uint32_t ZDimCstID = 0;
3186
David Neto22f144c2017-06-12 14:26:21 -04003187 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04003188 uint32_t result_type_id =
3189 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04003190
David Neto257c3892018-04-11 13:19:45 -04003191 // X Dimension
3192 Ops << MkId(result_type_id) << MkNum(1);
3193 XDimCstID = nextID++;
3194 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003195 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003196
3197 // Y Dimension
3198 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003199 Ops << MkId(result_type_id) << MkNum(1);
3200 YDimCstID = nextID++;
3201 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003202 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003203
3204 // Z Dimension
3205 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003206 Ops << MkId(result_type_id) << MkNum(1);
3207 ZDimCstID = nextID++;
3208 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003209 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003210
David Neto257c3892018-04-11 13:19:45 -04003211 BuiltinDimVec.push_back(XDimCstID);
3212 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003213 BuiltinDimVec.push_back(ZDimCstID);
3214
David Neto22f144c2017-06-12 14:26:21 -04003215 //
3216 // Generate OpSpecConstantComposite.
3217 //
3218 // Ops[0] : Result Type ID
3219 // Ops[1] : Constant size for x dimension.
3220 // Ops[2] : Constant size for y dimension.
3221 // Ops[3] : Constant size for z dimension.
3222 InitializerID = nextID;
3223
3224 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003225 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3226 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003227
David Neto87846742018-04-11 17:36:22 -04003228 auto *Inst =
3229 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003230 SPIRVInstList.push_back(Inst);
3231 }
3232 }
3233
David Neto22f144c2017-06-12 14:26:21 -04003234 VMap[&GV] = nextID;
3235
3236 //
3237 // Generate OpVariable.
3238 //
3239 // GIDOps[0] : Result Type ID
3240 // GIDOps[1] : Storage Class
3241 SPIRVOperandList Ops;
3242
David Neto85082642018-03-24 06:55:20 -07003243 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003244 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003245
David Neto85082642018-03-24 06:55:20 -07003246 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003247 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003248 clspv::Option::ModuleConstantsInStorageBuffer();
3249
Kévin Petit23d5f182019-08-13 16:21:29 +01003250 if (GV.hasInitializer()) {
3251 auto GVInit = GV.getInitializer();
3252 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3253 assert(VMap.count(GVInit) == 1);
3254 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003255 }
3256 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003257
3258 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003259 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003260 Ops << MkId(InitializerID);
3261 }
David Neto85082642018-03-24 06:55:20 -07003262 const uint32_t var_id = nextID++;
3263
David Neto87846742018-04-11 17:36:22 -04003264 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003265 SPIRVInstList.push_back(Inst);
3266
3267 // If we have a builtin.
3268 if (spv::BuiltInMax != BuiltinType) {
3269 // Find Insert Point for OpDecorate.
3270 auto DecoInsertPoint =
3271 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3272 [](SPIRVInstruction *Inst) -> bool {
3273 return Inst->getOpcode() != spv::OpDecorate &&
3274 Inst->getOpcode() != spv::OpMemberDecorate &&
3275 Inst->getOpcode() != spv::OpExtInstImport;
3276 });
3277 //
3278 // Generate OpDecorate.
3279 //
3280 // DOps[0] = Target ID
3281 // DOps[1] = Decoration (Builtin)
3282 // DOps[2] = BuiltIn ID
3283 uint32_t ResultID;
3284
3285    // WorkgroupSize is different: we decorate the constant composite that holds
3286    // its value, rather than the variable that we use to access the value.
3287 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3288 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003289 // Save both the value and variable IDs for later.
3290 WorkgroupSizeValueID = InitializerID;
3291 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003292 } else {
3293 ResultID = VMap[&GV];
3294 }
3295
3296 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003297 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3298 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003299
David Neto87846742018-04-11 17:36:22 -04003300 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003301 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003302 } else if (module_scope_constant_external_init) {
3303 // This module scope constant is initialized from a storage buffer with data
3304 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003305 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003306
David Neto862b7d82018-06-14 18:48:37 -04003307    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003308 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3309 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003310 std::string hexbytes;
3311 llvm::raw_string_ostream str(hexbytes);
3312 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003313 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3314 str.str()};
3315 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3316 0);
David Neto85082642018-03-24 06:55:20 -07003317
3318 // Find Insert Point for OpDecorate.
3319 auto DecoInsertPoint =
3320 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3321 [](SPIRVInstruction *Inst) -> bool {
3322 return Inst->getOpcode() != spv::OpDecorate &&
3323 Inst->getOpcode() != spv::OpMemberDecorate &&
3324 Inst->getOpcode() != spv::OpExtInstImport;
3325 });
3326
David Neto257c3892018-04-11 13:19:45 -04003327 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003328 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003329 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3330 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003331 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003332
3333 // OpDecorate %var DescriptorSet <descriptor_set>
3334 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003335 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3336 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003337 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003338 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003339 }
3340}
3341
David Netoc6f3ab22018-04-06 18:02:31 -04003342void SPIRVProducerPass::GenerateWorkgroupVars() {
3343 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003344 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3345 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003346 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003347
3348 // Generate OpVariable.
3349 //
3350 // GIDOps[0] : Result Type ID
3351 // GIDOps[1] : Storage Class
3352 SPIRVOperandList Ops;
3353 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3354
3355 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003356 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003357 }
3358}
3359
David Neto862b7d82018-06-14 18:48:37 -04003360void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3361 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003362 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3363 return;
3364 }
David Neto862b7d82018-06-14 18:48:37 -04003365 // Gather the list of resources that are used by this function's arguments.
3366 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3367
alan-bakerf5e5f692018-11-27 08:33:24 -05003368 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3369 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003370 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003371 std::string kind =
3372 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3373 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003374 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003375 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003376 };
3377
3378 auto *fty = F.getType()->getPointerElementType();
3379 auto *func_ty = dyn_cast<FunctionType>(fty);
3380
alan-baker038e9242019-04-19 22:14:41 -04003381 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003382 // If an argument maps to a resource variable, then get descriptor set and
3383  // binding from the resource variable. Other info comes from the metadata.
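  // Each "kernel_arg_map" entry is expected to carry seven operands, read
  // below in this order:
  //   {name, old_index, new_index, offset, arg_size, arg_kind, spec_id}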
3384 const auto *arg_map = F.getMetadata("kernel_arg_map");
3385 if (arg_map) {
3386 for (const auto &arg : arg_map->operands()) {
3387 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003388 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003389 const auto name =
3390 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3391 const auto old_index =
3392 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3393 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003394 const size_t new_index = static_cast<size_t>(
3395 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003396 const auto offset =
3397 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003398 const auto arg_size =
3399 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003400 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003401 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003402 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003403 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003404
3405 uint32_t descriptor_set = 0;
3406 uint32_t binding = 0;
3407 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003408 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3409 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003410 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003411 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003412 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003413 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3414 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3415 DL));
David Neto862b7d82018-06-14 18:48:37 -04003416 } else {
3417 auto *info = resource_var_at_index[new_index];
3418 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003419 descriptor_set = info->descriptor_set;
3420 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003421 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003422 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3423 binding);
David Neto862b7d82018-06-14 18:48:37 -04003424 }
3425 } else {
3426 // There is no argument map.
3427 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003428 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003429
3430 SmallVector<Argument *, 4> arguments;
3431 for (auto &arg : F.args()) {
3432 arguments.push_back(&arg);
3433 }
3434
3435 unsigned arg_index = 0;
3436 for (auto *info : resource_var_at_index) {
3437 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003438 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003439 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003440 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003441 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003442 }
3443
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003444 // Local pointer arguments are unused in this case. Offset is always
3445 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003446 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003447 F.getName().str(),
3448 arg->getName().str(),
3449 arg_index,
3450 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3451 0,
3452 0,
3453 0,
3454 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003455 descriptorMapEntries->emplace_back(std::move(kernel_data),
3456 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003457 }
3458 arg_index++;
3459 }
3460 // Generate mappings for pointer-to-local arguments.
3461 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3462 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003463 auto where = LocalArgSpecIds.find(arg);
3464 if (where != LocalArgSpecIds.end()) {
3465 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003466        // Pod argument members are unused in this case.
3467 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003468 F.getName().str(),
3469 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003470 arg_index,
3471 ArgKind::Local,
3472 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003473 static_cast<uint32_t>(
3474 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003475 0,
3476 0};
3477 // Pointer-to-local arguments do not utilize descriptor set and binding.
3478 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003479 }
3480 }
3481 }
3482}
3483
David Neto22f144c2017-06-12 14:26:21 -04003484void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3485 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3486 ValueMapType &VMap = getValueMap();
3487 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003488 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3489 auto &GlobalConstArgSet = getGlobalConstArgSet();
3490
3491 FunctionType *FTy = F.getFunctionType();
3492
3493 //
David Neto22f144c2017-06-12 14:26:21 -04003494  // Generate OpFunction.
3495 //
3496
3497 // FOps[0] : Result Type ID
3498 // FOps[1] : Function Control
3499 // FOps[2] : Function Type ID
3500 SPIRVOperandList FOps;
3501
3502 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003503 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003504
3505 // Check function attributes for SPIRV Function Control.
3506 uint32_t FuncControl = spv::FunctionControlMaskNone;
3507 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3508 FuncControl |= spv::FunctionControlInlineMask;
3509 }
3510 if (F.hasFnAttribute(Attribute::NoInline)) {
3511 FuncControl |= spv::FunctionControlDontInlineMask;
3512 }
3513 // TODO: Check llvm attribute for Function Control Pure.
3514 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3515 FuncControl |= spv::FunctionControlPureMask;
3516 }
3517 // TODO: Check llvm attribute for Function Control Const.
3518 if (F.hasFnAttribute(Attribute::ReadNone)) {
3519 FuncControl |= spv::FunctionControlConstMask;
3520 }
3521
David Neto257c3892018-04-11 13:19:45 -04003522 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003523
3524 uint32_t FTyID;
3525 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3526 SmallVector<Type *, 4> NewFuncParamTys;
3527 FunctionType *NewFTy =
3528 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3529 FTyID = lookupType(NewFTy);
3530 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003531 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003532 if (GlobalConstFuncTyMap.count(FTy)) {
3533 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3534 } else {
3535 FTyID = lookupType(FTy);
3536 }
3537 }
3538
David Neto257c3892018-04-11 13:19:45 -04003539 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003540
3541 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3542 EntryPoints.push_back(std::make_pair(&F, nextID));
3543 }
3544
3545 VMap[&F] = nextID;
3546
David Neto482550a2018-03-24 05:21:07 -07003547 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003548 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3549 }
David Neto22f144c2017-06-12 14:26:21 -04003550 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003551 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003552 SPIRVInstList.push_back(FuncInst);
3553
3554 //
3555 // Generate OpFunctionParameter for Normal function.
3556 //
3557
3558 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003559
3560 // Find Insert Point for OpDecorate.
3561 auto DecoInsertPoint =
3562 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3563 [](SPIRVInstruction *Inst) -> bool {
3564 return Inst->getOpcode() != spv::OpDecorate &&
3565 Inst->getOpcode() != spv::OpMemberDecorate &&
3566 Inst->getOpcode() != spv::OpExtInstImport;
3567 });
3568
David Neto22f144c2017-06-12 14:26:21 -04003569    // Iterate over the arguments (to get names) rather than the parameter types of the function type.
3570 unsigned ArgIdx = 0;
3571 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003572 uint32_t param_id = nextID++;
3573 VMap[&Arg] = param_id;
3574
3575 if (CalledWithCoherentResource(Arg)) {
3576 // If the arg is passed a coherent resource ever, then decorate this
3577 // parameter with Coherent too.
3578 SPIRVOperandList decoration_ops;
3579 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003580 SPIRVInstList.insert(
3581 DecoInsertPoint,
3582 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003583 }
David Neto22f144c2017-06-12 14:26:21 -04003584
3585 // ParamOps[0] : Result Type ID
3586 SPIRVOperandList ParamOps;
3587
3588 // Find SPIRV instruction for parameter type.
3589 uint32_t ParamTyID = lookupType(Arg.getType());
3590 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3591 if (GlobalConstFuncTyMap.count(FTy)) {
3592 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3593 Type *EleTy = PTy->getPointerElementType();
3594 Type *ArgTy =
3595 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3596 ParamTyID = lookupType(ArgTy);
3597 GlobalConstArgSet.insert(&Arg);
3598 }
3599 }
3600 }
David Neto257c3892018-04-11 13:19:45 -04003601 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003602
3603 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003604 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003605 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003606 SPIRVInstList.push_back(ParamInst);
3607
3608 ArgIdx++;
3609 }
3610 }
3611}
3612
alan-bakerb6b09dc2018-11-08 16:59:28 -05003613void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003614 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3615 EntryPointVecType &EntryPoints = getEntryPointVec();
3616 ValueMapType &VMap = getValueMap();
3617 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3618 uint32_t &ExtInstImportID = getOpExtInstImportID();
3619 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3620
3621 // Set up insert point.
3622 auto InsertPoint = SPIRVInstList.begin();
3623
3624 //
3625 // Generate OpCapability
3626 //
3627  // TODO: Which llvm information is mapped to SPIRV Capability?
3628
3629 // Ops[0] = Capability
3630 SPIRVOperandList Ops;
3631
David Neto87846742018-04-11 17:36:22 -04003632 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003633 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003634 SPIRVInstList.insert(InsertPoint, CapInst);
3635
alan-bakerf906d2b2019-12-10 11:26:23 -05003636 bool write_without_format = false;
3637 bool sampled_1d = false;
3638 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003639 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003640 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3641 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003642 SPIRVInstList.insert(
3643 InsertPoint,
3644 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003645 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003646 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003647 SPIRVInstList.insert(
3648 InsertPoint,
3649 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003650 } else if (Ty->isIntegerTy(64)) {
3651 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003652 SPIRVInstList.insert(
3653 InsertPoint,
3654 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003655 } else if (Ty->isHalfTy()) {
3656 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003657 SPIRVInstList.insert(InsertPoint,
3658 new SPIRVInstruction(spv::OpCapability,
3659 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003660 } else if (Ty->isDoubleTy()) {
3661 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003662 SPIRVInstList.insert(InsertPoint,
3663 new SPIRVInstruction(spv::OpCapability,
3664 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003665 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3666 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003667 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003668 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003669 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003670 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003671 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003672 write_without_format = true;
3673 }
3674 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003675 STy->getName().startswith("opencl.image1d_wo_t") ||
3676 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3677 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003678 if (STy->getName().contains(".sampled"))
3679 sampled_1d = true;
3680 else
3681 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003682 }
3683 }
3684 }
3685 }
3686
alan-bakerf906d2b2019-12-10 11:26:23 -05003687 if (write_without_format) {
3688 // Generate OpCapability for write only image type.
3689 SPIRVInstList.insert(
3690 InsertPoint,
3691 new SPIRVInstruction(
3692 spv::OpCapability,
3693 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3694 }
3695 if (image_1d) {
3696 // Generate OpCapability for unsampled 1D image type.
3697 SPIRVInstList.insert(InsertPoint,
3698 new SPIRVInstruction(spv::OpCapability,
3699 {MkNum(spv::CapabilityImage1D)}));
3700 } else if (sampled_1d) {
3701 // Generate OpCapability for sampled 1D image type.
3702 SPIRVInstList.insert(
3703 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3704 {MkNum(spv::CapabilitySampled1D)}));
3705 }
3706
David Neto5c22a252018-03-15 16:07:41 -04003707 { // OpCapability ImageQuery
3708 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003709 for (const auto &SymVal : module.getValueSymbolTable()) {
3710 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
SJW173c7e92020-03-16 08:44:47 -05003711 if (IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003712 hasImageQuery = true;
3713 break;
3714 }
David Neto5c22a252018-03-15 16:07:41 -04003715 }
3716 }
alan-bakerf67468c2019-11-25 15:51:49 -05003717
David Neto5c22a252018-03-15 16:07:41 -04003718 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003719 auto *ImageQueryCapInst = new SPIRVInstruction(
3720 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003721 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3722 }
3723 }
3724
David Neto22f144c2017-06-12 14:26:21 -04003725 if (hasVariablePointers()) {
3726 //
David Neto22f144c2017-06-12 14:26:21 -04003727 // Generate OpCapability.
3728 //
3729 // Ops[0] = Capability
3730 //
3731 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003732 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003733
David Neto87846742018-04-11 17:36:22 -04003734 SPIRVInstList.insert(InsertPoint,
3735 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003736 } else if (hasVariablePointersStorageBuffer()) {
3737 //
3738 // Generate OpCapability.
3739 //
3740 // Ops[0] = Capability
3741 //
3742 Ops.clear();
3743 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003744
alan-baker5b86ed72019-02-15 08:26:50 -05003745 SPIRVInstList.insert(InsertPoint,
3746 new SPIRVInstruction(spv::OpCapability, Ops));
3747 }
3748
3749 // Always add the storage buffer extension
3750 {
David Neto22f144c2017-06-12 14:26:21 -04003751 //
3752 // Generate OpExtension.
3753 //
3754 // Ops[0] = Name (Literal String)
3755 //
alan-baker5b86ed72019-02-15 08:26:50 -05003756 auto *ExtensionInst = new SPIRVInstruction(
3757 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3758 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3759 }
David Neto22f144c2017-06-12 14:26:21 -04003760
alan-baker5b86ed72019-02-15 08:26:50 -05003761 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3762 //
3763 // Generate OpExtension.
3764 //
3765 // Ops[0] = Name (Literal String)
3766 //
3767 auto *ExtensionInst = new SPIRVInstruction(
3768 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3769 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003770 }
3771
3772 if (ExtInstImportID) {
3773 ++InsertPoint;
3774 }
3775
3776 //
3777 // Generate OpMemoryModel
3778 //
3779 // Memory model for Vulkan will always be GLSL450.
3780
3781 // Ops[0] = Addressing Model
3782 // Ops[1] = Memory Model
3783 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003784 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003785
David Neto87846742018-04-11 17:36:22 -04003786 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003787 SPIRVInstList.insert(InsertPoint, MemModelInst);
3788
3789 //
3790 // Generate OpEntryPoint
3791 //
3792 for (auto EntryPoint : EntryPoints) {
3793 // Ops[0] = Execution Model
3794 // Ops[1] = EntryPoint ID
3795 // Ops[2] = Name (Literal String)
3796 // ...
3797 //
3798 // TODO: Do we need to consider Interface ID for forward references???
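    // For example (a sketch, with illustrative IDs): a kernel named "foo"
    // produces roughly:
    //   OpEntryPoint GLCompute %foo "foo" %interface_var_0 %interface_var_1 ...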
3799 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003800 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003801 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3802 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003803
David Neto22f144c2017-06-12 14:26:21 -04003804 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003805 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003806 }
3807
David Neto87846742018-04-11 17:36:22 -04003808 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003809 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3810 }
3811
3812 for (auto EntryPoint : EntryPoints) {
3813 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3814 ->getMetadata("reqd_work_group_size")) {
3815
3816 if (!BuiltinDimVec.empty()) {
3817 llvm_unreachable(
3818 "Kernels should have consistent work group size definition");
3819 }
3820
3821 //
3822 // Generate OpExecutionMode
3823 //
3824
3825 // Ops[0] = Entry Point ID
3826 // Ops[1] = Execution Mode
3827 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
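      // For example (a sketch): __attribute__((reqd_work_group_size(8, 4, 1)))
      // on the kernel produces roughly:
      //   OpExecutionMode %kernel LocalSize 8 4 1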
3828 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003829 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003830
3831 uint32_t XDim = static_cast<uint32_t>(
3832 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3833 uint32_t YDim = static_cast<uint32_t>(
3834 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3835 uint32_t ZDim = static_cast<uint32_t>(
3836 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3837
David Neto257c3892018-04-11 13:19:45 -04003838 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003839
David Neto87846742018-04-11 17:36:22 -04003840 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003841 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3842 }
3843 }
3844
3845 //
3846 // Generate OpSource.
3847 //
3848 // Ops[0] = SourceLanguage ID
3849 // Ops[1] = Version (LiteralNum)
3850 //
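  // For example (a sketch): when the source language is OpenCL C 1.2, this
  // emits roughly 'OpSource OpenCL_C 120'.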
3851 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003852 switch (clspv::Option::Language()) {
3853 case clspv::Option::SourceLanguage::OpenCL_C_10:
3854 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3855 break;
3856 case clspv::Option::SourceLanguage::OpenCL_C_11:
3857 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3858 break;
3859 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003860 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003861 break;
3862 case clspv::Option::SourceLanguage::OpenCL_C_20:
3863 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3864 break;
3865 case clspv::Option::SourceLanguage::OpenCL_CPP:
3866 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3867 break;
3868 default:
3869 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3870 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003871 }
David Neto22f144c2017-06-12 14:26:21 -04003872
David Neto87846742018-04-11 17:36:22 -04003873 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003874 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3875
3876 if (!BuiltinDimVec.empty()) {
3877 //
3878 // Generate OpDecorates for x/y/z dimension.
3879 //
3880 // Ops[0] = Target ID
3881 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003882 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003883
3884 // X Dimension
3885 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003886 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003887 SPIRVInstList.insert(InsertPoint,
3888 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003889
3890 // Y Dimension
3891 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003892 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003893 SPIRVInstList.insert(InsertPoint,
3894 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003895
3896 // Z Dimension
3897 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003898 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003899 SPIRVInstList.insert(InsertPoint,
3900 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003901 }
3902}
3903
David Netob6e2e062018-04-25 10:32:06 -04003904void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3905 // Work around a driver bug. Initializers on Private variables might not
3906 // work. So the start of the kernel should store the initializer value to the
3907 // variables. Yes, *every* entry point pays this cost if *any* entry point
3908 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3909 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003910 // TODO(dneto): Remove this at some point once fixed drivers are widely
3911 // available.
David Netob6e2e062018-04-25 10:32:06 -04003912 if (WorkgroupSizeVarID) {
3913 assert(WorkgroupSizeValueID);
3914
3915 SPIRVOperandList Ops;
3916 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3917
3918 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3919 getSPIRVInstList().push_back(Inst);
3920 }
3921}
3922
David Neto22f144c2017-06-12 14:26:21 -04003923void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3924 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3925 ValueMapType &VMap = getValueMap();
3926
David Netob6e2e062018-04-25 10:32:06 -04003927 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003928
3929 for (BasicBlock &BB : F) {
3930 // Register BasicBlock to ValueMap.
3931 VMap[&BB] = nextID;
3932
3933 //
3934 // Generate OpLabel for Basic Block.
3935 //
3936 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003937 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003938 SPIRVInstList.push_back(Inst);
3939
David Neto6dcd4712017-06-23 11:06:47 -04003940 // OpVariable instructions must come first.
3941 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003942 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3943 // Allocating a pointer requires variable pointers.
3944 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003945 setVariablePointersCapabilities(
3946 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003947 }
David Neto6dcd4712017-06-23 11:06:47 -04003948 GenerateInstruction(I);
3949 }
3950 }
3951
David Neto22f144c2017-06-12 14:26:21 -04003952 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003953 if (clspv::Option::HackInitializers()) {
3954 GenerateEntryPointInitialStores();
3955 }
David Neto22f144c2017-06-12 14:26:21 -04003956 }
3957
3958 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003959 if (!isa<AllocaInst>(I)) {
3960 GenerateInstruction(I);
3961 }
David Neto22f144c2017-06-12 14:26:21 -04003962 }
3963 }
3964}
3965
3966spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3967 const std::map<CmpInst::Predicate, spv::Op> Map = {
3968 {CmpInst::ICMP_EQ, spv::OpIEqual},
3969 {CmpInst::ICMP_NE, spv::OpINotEqual},
3970 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3971 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3972 {CmpInst::ICMP_ULT, spv::OpULessThan},
3973 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3974 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3975 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3976 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3977 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3978 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3979 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3980 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3981 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3982 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3983 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3984 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3985 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3986 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3987 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3988 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3989 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3990
3991 assert(0 != Map.count(I->getPredicate()));
3992
3993 return Map.at(I->getPredicate());
3994}
3995
3996spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3997 const std::map<unsigned, spv::Op> Map{
3998 {Instruction::Trunc, spv::OpUConvert},
3999 {Instruction::ZExt, spv::OpUConvert},
4000 {Instruction::SExt, spv::OpSConvert},
4001 {Instruction::FPToUI, spv::OpConvertFToU},
4002 {Instruction::FPToSI, spv::OpConvertFToS},
4003 {Instruction::UIToFP, spv::OpConvertUToF},
4004 {Instruction::SIToFP, spv::OpConvertSToF},
4005 {Instruction::FPTrunc, spv::OpFConvert},
4006 {Instruction::FPExt, spv::OpFConvert},
4007 {Instruction::BitCast, spv::OpBitcast}};
4008
4009 assert(0 != Map.count(I.getOpcode()));
4010
4011 return Map.at(I.getOpcode());
4012}
4013
4014spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00004015 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004016 switch (I.getOpcode()) {
4017 default:
4018 break;
4019 case Instruction::Or:
4020 return spv::OpLogicalOr;
4021 case Instruction::And:
4022 return spv::OpLogicalAnd;
4023 case Instruction::Xor:
4024 return spv::OpLogicalNotEqual;
4025 }
4026 }
4027
alan-bakerb6b09dc2018-11-08 16:59:28 -05004028 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04004029 {Instruction::Add, spv::OpIAdd},
4030 {Instruction::FAdd, spv::OpFAdd},
4031 {Instruction::Sub, spv::OpISub},
4032 {Instruction::FSub, spv::OpFSub},
4033 {Instruction::Mul, spv::OpIMul},
4034 {Instruction::FMul, spv::OpFMul},
4035 {Instruction::UDiv, spv::OpUDiv},
4036 {Instruction::SDiv, spv::OpSDiv},
4037 {Instruction::FDiv, spv::OpFDiv},
4038 {Instruction::URem, spv::OpUMod},
4039 {Instruction::SRem, spv::OpSRem},
4040 {Instruction::FRem, spv::OpFRem},
4041 {Instruction::Or, spv::OpBitwiseOr},
4042 {Instruction::Xor, spv::OpBitwiseXor},
4043 {Instruction::And, spv::OpBitwiseAnd},
4044 {Instruction::Shl, spv::OpShiftLeftLogical},
4045 {Instruction::LShr, spv::OpShiftRightLogical},
4046 {Instruction::AShr, spv::OpShiftRightArithmetic}};
4047
4048 assert(0 != Map.count(I.getOpcode()));
4049
4050 return Map.at(I.getOpcode());
4051}
4052
4053void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
4054 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4055 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04004056 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4057 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
4058
4059 // Register Instruction to ValueMap.
4060 if (0 == VMap[&I]) {
4061 VMap[&I] = nextID;
4062 }
4063
4064 switch (I.getOpcode()) {
4065 default: {
4066 if (Instruction::isCast(I.getOpcode())) {
4067 //
4068 // Generate SPIRV instructions for cast operators.
4069 //
4070
David Netod2de94a2017-08-28 17:27:47 -04004071 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004072 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04004073 auto toI8 = Ty == Type::getInt8Ty(Context);
4074 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04004075 // Handle zext, sext and uitofp with i1 type specially.
4076 if ((I.getOpcode() == Instruction::ZExt ||
4077 I.getOpcode() == Instruction::SExt ||
4078 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05004079 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004080 //
4081 // Generate OpSelect.
4082 //
4083
4084 // Ops[0] = Result Type ID
4085 // Ops[1] = Condition ID
4086 // Ops[2] = True Constant ID
4087 // Ops[3] = False Constant ID
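        // For example (a sketch, with illustrative names):
        //   %r = zext i1 %c to i32
        // becomes
        //   %r = OpSelect %uint %c %uint_1 %uint_0
        // (sext selects -1 for true, and uitofp selects 1.0f / 0.0f).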
4088 SPIRVOperandList Ops;
4089
David Neto257c3892018-04-11 13:19:45 -04004090 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004091
David Neto22f144c2017-06-12 14:26:21 -04004092 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004093 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04004094
4095 uint32_t TrueID = 0;
4096 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004097 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04004098 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004099 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04004100 } else {
4101 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
4102 }
David Neto257c3892018-04-11 13:19:45 -04004103 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04004104
4105 uint32_t FalseID = 0;
4106 if (I.getOpcode() == Instruction::ZExt) {
4107 FalseID = VMap[Constant::getNullValue(I.getType())];
4108 } else if (I.getOpcode() == Instruction::SExt) {
4109 FalseID = VMap[Constant::getNullValue(I.getType())];
4110 } else {
4111 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
4112 }
David Neto257c3892018-04-11 13:19:45 -04004113 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04004114
David Neto87846742018-04-11 17:36:22 -04004115 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004116 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05004117 } else if (!clspv::Option::Int8Support() &&
4118 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004119 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4120 // 8 bits.
4121 // Before:
4122 // %result = trunc i32 %a to i8
4123 // After
4124 // %result = OpBitwiseAnd %uint %a %uint_255
4125
4126 SPIRVOperandList Ops;
4127
David Neto257c3892018-04-11 13:19:45 -04004128 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04004129
4130 Type *UintTy = Type::getInt32Ty(Context);
4131 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04004132 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04004133
David Neto87846742018-04-11 17:36:22 -04004134 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04004135 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004136 } else {
4137 // Ops[0] = Result Type ID
4138 // Ops[1] = Source Value ID
4139 SPIRVOperandList Ops;
4140
David Neto257c3892018-04-11 13:19:45 -04004141 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004142
David Neto87846742018-04-11 17:36:22 -04004143 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004144 SPIRVInstList.push_back(Inst);
4145 }
4146 } else if (isa<BinaryOperator>(I)) {
4147 //
4148 // Generate SPIRV instructions for binary operators.
4149 //
4150
4151 // Handle xor with i1 type specially.
4152 if (I.getOpcode() == Instruction::Xor &&
4153 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004154 ((isa<ConstantInt>(I.getOperand(0)) &&
4155 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4156 (isa<ConstantInt>(I.getOperand(1)) &&
4157 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004158 //
4159 // Generate OpLogicalNot.
4160 //
4161 // Ops[0] = Result Type ID
4162 // Ops[1] = Operand
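        // For example (a sketch): '%r = xor i1 %c, true' becomes
        //   %r = OpLogicalNot %bool %c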
4163 SPIRVOperandList Ops;
4164
David Neto257c3892018-04-11 13:19:45 -04004165 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004166
4167 Value *CondV = I.getOperand(0);
4168 if (isa<Constant>(I.getOperand(0))) {
4169 CondV = I.getOperand(1);
4170 }
David Neto257c3892018-04-11 13:19:45 -04004171 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04004172
David Neto87846742018-04-11 17:36:22 -04004173 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004174 SPIRVInstList.push_back(Inst);
4175 } else {
4176 // Ops[0] = Result Type ID
4177 // Ops[1] = Operand 0
4178 // Ops[2] = Operand 1
4179 SPIRVOperandList Ops;
4180
David Neto257c3892018-04-11 13:19:45 -04004181 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4182 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004183
David Neto87846742018-04-11 17:36:22 -04004184 auto *Inst =
4185 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004186 SPIRVInstList.push_back(Inst);
4187 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004188 } else if (I.getOpcode() == Instruction::FNeg) {
4189 // The only unary operator.
4190 //
4191 // Ops[0] = Result Type ID
4192 // Ops[1] = Operand 0
4193 SPIRVOperandList ops;
4194
4195 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
4196 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
4197 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004198 } else {
4199 I.print(errs());
4200 llvm_unreachable("Unsupported instruction???");
4201 }
4202 break;
4203 }
4204 case Instruction::GetElementPtr: {
4205 auto &GlobalConstArgSet = getGlobalConstArgSet();
4206
4207 //
4208 // Generate OpAccessChain.
4209 //
4210 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4211
4216 // Ops[0] = Result Type ID
4217 // Ops[1] = Base ID
4218 // Ops[2] ... Ops[n] = Indexes ID
4219 SPIRVOperandList Ops;
4220
alan-bakerb6b09dc2018-11-08 16:59:28 -05004221 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004222 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4223 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4224 // Use pointer type with private address space for global constant.
4225 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004226 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004227 }
David Neto257c3892018-04-11 13:19:45 -04004228
4229 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004230
David Neto862b7d82018-06-14 18:48:37 -04004231 // Generate the base pointer.
4232 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004233
David Neto862b7d82018-06-14 18:48:37 -04004234 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004235
4236 //
4237 // The GEP is lowered according to the following rules:
4238 //
David Neto862b7d82018-06-14 18:48:37 -04004239 // 1. If the GEP's first index is constant 0, generate OpAccessChain and
4240 //    drop that first index.
David Neto22f144c2017-06-12 14:26:21 -04004241 // 2. If the GEP's first index is a non-zero constant, generate
4242 //    OpPtrAccessChain and keep the first index.
4243 // 3. If the GEP's first index is not constant, generate OpPtrAccessChain
4244 //    and keep the first index.
4245 // 4. Otherwise, generate OpAccessChain and keep the first index.
4247 //
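    // For example (a sketch, with illustrative names):
    //   getelementptr %struct.S, %struct.S* %p, i32 0, i32 2
    // becomes '%q = OpAccessChain %ptr_ty %p %uint_2' (rule 1, leading 0
    // dropped), while
    //   getelementptr i32, i32* %p, i32 %n
    // becomes '%q = OpPtrAccessChain %ptr_ty %p %n' (rule 3, first index kept).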
4248 spv::Op Opcode = spv::OpAccessChain;
4249 unsigned offset = 0;
4250 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004251 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004252 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004253 } else {
David Neto22f144c2017-06-12 14:26:21 -04004254 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004255 }
David Neto862b7d82018-06-14 18:48:37 -04004256 } else {
David Neto22f144c2017-06-12 14:26:21 -04004257 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004258 }
4259
4260 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004261 // Do we need to generate ArrayStride? Check against the GEP result type
4262 // rather than the pointer type of the base because when indexing into
4263 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4264 // for something else in the SPIR-V.
4265 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004266 auto address_space = ResultType->getAddressSpace();
4267 setVariablePointersCapabilities(address_space);
4268 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004269 case spv::StorageClassStorageBuffer:
4270 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004271 // Save the need to generate an ArrayStride decoration. But defer
4272 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004273 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004274 break;
4275 default:
4276 break;
David Neto1a1a0582017-07-07 12:01:44 -04004277 }
David Neto22f144c2017-06-12 14:26:21 -04004278 }
4279
4280 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004281 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004282 }
4283
David Neto87846742018-04-11 17:36:22 -04004284 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004285 SPIRVInstList.push_back(Inst);
4286 break;
4287 }
4288 case Instruction::ExtractValue: {
4289 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4290 // Ops[0] = Result Type ID
4291 // Ops[1] = Composite ID
4292 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4293 SPIRVOperandList Ops;
4294
David Neto257c3892018-04-11 13:19:45 -04004295 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004296
4297 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004298 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004299
4300 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004301 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004302 }
4303
David Neto87846742018-04-11 17:36:22 -04004304 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004305 SPIRVInstList.push_back(Inst);
4306 break;
4307 }
4308 case Instruction::InsertValue: {
4309 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4310 // Ops[0] = Result Type ID
4311 // Ops[1] = Object ID
4312 // Ops[2] = Composite ID
4313 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4314 SPIRVOperandList Ops;
4315
4316 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004317 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004318
4319 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004320 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004321
4322 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004323 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004324
4325 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004326 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004327 }
4328
David Neto87846742018-04-11 17:36:22 -04004329 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004330 SPIRVInstList.push_back(Inst);
4331 break;
4332 }
4333 case Instruction::Select: {
4334 //
4335 // Generate OpSelect.
4336 //
4337
4338 // Ops[0] = Result Type ID
4339 // Ops[1] = Condition ID
4340 // Ops[2] = True Value ID
4341 // Ops[3] = False Value ID
4342 SPIRVOperandList Ops;
4343
4344 // Find SPIRV instruction for parameter type.
4345 auto Ty = I.getType();
4346 if (Ty->isPointerTy()) {
4347 auto PointeeTy = Ty->getPointerElementType();
4348 if (PointeeTy->isStructTy() &&
4349 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4350 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004351 } else {
4352 // Selecting between pointers requires variable pointers.
4353 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4354 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4355 setVariablePointers(true);
4356 }
David Neto22f144c2017-06-12 14:26:21 -04004357 }
4358 }
4359
David Neto257c3892018-04-11 13:19:45 -04004360 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4361 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004362
David Neto87846742018-04-11 17:36:22 -04004363 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004364 SPIRVInstList.push_back(Inst);
4365 break;
4366 }
4367 case Instruction::ExtractElement: {
4368 // Handle <4 x i8> type manually.
4369 Type *CompositeTy = I.getOperand(0)->getType();
4370 if (is4xi8vec(CompositeTy)) {
4371 //
4372 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4373 // <4 x i8>.
4374 //
4375
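      // For example (a sketch, with illustrative names): extracting element 2
      // of a <4 x i8> value %v (packed in a 32-bit word) becomes roughly:
      //   %shifted = OpShiftRightLogical %uint %v %uint_16
      //   %result  = OpBitwiseAnd %uint %shifted %uint_255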
4376 //
4377 // Generate OpShiftRightLogical
4378 //
4379 // Ops[0] = Result Type ID
4380 // Ops[1] = Operand 0
4381 // Ops[2] = Operand 1
4382 //
4383 SPIRVOperandList Ops;
4384
David Neto257c3892018-04-11 13:19:45 -04004385 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004386
4387 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004388 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004389
4390 uint32_t Op1ID = 0;
4391 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4392 // Handle constant index.
4393 uint64_t Idx = CI->getZExtValue();
4394 Value *ShiftAmount =
4395 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4396 Op1ID = VMap[ShiftAmount];
4397 } else {
4398 // Handle variable index.
4399 SPIRVOperandList TmpOps;
4400
David Neto257c3892018-04-11 13:19:45 -04004401 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4402 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004403
4404 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004405 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004406
4407 Op1ID = nextID;
4408
David Neto87846742018-04-11 17:36:22 -04004409 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004410 SPIRVInstList.push_back(TmpInst);
4411 }
David Neto257c3892018-04-11 13:19:45 -04004412 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004413
4414 uint32_t ShiftID = nextID;
4415
David Neto87846742018-04-11 17:36:22 -04004416 auto *Inst =
4417 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004418 SPIRVInstList.push_back(Inst);
4419
4420 //
4421 // Generate OpBitwiseAnd
4422 //
4423 // Ops[0] = Result Type ID
4424 // Ops[1] = Operand 0
4425 // Ops[2] = Operand 1
4426 //
4427 Ops.clear();
4428
David Neto257c3892018-04-11 13:19:45 -04004429 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004430
4431 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004432 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004433
David Neto9b2d6252017-09-06 15:47:37 -04004434 // Reset mapping for this value to the result of the bitwise and.
4435 VMap[&I] = nextID;
4436
David Neto87846742018-04-11 17:36:22 -04004437 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004438 SPIRVInstList.push_back(Inst);
4439 break;
4440 }
4441
4442 // Ops[0] = Result Type ID
4443 // Ops[1] = Composite ID
4444 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4445 SPIRVOperandList Ops;
4446
David Neto257c3892018-04-11 13:19:45 -04004447 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004448
4449 spv::Op Opcode = spv::OpCompositeExtract;
4450 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004451 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004452 } else {
David Neto257c3892018-04-11 13:19:45 -04004453 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004454 Opcode = spv::OpVectorExtractDynamic;
4455 }
4456
David Neto87846742018-04-11 17:36:22 -04004457 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004458 SPIRVInstList.push_back(Inst);
4459 break;
4460 }
4461 case Instruction::InsertElement: {
4462 // Handle <4 x i8> type manually.
4463 Type *CompositeTy = I.getOperand(0)->getType();
4464 if (is4xi8vec(CompositeTy)) {
4465 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4466 uint32_t CstFFID = VMap[CstFF];
4467
4468 uint32_t ShiftAmountID = 0;
4469 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4470 // Handle constant index.
4471 uint64_t Idx = CI->getZExtValue();
4472 Value *ShiftAmount =
4473 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4474 ShiftAmountID = VMap[ShiftAmount];
4475 } else {
4476 // Handle variable index.
4477 SPIRVOperandList TmpOps;
4478
David Neto257c3892018-04-11 13:19:45 -04004479 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4480 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004481
4482 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004483 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004484
4485 ShiftAmountID = nextID;
4486
David Neto87846742018-04-11 17:36:22 -04004487 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004488 SPIRVInstList.push_back(TmpInst);
4489 }
4490
4491 //
4492 // Generate mask operations.
4493 //
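      // For example (a sketch, with illustrative names): inserting byte %b at
      // element 1 of a <4 x i8> value %v becomes roughly:
      //   %mask    = OpShiftLeftLogical %uint %uint_255 %uint_8
      //   %invmask = OpNot %uint %mask
      //   %cleared = OpBitwiseAnd %uint %v %invmask
      //   %shifted = OpShiftLeftLogical %uint %b %uint_8
      //   %result  = OpBitwiseOr %uint %cleared %shifted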
4494
4495 // ShiftLeft mask according to index of insertelement.
4496 SPIRVOperandList Ops;
4497
David Neto257c3892018-04-11 13:19:45 -04004498 const uint32_t ResTyID = lookupType(CompositeTy);
4499 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004500
4501 uint32_t MaskID = nextID;
4502
David Neto87846742018-04-11 17:36:22 -04004503 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004504 SPIRVInstList.push_back(Inst);
4505
4506 // Inverse mask.
4507 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004508 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004509
4510 uint32_t InvMaskID = nextID;
4511
David Neto87846742018-04-11 17:36:22 -04004512 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004513 SPIRVInstList.push_back(Inst);
4514
4515 // Apply mask.
4516 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004517 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004518
4519 uint32_t OrgValID = nextID;
4520
David Neto87846742018-04-11 17:36:22 -04004521 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004522 SPIRVInstList.push_back(Inst);
4523
4524 // Shift the inserted byte into position according to the insertelement index.
4525 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004526 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4527 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004528
4529 uint32_t InsertValID = nextID;
4530
David Neto87846742018-04-11 17:36:22 -04004531 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004532 SPIRVInstList.push_back(Inst);
4533
4534 // OR the shifted value into the masked original value.
4535 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004536 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004537
David Netoa394f392017-08-26 20:45:29 -04004538 VMap[&I] = nextID;
4539
David Neto87846742018-04-11 17:36:22 -04004540 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004541 SPIRVInstList.push_back(Inst);
4542
4543 break;
4544 }
4545
David Neto22f144c2017-06-12 14:26:21 -04004546 SPIRVOperandList Ops;
4547
James Priced26efea2018-06-09 23:28:32 +01004548 // Ops[0] = Result Type ID
4549 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004550
4551 spv::Op Opcode = spv::OpCompositeInsert;
4552 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004553 const auto value = CI->getZExtValue();
4554 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004555 // Ops[1] = Object ID
4556 // Ops[2] = Composite ID
4557 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004558 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004559 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004560 } else {
James Priced26efea2018-06-09 23:28:32 +01004561 // Ops[1] = Composite ID
4562 // Ops[2] = Object ID
4563 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004564 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004565 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004566 Opcode = spv::OpVectorInsertDynamic;
4567 }
4568
David Neto87846742018-04-11 17:36:22 -04004569 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004570 SPIRVInstList.push_back(Inst);
4571 break;
4572 }
4573 case Instruction::ShuffleVector: {
4574 // Ops[0] = Result Type ID
4575 // Ops[1] = Vector 1 ID
4576 // Ops[2] = Vector 2 ID
4577 // Ops[3] ... Ops[n] = Components (Literal Number)
4578 SPIRVOperandList Ops;
4579
David Neto257c3892018-04-11 13:19:45 -04004580 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4581 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004582
4583 uint64_t NumElements = 0;
4584 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4585 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4586
4587 if (Cst->isNullValue()) {
4588 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004589 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004590 }
4591 } else if (const ConstantDataSequential *CDS =
4592 dyn_cast<ConstantDataSequential>(Cst)) {
4593 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004595 const auto value = CDS->getElementAsInteger(i);
4596 assert(value <= UINT32_MAX);
4597 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004598 }
4599 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4600 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4601 auto Op = CV->getOperand(i);
4602
4603 uint32_t literal = 0;
4604
4605 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4606 literal = static_cast<uint32_t>(CI->getZExtValue());
4607 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4608 literal = 0xFFFFFFFFu;
4609 } else {
4610 Op->print(errs());
4611 llvm_unreachable("Unsupported element in ConstantVector!");
4612 }
4613
David Neto257c3892018-04-11 13:19:45 -04004614 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004615 }
4616 } else {
4617 Cst->print(errs());
4618 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4619 }
4620 }
4621
David Neto87846742018-04-11 17:36:22 -04004622 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004623 SPIRVInstList.push_back(Inst);
4624 break;
4625 }
4626 case Instruction::ICmp:
4627 case Instruction::FCmp: {
4628 CmpInst *CmpI = cast<CmpInst>(&I);
4629
David Netod4ca2e62017-07-06 18:47:35 -04004630 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004631 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004632 if (isa<PointerType>(ArgTy)) {
4633 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004634 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004635 errs()
4636 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4637 << "in function " << name << "\n";
4638 llvm_unreachable("Pointer equality check is invalid");
4639 break;
4640 }
4641
David Neto257c3892018-04-11 13:19:45 -04004642 // Ops[0] = Result Type ID
4643 // Ops[1] = Operand 1 ID
4644 // Ops[2] = Operand 2 ID
4645 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004646
David Neto257c3892018-04-11 13:19:45 -04004647 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4648 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004649
4650 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004651 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004652 SPIRVInstList.push_back(Inst);
4653 break;
4654 }
4655 case Instruction::Br: {
4656 // The branch instruction is deferred because it needs the IDs of its target
4657 // labels. Record the slot's location in the SPIRVInstructionList.
4658 DeferredInsts.push_back(
4659 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4660 break;
4661 }
4662 case Instruction::Switch: {
4663 I.print(errs());
4664 llvm_unreachable("Unsupported instruction???");
4665 break;
4666 }
4667 case Instruction::IndirectBr: {
4668 I.print(errs());
4669 llvm_unreachable("Unsupported instruction???");
4670 break;
4671 }
4672 case Instruction::PHI: {
4673 // The PHI instruction is deferred because it may refer to labels and values
4674 // that are not generated yet. Record the slot's location in the
4675 // SPIRVInstructionList.
4675 DeferredInsts.push_back(
4676 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4677 break;
4678 }
4679 case Instruction::Alloca: {
4680 //
4681 // Generate OpVariable.
4682 //
4683 // Ops[0] : Result Type ID
4684 // Ops[1] : Storage Class
4685 SPIRVOperandList Ops;
4686
David Neto257c3892018-04-11 13:19:45 -04004687 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004688
David Neto87846742018-04-11 17:36:22 -04004689 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004690 SPIRVInstList.push_back(Inst);
4691 break;
4692 }
4693 case Instruction::Load: {
4694 LoadInst *LD = cast<LoadInst>(&I);
4695 //
4696 // Generate OpLoad.
4697 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004698
alan-baker5b86ed72019-02-15 08:26:50 -05004699 if (LD->getType()->isPointerTy()) {
4700 // Loading a pointer requires variable pointers.
4701 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4702 }
David Neto22f144c2017-06-12 14:26:21 -04004703
David Neto0a2f98d2017-09-15 19:38:40 -04004704 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004705 uint32_t PointerID = VMap[LD->getPointerOperand()];
4706
4707 // This is a hack to work around what looks like a driver bug.
4708 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004709 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4710 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004711 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004712 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004713 // Generate a bitwise-and of the original value with itself.
4714 // We should have been able to get away with just an OpCopyObject,
4715 // but we need something more complex to get past certain driver bugs.
4716 // This is ridiculous, but necessary.
4717 // TODO(dneto): Revisit this once drivers fix their bugs.
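      // A sketch of the workaround (illustrative names): instead of
      //   %wgsize = OpLoad %v3uint %workgroup_size_var
      // we emit
      //   %wgsize = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value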
4718
4719 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004720 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4721 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004722
David Neto87846742018-04-11 17:36:22 -04004723 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004724 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004725 break;
4726 }
4727
4728 // This is the normal path. Generate a load.
4729
David Neto22f144c2017-06-12 14:26:21 -04004730 // Ops[0] = Result Type ID
4731 // Ops[1] = Pointer ID
4732 // Ops[2] ... Ops[n] = Optional Memory Access
4733 //
4734 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004735
David Neto22f144c2017-06-12 14:26:21 -04004736 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004737 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004738
David Neto87846742018-04-11 17:36:22 -04004739 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004740 SPIRVInstList.push_back(Inst);
4741 break;
4742 }
4743 case Instruction::Store: {
4744 StoreInst *ST = cast<StoreInst>(&I);
4745 //
4746 // Generate OpStore.
4747 //
4748
alan-baker5b86ed72019-02-15 08:26:50 -05004749 if (ST->getValueOperand()->getType()->isPointerTy()) {
4750 // Storing a pointer requires variable pointers.
4751 setVariablePointersCapabilities(
4752 ST->getValueOperand()->getType()->getPointerAddressSpace());
4753 }
4754
David Neto22f144c2017-06-12 14:26:21 -04004755 // Ops[0] = Pointer ID
4756 // Ops[1] = Object ID
4757 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4758 //
4759 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004760 SPIRVOperandList Ops;
4761 Ops << MkId(VMap[ST->getPointerOperand()])
4762 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004763
David Neto87846742018-04-11 17:36:22 -04004764 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004765 SPIRVInstList.push_back(Inst);
4766 break;
4767 }
4768 case Instruction::AtomicCmpXchg: {
4769 I.print(errs());
4770 llvm_unreachable("Unsupported instruction???");
4771 break;
4772 }
4773 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004774 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4775
4776 spv::Op opcode;
4777
4778 switch (AtomicRMW->getOperation()) {
4779 default:
4780 I.print(errs());
4781 llvm_unreachable("Unsupported instruction???");
4782 case llvm::AtomicRMWInst::Add:
4783 opcode = spv::OpAtomicIAdd;
4784 break;
4785 case llvm::AtomicRMWInst::Sub:
4786 opcode = spv::OpAtomicISub;
4787 break;
4788 case llvm::AtomicRMWInst::Xchg:
4789 opcode = spv::OpAtomicExchange;
4790 break;
4791 case llvm::AtomicRMWInst::Min:
4792 opcode = spv::OpAtomicSMin;
4793 break;
4794 case llvm::AtomicRMWInst::Max:
4795 opcode = spv::OpAtomicSMax;
4796 break;
4797 case llvm::AtomicRMWInst::UMin:
4798 opcode = spv::OpAtomicUMin;
4799 break;
4800 case llvm::AtomicRMWInst::UMax:
4801 opcode = spv::OpAtomicUMax;
4802 break;
4803 case llvm::AtomicRMWInst::And:
4804 opcode = spv::OpAtomicAnd;
4805 break;
4806 case llvm::AtomicRMWInst::Or:
4807 opcode = spv::OpAtomicOr;
4808 break;
4809 case llvm::AtomicRMWInst::Xor:
4810 opcode = spv::OpAtomicXor;
4811 break;
4812 }
4813
4814 //
4815 // Generate OpAtomic*.
4816 //
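    // For example (a sketch, with illustrative names):
    //   %r = atomicrmw add i32* %p, i32 %v
    // becomes roughly
    //   %r = OpAtomicIAdd %uint %p %scope_device %semantics %v
    // where %semantics is UniformMemory | SequentiallyConsistent.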
4817 SPIRVOperandList Ops;
4818
David Neto257c3892018-04-11 13:19:45 -04004819 Ops << MkId(lookupType(I.getType()))
4820 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004821
4822 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004823 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004824 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004825
4826 const auto ConstantMemorySemantics = ConstantInt::get(
4827 IntTy, spv::MemorySemanticsUniformMemoryMask |
4828 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004829 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004830
David Neto257c3892018-04-11 13:19:45 -04004831 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004832
4833 VMap[&I] = nextID;
4834
David Neto87846742018-04-11 17:36:22 -04004835 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004836 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004837 break;
4838 }
4839 case Instruction::Fence: {
4840 I.print(errs());
4841 llvm_unreachable("Unsupported instruction???");
4842 break;
4843 }
4844 case Instruction::Call: {
4845 CallInst *Call = dyn_cast<CallInst>(&I);
4846 Function *Callee = Call->getCalledFunction();
4847
Alan Baker202c8c72018-08-13 13:47:44 -04004848 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004849 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4850 // Generate an OpLoad
4851 SPIRVOperandList Ops;
4852 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004853
David Neto862b7d82018-06-14 18:48:37 -04004854 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4855 << MkId(ResourceVarDeferredLoadCalls[Call]);
4856
4857 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4858 SPIRVInstList.push_back(Inst);
4859 VMap[Call] = load_id;
4860 break;
4861
4862 } else {
4863 // This maps to an OpVariable we've already generated.
4864 // No code is generated for the call.
4865 }
4866 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004867 } else if (Callee->getName().startswith(
4868 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004869 // Don't codegen an instruction here, but instead map this call directly
4870 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004871 int spec_id = static_cast<int>(
4872 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004873 const auto &info = LocalSpecIdInfoMap[spec_id];
4874 VMap[Call] = info.variable_id;
4875 break;
David Neto862b7d82018-06-14 18:48:37 -04004876 }
4877
4878 // Sampler initializers become a load of the corresponding sampler.
4879
Kévin Petitdf71de32019-04-09 14:09:50 +01004880 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004881 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004882 const auto third_param = static_cast<unsigned>(
4883 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4884 auto sampler_value = third_param;
4885 if (clspv::Option::UseSamplerMap()) {
4886 sampler_value = getSamplerMap()[third_param].first;
4887 }
David Neto862b7d82018-06-14 18:48:37 -04004888
4889 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004890 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004891 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004892
David Neto257c3892018-04-11 13:19:45 -04004893 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004894 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004895
David Neto862b7d82018-06-14 18:48:37 -04004896 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004897 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004898 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004899 break;
4900 }
4901
Kévin Petit349c9502019-03-28 17:24:14 +00004902 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004903 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4904 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4905 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004906
Kévin Petit617a76d2019-04-04 13:54:16 +01004907 // If the switch above didn't find an entry, the intrinsic may instead
4908 // encode its opcode via the name-mangling scheme.
4909 bool usesMangler = false;
4910 if (opcode == spv::OpNop) {
4911 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4912 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4913 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4914 usesMangler = true;
4915 }
4916 }
4917
Kévin Petit349c9502019-03-28 17:24:14 +00004918 if (opcode != spv::OpNop) {
4919
David Neto22f144c2017-06-12 14:26:21 -04004920 SPIRVOperandList Ops;
4921
Kévin Petit349c9502019-03-28 17:24:14 +00004922 if (!I.getType()->isVoidTy()) {
4923 Ops << MkId(lookupType(I.getType()));
4924 }
David Neto22f144c2017-06-12 14:26:21 -04004925
Kévin Petit617a76d2019-04-04 13:54:16 +01004926 unsigned firstOperand = usesMangler ? 1 : 0;
4927 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004928 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004929 }
4930
Kévin Petit349c9502019-03-28 17:24:14 +00004931 if (!I.getType()->isVoidTy()) {
4932 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004933 }
4934
Kévin Petit349c9502019-03-28 17:24:14 +00004935 SPIRVInstruction *Inst;
4936 if (!I.getType()->isVoidTy()) {
4937 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4938 } else {
4939 Inst = new SPIRVInstruction(opcode, Ops);
4940 }
Kévin Petit8a560882019-03-21 15:24:34 +00004941 SPIRVInstList.push_back(Inst);
4942 break;
4943 }
4944
David Neto22f144c2017-06-12 14:26:21 -04004945 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4946 if (Callee->getName().startswith("spirv.copy_memory")) {
4947 //
4948 // Generate OpCopyMemory.
4949 //
4950
4951 // Ops[0] = Dst ID
4952 // Ops[1] = Src ID
4953 // Ops[2] = Memory Access
4954 // Ops[3] = Alignment
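      // For example (a sketch): a volatile copy with 4-byte alignment becomes
      //   OpCopyMemory %dst %src Volatile|Aligned 4
      // while a non-volatile copy carries only the Aligned mask.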
4955
4956 auto IsVolatile =
4957 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4958
4959 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4960 : spv::MemoryAccessMaskNone;
4961
4962 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4963
4964 auto Alignment =
4965 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4966
David Neto257c3892018-04-11 13:19:45 -04004967 SPIRVOperandList Ops;
4968 Ops << MkId(VMap[Call->getArgOperand(0)])
4969 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4970 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004971
David Neto87846742018-04-11 17:36:22 -04004972 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004973
4974 SPIRVInstList.push_back(Inst);
4975
4976 break;
4977 }
4978
SJW2c317da2020-03-23 07:39:13 -05004979 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4980 // Additionally, OpTypeSampledImage is generated.
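    // For example (a sketch, with illustrative names): read_imagef(img, smp, c)
    // lowers to roughly:
    //   %si  = OpSampledImage %sampled_image_ty %img %smp
    //   %val = OpImageSampleExplicitLod %v4float %si %c Lod %float_0
    // Integer image formats sample into a 4-component integer result and add
    // an OpBitcast to the call's result type.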
SJW173c7e92020-03-16 08:44:47 -05004981 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004982 //
4983 // Generate OpSampledImage.
4984 //
4985 // Ops[0] = Result Type ID
4986 // Ops[1] = Image ID
4987 // Ops[2] = Sampler ID
4988 //
4989 SPIRVOperandList Ops;
4990
4991 Value *Image = Call->getArgOperand(0);
4992 Value *Sampler = Call->getArgOperand(1);
4993 Value *Coordinate = Call->getArgOperand(2);
4994
4995 TypeMapType &OpImageTypeMap = getImageTypeMap();
4996 Type *ImageTy = Image->getType()->getPointerElementType();
4997 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004998 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004999 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04005000
5001 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04005002
5003 uint32_t SampledImageID = nextID;
5004
David Neto87846742018-04-11 17:36:22 -04005005 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005006 SPIRVInstList.push_back(Inst);
5007
5008 //
5009 // Generate OpImageSampleExplicitLod.
5010 //
5011 // Ops[0] = Result Type ID
5012 // Ops[1] = Sampled Image ID
5013 // Ops[2] = Coordinate ID
5014 // Ops[3] = Image Operands Type ID
5015 // Ops[4] ... Ops[n] = Operands ID
5016 //
5017 Ops.clear();
5018
alan-bakerf67468c2019-11-25 15:51:49 -05005019 const bool is_int_image = IsIntImageType(Image->getType());
5020 uint32_t result_type = 0;
5021 if (is_int_image) {
5022 result_type = v4int32ID;
5023 } else {
5024 result_type = lookupType(Call->getType());
5025 }
5026
5027 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
5028 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04005029
5030 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04005031 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04005032
alan-bakerf67468c2019-11-25 15:51:49 -05005033 uint32_t final_id = nextID++;
5034 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04005035
alan-bakerf67468c2019-11-25 15:51:49 -05005036 uint32_t image_id = final_id;
5037 if (is_int_image) {
5038 // Int image requires a bitcast from v4int to v4uint.
5039 image_id = nextID++;
5040 }
5041
5042 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005043 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05005044
5045 if (is_int_image) {
5046 // Generate the bitcast.
5047 Ops.clear();
5048 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5049 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5050 SPIRVInstList.push_back(Inst);
5051 }
David Neto22f144c2017-06-12 14:26:21 -04005052 break;
5053 }
5054
alan-baker75090e42020-02-20 11:21:04 -05005055 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05005056 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05005057 Value *Image = Call->getArgOperand(0);
5058 Value *Coordinate = Call->getArgOperand(1);
5059
5060 //
5061 // Generate OpImageFetch
5062 //
5063 // Ops[0] = Result Type ID
5064 // Ops[1] = Image ID
5065 // Ops[2] = Coordinate ID
5066 // Ops[3] = Lod
5067 // Ops[4] = 0
5068 //
5069 SPIRVOperandList Ops;
5070
5071 const bool is_int_image = IsIntImageType(Image->getType());
5072 uint32_t result_type = 0;
5073 if (is_int_image) {
5074 result_type = v4int32ID;
5075 } else {
5076 result_type = lookupType(Call->getType());
5077 }
5078
5079 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
5080 << MkNum(spv::ImageOperandsLodMask);
5081
5082 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5083 Ops << MkId(VMap[CstInt0]);
5084
5085 uint32_t final_id = nextID++;
5086 VMap[&I] = final_id;
5087
5088 uint32_t image_id = final_id;
5089 if (is_int_image) {
5090 // Int image requires a bitcast from v4int to v4uint.
5091 image_id = nextID++;
5092 }
5093
5094 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5095 SPIRVInstList.push_back(Inst);
5096
5097 if (is_int_image) {
5098 // Generate the bitcast.
5099 Ops.clear();
5100 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5101 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5102 SPIRVInstList.push_back(Inst);
5103 }
5104 break;
5105 }
5106
alan-bakerf67468c2019-11-25 15:51:49 -05005107 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05005108 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005109 //
5110 // Generate OpImageWrite.
5111 //
5112 // Ops[0] = Image ID
5113 // Ops[1] = Coordinate ID
5114 // Ops[2] = Texel ID
5115 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5116 // Ops[4] ... Ops[n] = (Optional) Operands ID
5117 //
5118 SPIRVOperandList Ops;
5119
5120 Value *Image = Call->getArgOperand(0);
5121 Value *Coordinate = Call->getArgOperand(1);
5122 Value *Texel = Call->getArgOperand(2);
5123
5124 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005125 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005126 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005127
5128 const bool is_int_image = IsIntImageType(Image->getType());
5129 if (is_int_image) {
5130 // Generate a bitcast to v4int and use it as the texel value.
5131 uint32_t castID = nextID++;
5132 Ops << MkId(v4int32ID) << MkId(TexelID);
5133 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5134 SPIRVInstList.push_back(cast);
5135 Ops.clear();
5136 TexelID = castID;
5137 }
David Neto257c3892018-04-11 13:19:45 -04005138 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005139
David Neto87846742018-04-11 17:36:22 -04005140 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005141 SPIRVInstList.push_back(Inst);
5142 break;
5143 }
5144
alan-bakerce179f12019-12-06 19:02:22 -05005145 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05005146 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005147 //
alan-bakerce179f12019-12-06 19:02:22 -05005148 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005149 //
5150 // Ops[0] = Image ID
5151 //
alan-bakerce179f12019-12-06 19:02:22 -05005152 // Result type has components equal to the dimensionality of the image,
5153 // plus 1 if the image is arrayed.
5154 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005155 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005156 SPIRVOperandList Ops;
5157
5158 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005159 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5160 uint32_t SizesTypeID = 0;
5161
David Neto5c22a252018-03-15 16:07:41 -04005162 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005163 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005164 const uint32_t components =
5165 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005166 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005167 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5168 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005169 SizesTypeID =
5170 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005171 }
David Neto5c22a252018-03-15 16:07:41 -04005172 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005173 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005174 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05005175 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05005176 query_opcode = spv::OpImageQuerySizeLod;
5177 // Need explicit 0 for Lod operand.
5178 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5179 Ops << MkId(VMap[CstInt0]);
5180 }
David Neto5c22a252018-03-15 16:07:41 -04005181
5182 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005183 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005184 SPIRVInstList.push_back(QueryInst);
5185
alan-bakerce179f12019-12-06 19:02:22 -05005186 // May require an extra instruction to create the appropriate result of
5187 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05005188 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05005189 if (dim == 3) {
5190 // get_image_dim returns an int4 for 3D images.
5191 //
5192 // Reset value map entry since we generated an intermediate
5193 // instruction.
5194 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005195
alan-bakerce179f12019-12-06 19:02:22 -05005196 // Implement:
5197 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5198 Ops.clear();
5199 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5200 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005201
alan-bakerce179f12019-12-06 19:02:22 -05005202 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5203 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005204
alan-bakerce179f12019-12-06 19:02:22 -05005205 auto *Inst =
5206 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5207 SPIRVInstList.push_back(Inst);
5208 } else if (dim != components) {
5209      // get_image_dim returns an int2 regardless of the arrayedness of the
5210 // image. If the image is arrayed an element must be dropped from the
5211 // query result.
5212 //
5213 // Reset value map entry since we generated an intermediate
5214 // instruction.
5215 VMap[&I] = nextID;
5216
5217 // Implement:
5218 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5219 Ops.clear();
5220 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5221 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5222
5223 auto *Inst =
5224 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5225 SPIRVInstList.push_back(Inst);
5226 }
5227 } else if (components > 1) {
5228 // Reset value map entry since we generated an intermediate instruction.
5229 VMap[&I] = nextID;
5230
5231 // Implement:
5232 // %result = OpCompositeExtract %uint %sizes <component number>
5233 Ops.clear();
5234 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5235
5236 uint32_t component = 0;
5237 if (IsGetImageHeight(Callee))
5238 component = 1;
5239 else if (IsGetImageDepth(Callee))
5240 component = 2;
5241 Ops << MkNum(component);
5242
5243 auto *Inst =
5244 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5245 SPIRVInstList.push_back(Inst);
5246 }
David Neto5c22a252018-03-15 16:07:41 -04005247 break;
5248 }
5249
David Neto22f144c2017-06-12 14:26:21 -04005250    // A call instruction is deferred because it needs the callee's function
5251    // ID. Record the slot's location in the SPIRVInstructionList.
5252 DeferredInsts.push_back(
5253 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5254
David Neto3fbb4072017-10-16 11:28:14 -04005255 // Check whether the implementation of this call uses an extended
5256 // instruction plus one more value-producing instruction. If so, then
5257 // reserve the id for the extra value-producing slot.
5258 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5259 if (EInst != kGlslExtInstBad) {
5260 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005261 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005262 VMap[&I] = nextID;
5263 nextID++;
5264 }
5265 break;
5266 }
5267 case Instruction::Ret: {
5268 unsigned NumOps = I.getNumOperands();
5269 if (NumOps == 0) {
5270 //
5271 // Generate OpReturn.
5272 //
David Netoef5ba2b2019-12-20 08:35:54 -05005273 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005274 } else {
5275 //
5276 // Generate OpReturnValue.
5277 //
5278
5279 // Ops[0] = Return Value ID
5280 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005281
5282 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005283
David Neto87846742018-04-11 17:36:22 -04005284 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005285 SPIRVInstList.push_back(Inst);
5286 break;
5287 }
5288 break;
5289 }
5290 }
5291}
5292
5293void SPIRVProducerPass::GenerateFuncEpilogue() {
5294 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5295
5296 //
5297 // Generate OpFunctionEnd
5298 //
5299
David Netoef5ba2b2019-12-20 08:35:54 -05005300 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005301 SPIRVInstList.push_back(Inst);
5302}
5303
5304bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005305 // Don't specialize <4 x i8> if i8 is generally supported.
5306 if (clspv::Option::Int8Support())
5307 return false;
5308
David Neto22f144c2017-06-12 14:26:21 -04005309 LLVMContext &Context = Ty->getContext();
5310 if (Ty->isVectorTy()) {
5311 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
5312 Ty->getVectorNumElements() == 4) {
5313 return true;
5314 }
5315 }
5316
5317 return false;
5318}
5319
5320void SPIRVProducerPass::HandleDeferredInstruction() {
5321 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5322 ValueMapType &VMap = getValueMap();
5323 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5324
5325 for (auto DeferredInst = DeferredInsts.rbegin();
5326 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5327 Value *Inst = std::get<0>(*DeferredInst);
5328 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5329 if (InsertPoint != SPIRVInstList.end()) {
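      // OpPhi instructions must appear first in a block, so step past any phis
      // before splicing in the deferred merge, branch, and call instructions.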
5330 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5331 ++InsertPoint;
5332 }
5333 }
5334
5335 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005336      // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005337 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005338 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005339 //
5340 // Generate OpLoopMerge.
5341 //
5342 // Ops[0] = Merge Block ID
5343 // Ops[1] = Continue Target ID
5344 // Ops[2] = Selection Control
5345 SPIRVOperandList Ops;
5346
alan-baker06cad652019-12-03 17:56:47 -05005347 auto MergeBB = MergeBlocks[BrBB];
5348 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005349 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005350 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005351 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005352 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005353
David Neto87846742018-04-11 17:36:22 -04005354 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005355 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005356 } else if (MergeBlocks.count(BrBB)) {
5357 //
5358 // Generate OpSelectionMerge.
5359 //
5360 // Ops[0] = Merge Block ID
5361 // Ops[1] = Selection Control
5362 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005363
alan-baker06cad652019-12-03 17:56:47 -05005364 auto MergeBB = MergeBlocks[BrBB];
5365 uint32_t MergeBBID = VMap[MergeBB];
5366 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005367
alan-baker06cad652019-12-03 17:56:47 -05005368 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5369 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005370 }
5371
5372 if (Br->isConditional()) {
5373 //
5374 // Generate OpBranchConditional.
5375 //
5376 // Ops[0] = Condition ID
5377 // Ops[1] = True Label ID
5378 // Ops[2] = False Label ID
5379 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5380 SPIRVOperandList Ops;
5381
5382 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005383 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005384 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005385
5386 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005387
David Neto87846742018-04-11 17:36:22 -04005388 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005389 SPIRVInstList.insert(InsertPoint, BrInst);
5390 } else {
5391 //
5392 // Generate OpBranch.
5393 //
5394 // Ops[0] = Target Label ID
5395 SPIRVOperandList Ops;
5396
5397 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005398 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005399
David Neto87846742018-04-11 17:36:22 -04005400 SPIRVInstList.insert(InsertPoint,
5401 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005402 }
5403 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005404 if (PHI->getType()->isPointerTy()) {
5405 // OpPhi on pointers requires variable pointers.
5406 setVariablePointersCapabilities(
5407 PHI->getType()->getPointerAddressSpace());
5408 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5409 setVariablePointers(true);
5410 }
5411 }
5412
David Neto22f144c2017-06-12 14:26:21 -04005413 //
5414 // Generate OpPhi.
5415 //
5416 // Ops[0] = Result Type ID
5417 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5418 SPIRVOperandList Ops;
5419
David Neto257c3892018-04-11 13:19:45 -04005420 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005421
David Neto22f144c2017-06-12 14:26:21 -04005422 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5423 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005424 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005425 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005426 }
5427
5428 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005429 InsertPoint,
5430 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005431 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5432 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05005433 LLVMContext &Context = Callee->getContext();
5434 auto IntTy = Type::getInt32Ty(Context);
5435 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04005436 auto callee_name = Callee->getName();
5437 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005438
5439 if (EInst) {
5440 uint32_t &ExtInstImportID = getOpExtInstImportID();
5441
5442 //
5443 // Generate OpExtInst.
5444 //
5445
5446 // Ops[0] = Result Type ID
5447 // Ops[1] = Set ID (OpExtInstImport ID)
5448 // Ops[2] = Instruction Number (Literal Number)
5449 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5450 SPIRVOperandList Ops;
5451
David Neto862b7d82018-06-14 18:48:37 -04005452 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5453 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005454
David Neto22f144c2017-06-12 14:26:21 -04005455 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5456 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005457 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005458 }
5459
David Neto87846742018-04-11 17:36:22 -04005460 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5461 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005462 SPIRVInstList.insert(InsertPoint, ExtInst);
5463
David Neto3fbb4072017-10-16 11:28:14 -04005464 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5465 if (IndirectExtInst != kGlslExtInstBad) {
5466 // Generate one more instruction that uses the result of the extended
5467 // instruction. Its result id is one more than the id of the
5468 // extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04005469 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5470 &VMap, &SPIRVInstList, &InsertPoint](
5471 spv::Op opcode, Constant *constant) {
5472 //
5473 // Generate instruction like:
5474 // result = opcode constant <extinst-result>
5475 //
5476 // Ops[0] = Result Type ID
5477 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5478 // Ops[2] = Operand 1 ;; the result of the extended instruction
5479 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005480
David Neto3fbb4072017-10-16 11:28:14 -04005481 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005482 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005483
5484 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5485 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005486 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5487 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005488 }
David Neto257c3892018-04-11 13:19:45 -04005489 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005490
5491 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005492 InsertPoint, new SPIRVInstruction(
5493 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005494 };
5495
5496 switch (IndirectExtInst) {
5497 case glsl::ExtInstFindUMsb: // Implementing clz
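          // clz(x) is computed as 31 - FindUMsb(x) for 32-bit operands, so the
          // extra instruction is an OpISub whose first operand is the
          // constant 31.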
SJW2c317da2020-03-23 07:39:13 -05005498 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04005499 break;
5500 case glsl::ExtInstAcos: // Implementing acospi
5501 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005502 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005503 case glsl::ExtInstAtan2: // Implementing atan2pi
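          // The *pi variants scale the plain GLSL result by 1/pi, implemented
          // as an OpFMul with the kOneOverPi constant.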
5504 generate_extra_inst(
5505 spv::OpFMul,
5506 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5507 break;
5508
5509 default:
5510 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005511 }
David Neto22f144c2017-06-12 14:26:21 -04005512 }
David Neto3fbb4072017-10-16 11:28:14 -04005513
SJW2c317da2020-03-23 07:39:13 -05005514 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04005515 //
5516 // Generate OpBitCount
5517 //
5518 // Ops[0] = Result Type ID
5519 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005520 SPIRVOperandList Ops;
5521 Ops << MkId(lookupType(Call->getType()))
5522 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005523
5524 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005525 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005526 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005527
David Neto862b7d82018-06-14 18:48:37 -04005528 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005529
5530 // Generate an OpCompositeConstruct
5531 SPIRVOperandList Ops;
5532
5533 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005534 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005535
5536 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005537 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005538 }
5539
5540 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005541 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5542 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005543
Alan Baker202c8c72018-08-13 13:47:44 -04005544 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5545
5546 // We have already mapped the call's result value to an ID.
5547 // Don't generate any code now.
5548
5549 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005550
5551 // We have already mapped the call's result value to an ID.
5552 // Don't generate any code now.
5553
David Neto22f144c2017-06-12 14:26:21 -04005554 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005555 if (Call->getType()->isPointerTy()) {
5556 // Functions returning pointers require variable pointers.
5557 setVariablePointersCapabilities(
5558 Call->getType()->getPointerAddressSpace());
5559 }
5560
David Neto22f144c2017-06-12 14:26:21 -04005561 //
5562 // Generate OpFunctionCall.
5563 //
5564
5565 // Ops[0] = Result Type ID
5566 // Ops[1] = Callee Function ID
5567 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5568 SPIRVOperandList Ops;
5569
David Neto862b7d82018-06-14 18:48:37 -04005570 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005571
5572 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005573 if (CalleeID == 0) {
5574 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005575 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005576 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5577 // causes an infinite loop. Instead, go ahead and generate
5578 // the bad function call. A validator will catch the 0-Id.
5579 // llvm_unreachable("Can't translate function call");
5580 }
David Neto22f144c2017-06-12 14:26:21 -04005581
David Neto257c3892018-04-11 13:19:45 -04005582 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005583
David Neto22f144c2017-06-12 14:26:21 -04005584 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5585 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005586 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005587 auto *operand_type = operand->getType();
5588 // Images and samplers can be passed as function parameters without
5589 // variable pointers.
5590 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5591 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005592 auto sc =
5593 GetStorageClass(operand->getType()->getPointerAddressSpace());
5594 if (sc == spv::StorageClassStorageBuffer) {
5595 // Passing SSBO by reference requires variable pointers storage
5596 // buffer.
5597 setVariablePointersStorageBuffer(true);
5598 } else if (sc == spv::StorageClassWorkgroup) {
5599 // Workgroup references require variable pointers if they are not
5600 // memory object declarations.
5601 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5602 // Workgroup accessor represents a variable reference.
5603 if (!operand_call->getCalledFunction()->getName().startswith(
5604 clspv::WorkgroupAccessorFunction()))
5605 setVariablePointers(true);
5606 } else {
5607 // Arguments are function parameters.
5608 if (!isa<Argument>(operand))
5609 setVariablePointers(true);
5610 }
5611 }
5612 }
5613 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005614 }
5615
David Neto87846742018-04-11 17:36:22 -04005616 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5617 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005618 SPIRVInstList.insert(InsertPoint, CallInst);
5619 }
5620 }
5621 }
5622}
5623
David Neto1a1a0582017-07-07 12:01:44 -04005624void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005625 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005626 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005627 }
David Neto1a1a0582017-07-07 12:01:44 -04005628
5629 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005630
5631 // Find an iterator pointing just past the last decoration.
5632 bool seen_decorations = false;
5633 auto DecoInsertPoint =
5634 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5635 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5636 const bool is_decoration =
5637 Inst->getOpcode() == spv::OpDecorate ||
5638 Inst->getOpcode() == spv::OpMemberDecorate;
5639 if (is_decoration) {
5640 seen_decorations = true;
5641 return false;
5642 } else {
5643 return seen_decorations;
5644 }
5645 });
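  // The predicate fires on the first non-decoration instruction that follows
  // at least one OpDecorate/OpMemberDecorate, so DecoInsertPoint ends up just
  // past the existing decoration block.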
5646
David Netoc6f3ab22018-04-06 18:02:31 -04005647 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5648 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005649 for (auto *type : getTypesNeedingArrayStride()) {
5650 Type *elemTy = nullptr;
5651 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5652 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005653 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005654 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005655 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005656 elemTy = seqTy->getSequentialElementType();
5657 } else {
5658 errs() << "Unhandled strided type " << *type << "\n";
5659 llvm_unreachable("Unhandled strided type");
5660 }
David Neto1a1a0582017-07-07 12:01:44 -04005661
5662 // Ops[0] = Target ID
5663 // Ops[1] = Decoration (ArrayStride)
5664 // Ops[2] = Stride number (Literal Number)
5665 SPIRVOperandList Ops;
5666
David Neto85082642018-03-24 06:55:20 -07005667 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005668 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005669
5670 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5671 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005672
David Neto87846742018-04-11 17:36:22 -04005673 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005674 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5675 }
David Netoc6f3ab22018-04-06 18:02:31 -04005676
5677 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005678 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5679 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005680 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005681 SPIRVOperandList Ops;
5682 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5683 << MkNum(arg_info.spec_id);
5684 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005685 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005686 }
David Neto1a1a0582017-07-07 12:01:44 -04005687}
5688
David Neto22f144c2017-06-12 14:26:21 -04005689glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005690
5691 const auto &fi = Builtins::Lookup(Name);
5692 switch (fi) {
5693 case Builtins::kClamp: {
5694 auto param_type = fi.getParameter(0);
5695 if (param_type.type_id == Type::FloatTyID) {
5696 return glsl::ExtInst::ExtInstFClamp;
5697 }
5698 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5699 : glsl::ExtInst::ExtInstUClamp;
5700 }
5701 case Builtins::kMax: {
5702 auto param_type = fi.getParameter(0);
5703 if (param_type.type_id == Type::FloatTyID) {
5704 return glsl::ExtInst::ExtInstFMax;
5705 }
5706 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5707 : glsl::ExtInst::ExtInstUMax;
5708 }
5709 case Builtins::kMin: {
5710 auto param_type = fi.getParameter(0);
5711 if (param_type.type_id == Type::FloatTyID) {
5712 return glsl::ExtInst::ExtInstFMin;
5713 }
5714 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5715 : glsl::ExtInst::ExtInstUMin;
5716 }
5717 case Builtins::kAbs:
5718 return glsl::ExtInst::ExtInstSAbs;
5719 case Builtins::kFmax:
5720 return glsl::ExtInst::ExtInstFMax;
5721 case Builtins::kFmin:
5722 return glsl::ExtInst::ExtInstFMin;
5723 case Builtins::kDegrees:
5724 return glsl::ExtInst::ExtInstDegrees;
5725 case Builtins::kRadians:
5726 return glsl::ExtInst::ExtInstRadians;
5727 case Builtins::kMix:
5728 return glsl::ExtInst::ExtInstFMix;
5729 case Builtins::kAcos:
5730 case Builtins::kAcospi:
5731 return glsl::ExtInst::ExtInstAcos;
5732 case Builtins::kAcosh:
5733 return glsl::ExtInst::ExtInstAcosh;
5734 case Builtins::kAsin:
5735 case Builtins::kAsinpi:
5736 return glsl::ExtInst::ExtInstAsin;
5737 case Builtins::kAsinh:
5738 return glsl::ExtInst::ExtInstAsinh;
5739 case Builtins::kAtan:
5740 case Builtins::kAtanpi:
5741 return glsl::ExtInst::ExtInstAtan;
5742 case Builtins::kAtanh:
5743 return glsl::ExtInst::ExtInstAtanh;
5744 case Builtins::kAtan2:
5745 case Builtins::kAtan2pi:
5746 return glsl::ExtInst::ExtInstAtan2;
5747 case Builtins::kCeil:
5748 return glsl::ExtInst::ExtInstCeil;
5749 case Builtins::kSin:
5750 case Builtins::kHalfSin:
5751 case Builtins::kNativeSin:
5752 return glsl::ExtInst::ExtInstSin;
5753 case Builtins::kSinh:
5754 return glsl::ExtInst::ExtInstSinh;
5755 case Builtins::kCos:
5756 case Builtins::kHalfCos:
5757 case Builtins::kNativeCos:
5758 return glsl::ExtInst::ExtInstCos;
5759 case Builtins::kCosh:
5760 return glsl::ExtInst::ExtInstCosh;
5761 case Builtins::kTan:
5762 case Builtins::kHalfTan:
5763 case Builtins::kNativeTan:
5764 return glsl::ExtInst::ExtInstTan;
5765 case Builtins::kTanh:
5766 return glsl::ExtInst::ExtInstTanh;
5767 case Builtins::kExp:
5768 case Builtins::kHalfExp:
5769 case Builtins::kNativeExp:
5770 return glsl::ExtInst::ExtInstExp;
5771 case Builtins::kExp2:
5772 case Builtins::kHalfExp2:
5773 case Builtins::kNativeExp2:
5774 return glsl::ExtInst::ExtInstExp2;
5775 case Builtins::kLog:
5776 case Builtins::kHalfLog:
5777 case Builtins::kNativeLog:
5778 return glsl::ExtInst::ExtInstLog;
5779 case Builtins::kLog2:
5780 case Builtins::kHalfLog2:
5781 case Builtins::kNativeLog2:
5782 return glsl::ExtInst::ExtInstLog2;
5783 case Builtins::kFabs:
5784 return glsl::ExtInst::ExtInstFAbs;
5785 case Builtins::kFma:
5786 return glsl::ExtInst::ExtInstFma;
5787 case Builtins::kFloor:
5788 return glsl::ExtInst::ExtInstFloor;
5789 case Builtins::kLdexp:
5790 return glsl::ExtInst::ExtInstLdexp;
5791 case Builtins::kPow:
5792 case Builtins::kPowr:
5793 case Builtins::kHalfPowr:
5794 case Builtins::kNativePowr:
5795 return glsl::ExtInst::ExtInstPow;
5796 case Builtins::kRound:
5797 return glsl::ExtInst::ExtInstRound;
5798 case Builtins::kSqrt:
5799 case Builtins::kHalfSqrt:
5800 case Builtins::kNativeSqrt:
5801 return glsl::ExtInst::ExtInstSqrt;
5802 case Builtins::kRsqrt:
5803 case Builtins::kHalfRsqrt:
5804 case Builtins::kNativeRsqrt:
5805 return glsl::ExtInst::ExtInstInverseSqrt;
5806 case Builtins::kTrunc:
5807 return glsl::ExtInst::ExtInstTrunc;
5808 case Builtins::kFrexp:
5809 return glsl::ExtInst::ExtInstFrexp;
5810 case Builtins::kFract:
5811 return glsl::ExtInst::ExtInstFract;
5812 case Builtins::kSign:
5813 return glsl::ExtInst::ExtInstFSign;
5814 case Builtins::kLength:
5815 case Builtins::kFastLength:
5816 return glsl::ExtInst::ExtInstLength;
5817 case Builtins::kDistance:
5818 case Builtins::kFastDistance:
5819 return glsl::ExtInst::ExtInstDistance;
5820 case Builtins::kStep:
5821 return glsl::ExtInst::ExtInstStep;
5822 case Builtins::kSmoothstep:
5823 return glsl::ExtInst::ExtInstSmoothStep;
5824 case Builtins::kCross:
5825 return glsl::ExtInst::ExtInstCross;
5826 case Builtins::kNormalize:
5827 case Builtins::kFastNormalize:
5828 return glsl::ExtInst::ExtInstNormalize;
5829 default:
5830 break;
5831 }
5832
David Neto22f144c2017-06-12 14:26:21 -04005833 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005834 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5835 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5836 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005837 .Default(kGlslExtInstBad);
5838}
5839
5840glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005841 switch (Builtins::Lookup(Name)) {
5842 case Builtins::kClz:
5843 return glsl::ExtInst::ExtInstFindUMsb;
5844 case Builtins::kAcospi:
5845 return glsl::ExtInst::ExtInstAcos;
5846 case Builtins::kAsinpi:
5847 return glsl::ExtInst::ExtInstAsin;
5848 case Builtins::kAtanpi:
5849 return glsl::ExtInst::ExtInstAtan;
5850 case Builtins::kAtan2pi:
5851 return glsl::ExtInst::ExtInstAtan2;
5852 default:
5853 break;
5854 }
5855 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005856}
5857
alan-bakerb6b09dc2018-11-08 16:59:28 -05005858glsl::ExtInst
5859SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005860 auto direct = getExtInstEnum(Name);
5861 if (direct != kGlslExtInstBad)
5862 return direct;
5863 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005864}
5865
David Neto22f144c2017-06-12 14:26:21 -04005866void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005867 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005868}
5869
5870void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5871 WriteOneWord(Inst->getResultID());
5872}
5873
5874void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5875  // High 16 bits : Word Count
5876  // Low 16 bits : Opcode
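  // For example, OpTypeVoid (opcode 19) with a word count of 2 is encoded as
  // the single word 0x00020013.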
5877 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005878 const uint32_t count = Inst->getWordCount();
5879 if (count > 65535) {
5880 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5881 llvm_unreachable("Word count too high");
5882 }
David Neto22f144c2017-06-12 14:26:21 -04005883 Word |= Inst->getWordCount() << 16;
5884 WriteOneWord(Word);
5885}
5886
David Netoef5ba2b2019-12-20 08:35:54 -05005887void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005888 SPIRVOperandType OpTy = Op->getType();
5889 switch (OpTy) {
5890 default: {
5891 llvm_unreachable("Unsupported SPIRV Operand Type???");
5892 break;
5893 }
5894 case SPIRVOperandType::NUMBERID: {
5895 WriteOneWord(Op->getNumID());
5896 break;
5897 }
5898 case SPIRVOperandType::LITERAL_STRING: {
5899 std::string Str = Op->getLiteralStr();
5900 const char *Data = Str.c_str();
5901 size_t WordSize = Str.size() / 4;
5902 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5903 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5904 }
5905
5906 uint32_t Remainder = Str.size() % 4;
5907 uint32_t LastWord = 0;
5908 if (Remainder) {
5909 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5910 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5911 }
5912 }
5913
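    // The last word is written unconditionally: SPIR-V string literals are
    // nul-terminated and padded to a word boundary, so the zero bytes in
    // LastWord supply the terminator even when Str.size() is a multiple of 4.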
5914 WriteOneWord(LastWord);
5915 break;
5916 }
5917 case SPIRVOperandType::LITERAL_INTEGER:
5918 case SPIRVOperandType::LITERAL_FLOAT: {
5919 auto LiteralNum = Op->getLiteralNum();
5920    // TODO: Handle LiteralNum carefully.
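    // Per the SPIR-V spec, literal numbers wider than 32 bits occupy multiple
    // words with the low-order word first; LiteralNum is assumed to already be
    // stored in that order.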
5921 for (auto Word : LiteralNum) {
5922 WriteOneWord(Word);
5923 }
5924 break;
5925 }
5926 }
5927}
5928
5929void SPIRVProducerPass::WriteSPIRVBinary() {
5930 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5931
5932 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005933 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005934 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5935
5936 switch (Opcode) {
5937 default: {
David Neto5c22a252018-03-15 16:07:41 -04005938 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005939 llvm_unreachable("Unsupported SPIRV instruction");
5940 break;
5941 }
5942 case spv::OpCapability:
5943 case spv::OpExtension:
5944 case spv::OpMemoryModel:
5945 case spv::OpEntryPoint:
5946 case spv::OpExecutionMode:
5947 case spv::OpSource:
5948 case spv::OpDecorate:
5949 case spv::OpMemberDecorate:
5950 case spv::OpBranch:
5951 case spv::OpBranchConditional:
5952 case spv::OpSelectionMerge:
5953 case spv::OpLoopMerge:
5954 case spv::OpStore:
5955 case spv::OpImageWrite:
5956 case spv::OpReturnValue:
5957 case spv::OpControlBarrier:
5958 case spv::OpMemoryBarrier:
5959 case spv::OpReturn:
5960 case spv::OpFunctionEnd:
5961 case spv::OpCopyMemory: {
5962 WriteWordCountAndOpcode(Inst);
5963 for (uint32_t i = 0; i < Ops.size(); i++) {
5964 WriteOperand(Ops[i]);
5965 }
5966 break;
5967 }
5968 case spv::OpTypeBool:
5969 case spv::OpTypeVoid:
5970 case spv::OpTypeSampler:
5971 case spv::OpLabel:
5972 case spv::OpExtInstImport:
5973 case spv::OpTypePointer:
5974 case spv::OpTypeRuntimeArray:
5975 case spv::OpTypeStruct:
5976 case spv::OpTypeImage:
5977 case spv::OpTypeSampledImage:
5978 case spv::OpTypeInt:
5979 case spv::OpTypeFloat:
5980 case spv::OpTypeArray:
5981 case spv::OpTypeVector:
5982 case spv::OpTypeFunction: {
5983 WriteWordCountAndOpcode(Inst);
5984 WriteResultID(Inst);
5985 for (uint32_t i = 0; i < Ops.size(); i++) {
5986 WriteOperand(Ops[i]);
5987 }
5988 break;
5989 }
5990 case spv::OpFunction:
5991 case spv::OpFunctionParameter:
5992 case spv::OpAccessChain:
5993 case spv::OpPtrAccessChain:
5994 case spv::OpInBoundsAccessChain:
5995 case spv::OpUConvert:
5996 case spv::OpSConvert:
5997 case spv::OpConvertFToU:
5998 case spv::OpConvertFToS:
5999 case spv::OpConvertUToF:
6000 case spv::OpConvertSToF:
6001 case spv::OpFConvert:
6002 case spv::OpConvertPtrToU:
6003 case spv::OpConvertUToPtr:
6004 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05006005 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04006006 case spv::OpIAdd:
6007 case spv::OpFAdd:
6008 case spv::OpISub:
6009 case spv::OpFSub:
6010 case spv::OpIMul:
6011 case spv::OpFMul:
6012 case spv::OpUDiv:
6013 case spv::OpSDiv:
6014 case spv::OpFDiv:
6015 case spv::OpUMod:
6016 case spv::OpSRem:
6017 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00006018 case spv::OpUMulExtended:
6019 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04006020 case spv::OpBitwiseOr:
6021 case spv::OpBitwiseXor:
6022 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04006023 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04006024 case spv::OpShiftLeftLogical:
6025 case spv::OpShiftRightLogical:
6026 case spv::OpShiftRightArithmetic:
6027 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04006028 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04006029 case spv::OpCompositeExtract:
6030 case spv::OpVectorExtractDynamic:
6031 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04006032 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04006033 case spv::OpVectorInsertDynamic:
6034 case spv::OpVectorShuffle:
6035 case spv::OpIEqual:
6036 case spv::OpINotEqual:
6037 case spv::OpUGreaterThan:
6038 case spv::OpUGreaterThanEqual:
6039 case spv::OpULessThan:
6040 case spv::OpULessThanEqual:
6041 case spv::OpSGreaterThan:
6042 case spv::OpSGreaterThanEqual:
6043 case spv::OpSLessThan:
6044 case spv::OpSLessThanEqual:
6045 case spv::OpFOrdEqual:
6046 case spv::OpFOrdGreaterThan:
6047 case spv::OpFOrdGreaterThanEqual:
6048 case spv::OpFOrdLessThan:
6049 case spv::OpFOrdLessThanEqual:
6050 case spv::OpFOrdNotEqual:
6051 case spv::OpFUnordEqual:
6052 case spv::OpFUnordGreaterThan:
6053 case spv::OpFUnordGreaterThanEqual:
6054 case spv::OpFUnordLessThan:
6055 case spv::OpFUnordLessThanEqual:
6056 case spv::OpFUnordNotEqual:
6057 case spv::OpExtInst:
6058 case spv::OpIsInf:
6059 case spv::OpIsNan:
6060 case spv::OpAny:
6061 case spv::OpAll:
6062 case spv::OpUndef:
6063 case spv::OpConstantNull:
6064 case spv::OpLogicalOr:
6065 case spv::OpLogicalAnd:
6066 case spv::OpLogicalNot:
6067 case spv::OpLogicalNotEqual:
6068 case spv::OpConstantComposite:
6069 case spv::OpSpecConstantComposite:
6070 case spv::OpConstantTrue:
6071 case spv::OpConstantFalse:
6072 case spv::OpConstant:
6073 case spv::OpSpecConstant:
6074 case spv::OpVariable:
6075 case spv::OpFunctionCall:
6076 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05006077 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04006078 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04006079 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05006080 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04006081 case spv::OpSelect:
6082 case spv::OpPhi:
6083 case spv::OpLoad:
6084 case spv::OpAtomicIAdd:
6085 case spv::OpAtomicISub:
6086 case spv::OpAtomicExchange:
6087 case spv::OpAtomicIIncrement:
6088 case spv::OpAtomicIDecrement:
6089 case spv::OpAtomicCompareExchange:
6090 case spv::OpAtomicUMin:
6091 case spv::OpAtomicSMin:
6092 case spv::OpAtomicUMax:
6093 case spv::OpAtomicSMax:
6094 case spv::OpAtomicAnd:
6095 case spv::OpAtomicOr:
6096 case spv::OpAtomicXor:
6097 case spv::OpDot: {
6098 WriteWordCountAndOpcode(Inst);
6099 WriteOperand(Ops[0]);
6100 WriteResultID(Inst);
6101 for (uint32_t i = 1; i < Ops.size(); i++) {
6102 WriteOperand(Ops[i]);
6103 }
6104 break;
6105 }
6106 }
6107 }
6108}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006109
alan-bakerb6b09dc2018-11-08 16:59:28 -05006110bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
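  // A type is "nullable" when a zero/null constant can be produced for it.
  // Opaque image and sampler types, and aggregates containing them, are
  // excluded.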
Alan Baker9bf93fb2018-08-28 16:59:26 -04006111 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006112 case Type::HalfTyID:
6113 case Type::FloatTyID:
6114 case Type::DoubleTyID:
6115 case Type::IntegerTyID:
6116 case Type::VectorTyID:
6117 return true;
6118 case Type::PointerTyID: {
6119 const PointerType *pointer_type = cast<PointerType>(type);
6120 if (pointer_type->getPointerAddressSpace() !=
6121 AddressSpace::UniformConstant) {
6122 auto pointee_type = pointer_type->getPointerElementType();
6123 if (pointee_type->isStructTy() &&
6124 cast<StructType>(pointee_type)->isOpaque()) {
6125 // Images and samplers are not nullable.
6126 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006127 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006128 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006129 return true;
6130 }
6131 case Type::ArrayTyID:
alan-baker077517b2020-03-19 13:52:12 -04006132 return IsTypeNullable(cast<SequentialType>(type)->getElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05006133 case Type::StructTyID: {
6134 const StructType *struct_type = cast<StructType>(type);
6135 // Images and samplers are not nullable.
6136 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006137 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006138 for (const auto element : struct_type->elements()) {
6139 if (!IsTypeNullable(element))
6140 return false;
6141 }
6142 return true;
6143 }
6144 default:
6145 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006146 }
6147}
Alan Bakerfcda9482018-10-02 17:09:59 -04006148
6149void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
6150 if (auto *offsets_md =
6151 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
6152    // Metadata is stored as key-value pair operands. The first element of each
6153 // operand is the type and the second is a vector of offsets.
6154 for (const auto *operand : offsets_md->operands()) {
6155 const auto *pair = cast<MDTuple>(operand);
6156 auto *type =
6157 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6158 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6159 std::vector<uint32_t> offsets;
6160 for (const Metadata *offset_md : offset_vector->operands()) {
6161 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006162 offsets.push_back(static_cast<uint32_t>(
6163 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006164 }
6165 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6166 }
6167 }
6168
6169 if (auto *sizes_md =
6170 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
6171 // Metadata is stored as key-value pair operands. The first element of each
6172 // operand is the type and the second is a triple of sizes: type size in
6173 // bits, store size and alloc size.
6174 for (const auto *operand : sizes_md->operands()) {
6175 const auto *pair = cast<MDTuple>(operand);
6176 auto *type =
6177 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6178 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6179 uint64_t type_size_in_bits =
6180 cast<ConstantInt>(
6181 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6182 ->getZExtValue();
6183 uint64_t type_store_size =
6184 cast<ConstantInt>(
6185 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6186 ->getZExtValue();
6187 uint64_t type_alloc_size =
6188 cast<ConstantInt>(
6189 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6190 ->getZExtValue();
6191 RemappedUBOTypeSizes.insert(std::make_pair(
6192 type, std::make_tuple(type_size_in_bits, type_store_size,
6193 type_alloc_size)));
6194 }
6195 }
6196}
6197
6198uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6199 const DataLayout &DL) {
6200 auto iter = RemappedUBOTypeSizes.find(type);
6201 if (iter != RemappedUBOTypeSizes.end()) {
6202 return std::get<0>(iter->second);
6203 }
6204
6205 return DL.getTypeSizeInBits(type);
6206}
6207
6208uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6209 auto iter = RemappedUBOTypeSizes.find(type);
6210 if (iter != RemappedUBOTypeSizes.end()) {
6211 return std::get<1>(iter->second);
6212 }
6213
6214 return DL.getTypeStoreSize(type);
6215}
6216
6217uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6218 auto iter = RemappedUBOTypeSizes.find(type);
6219 if (iter != RemappedUBOTypeSizes.end()) {
6220 return std::get<2>(iter->second);
6221 }
6222
6223 return DL.getTypeAllocSize(type);
6224}
alan-baker5b86ed72019-02-15 08:26:50 -05006225
Kévin Petitbbbda972020-03-03 19:16:31 +00006226uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
6227 StructType *type, unsigned member, const DataLayout &DL) {
6228 const auto StructLayout = DL.getStructLayout(type);
6229 // Search for the correct offsets if this type was remapped.
6230 std::vector<uint32_t> *offsets = nullptr;
6231 auto iter = RemappedUBOTypeOffsets.find(type);
6232 if (iter != RemappedUBOTypeOffsets.end()) {
6233 offsets = &iter->second;
6234 }
6235 auto ByteOffset =
6236 static_cast<uint32_t>(StructLayout->getElementOffset(member));
6237 if (offsets) {
6238 ByteOffset = (*offsets)[member];
6239 }
6240
6241 return ByteOffset;
6242}
6243
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006244void SPIRVProducerPass::setVariablePointersCapabilities(
6245 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006246 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6247 setVariablePointersStorageBuffer(true);
6248 } else {
6249 setVariablePointers(true);
6250 }
6251}
6252
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006253Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006254 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6255 return GetBasePointer(gep->getPointerOperand());
6256 }
6257
6258 // Conservatively return |v|.
6259 return v;
6260}
6261
6262bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6263 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6264 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6265 if (lhs_call->getCalledFunction()->getName().startswith(
6266 clspv::ResourceAccessorFunction()) &&
6267 rhs_call->getCalledFunction()->getName().startswith(
6268 clspv::ResourceAccessorFunction())) {
6269 // For resource accessors, match descriptor set and binding.
6270 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6271 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6272 return true;
6273 } else if (lhs_call->getCalledFunction()->getName().startswith(
6274 clspv::WorkgroupAccessorFunction()) &&
6275 rhs_call->getCalledFunction()->getName().startswith(
6276 clspv::WorkgroupAccessorFunction())) {
6277 // For workgroup resources, match spec id.
6278 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6279 return true;
6280 }
6281 }
6282 }
6283
6284 return false;
6285}
6286
6287bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
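  // Determines whether a pointer-producing select/phi chooses between pointers
  // into the same object (or null/undef operands), which lets the module avoid
  // requiring the full VariablePointers capability.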
6288 assert(inst->getType()->isPointerTy());
6289 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6290 spv::StorageClassStorageBuffer);
6291 const bool hack_undef = clspv::Option::HackUndef();
6292 if (auto *select = dyn_cast<SelectInst>(inst)) {
6293 auto *true_base = GetBasePointer(select->getTrueValue());
6294 auto *false_base = GetBasePointer(select->getFalseValue());
6295
6296 if (true_base == false_base)
6297 return true;
6298
6299 // If either the true or false operand is a null, then we satisfy the same
6300 // object constraint.
6301 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6302 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6303 return true;
6304 }
6305
6306 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6307 if (false_cst->isNullValue() ||
6308 (hack_undef && isa<UndefValue>(false_base)))
6309 return true;
6310 }
6311
6312 if (sameResource(true_base, false_base))
6313 return true;
6314 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6315 Value *value = nullptr;
6316 bool ok = true;
6317 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6318 auto *base = GetBasePointer(phi->getIncomingValue(i));
6319      // Null values satisfy the constraint of selecting from the same
6320      // object.
6321 if (!value) {
6322 if (auto *cst = dyn_cast<Constant>(base)) {
6323 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6324 value = base;
6325 } else {
6326 value = base;
6327 }
6328 } else if (base != value) {
6329 if (auto *base_cst = dyn_cast<Constant>(base)) {
6330 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6331 continue;
6332 }
6333
6334 if (sameResource(value, base))
6335 continue;
6336
6337 // Values don't represent the same base.
6338 ok = false;
6339 }
6340 }
6341
6342 return ok;
6343 }
6344
6345 // Conservatively return false.
6346 return false;
6347}
alan-bakere9308012019-03-15 10:25:13 -04006348
6349bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6350 if (!Arg.getType()->isPointerTy() ||
6351 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6352 // Only SSBOs need to be annotated as coherent.
6353 return false;
6354 }
6355
6356 DenseSet<Value *> visited;
6357 std::vector<Value *> stack;
6358 for (auto *U : Arg.getParent()->users()) {
6359 if (auto *call = dyn_cast<CallInst>(U)) {
6360 stack.push_back(call->getOperand(Arg.getArgNo()));
6361 }
6362 }
6363
6364 while (!stack.empty()) {
6365 Value *v = stack.back();
6366 stack.pop_back();
6367
6368 if (!visited.insert(v).second)
6369 continue;
6370
6371 auto *resource_call = dyn_cast<CallInst>(v);
6372 if (resource_call &&
6373 resource_call->getCalledFunction()->getName().startswith(
6374 clspv::ResourceAccessorFunction())) {
6375 // If this is a resource accessor function, check if the coherent operand
6376 // is set.
6377 const auto coherent =
6378 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6379 ->getZExtValue());
6380 if (coherent == 1)
6381 return true;
6382 } else if (auto *arg = dyn_cast<Argument>(v)) {
6383 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006384 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006385 if (auto *call = dyn_cast<CallInst>(U)) {
6386 stack.push_back(call->getOperand(arg->getArgNo()));
6387 }
6388 }
6389 } else if (auto *user = dyn_cast<User>(v)) {
6390 // If this is a user, traverse all operands that could lead to resource
6391 // variables.
6392 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6393 Value *operand = user->getOperand(i);
6394 if (operand->getType()->isPointerTy() &&
6395 operand->getType()->getPointerAddressSpace() ==
6396 clspv::AddressSpace::Global) {
6397 stack.push_back(operand);
6398 }
6399 }
6400 }
6401 }
6402
6403 // No coherent resource variables encountered.
6404 return false;
6405}
alan-baker06cad652019-12-03 17:56:47 -05006406
6407void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6408 // First, track loop merges and continues.
6409 DenseSet<BasicBlock *> LoopMergesAndContinues;
6410 for (auto &F : module) {
6411 if (F.isDeclaration())
6412 continue;
6413
6414 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6415 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6416 std::deque<BasicBlock *> order;
6417 DenseSet<BasicBlock *> visited;
6418 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6419
6420 for (auto BB : order) {
6421 auto terminator = BB->getTerminator();
6422 auto branch = dyn_cast<BranchInst>(terminator);
6423 if (LI.isLoopHeader(BB)) {
6424 auto L = LI.getLoopFor(BB);
6425 BasicBlock *ContinueBB = nullptr;
6426 BasicBlock *MergeBB = nullptr;
6427
6428 MergeBB = L->getExitBlock();
6429 if (!MergeBB) {
6430          // The StructurizeCFG pass converts the CFG into a triangle shape
6431          // whose regions have a single entry and exit. As a result, a loop
6432          // should not have multiple exits.
6433 llvm_unreachable("Loop has multiple exits???");
6434 }
6435
6436 if (L->isLoopLatch(BB)) {
6437 ContinueBB = BB;
6438 } else {
6439          // Per SPIR-V spec section 2.11, the Continue Target must dominate
6440          // the back-edge block.
6441 BasicBlock *Header = L->getHeader();
6442 BasicBlock *Latch = L->getLoopLatch();
6443 for (auto *loop_block : L->blocks()) {
6444 if (loop_block == Header) {
6445 continue;
6446 }
6447
6448            // Check whether this block dominates the block with the back-edge.
6449 // The loop latch is the single block with a back-edge. If it was
6450 // possible, StructurizeCFG made the loop conform to this
6451 // requirement, otherwise |Latch| is a nullptr.
6452 if (DT.dominates(loop_block, Latch)) {
6453 ContinueBB = loop_block;
6454 }
6455 }
6456
6457 if (!ContinueBB) {
6458 llvm_unreachable("Wrong continue block from loop");
6459 }
6460 }
6461
6462 // Record the continue and merge blocks.
6463 MergeBlocks[BB] = MergeBB;
6464 ContinueBlocks[BB] = ContinueBB;
6465 LoopMergesAndContinues.insert(MergeBB);
6466 LoopMergesAndContinues.insert(ContinueBB);
6467 } else if (branch && branch->isConditional()) {
6468 auto L = LI.getLoopFor(BB);
6469 bool HasBackedge = false;
6470 while (L && !HasBackedge) {
6471 if (L->isLoopLatch(BB)) {
6472 HasBackedge = true;
6473 }
6474 L = L->getParentLoop();
6475 }
6476
6477 if (!HasBackedge) {
6478 // Only need a merge if the branch doesn't include a loop break or
6479 // continue.
6480 auto true_bb = branch->getSuccessor(0);
6481 auto false_bb = branch->getSuccessor(1);
6482 if (!LoopMergesAndContinues.count(true_bb) &&
6483 !LoopMergesAndContinues.count(false_bb)) {
6484            // The StructurizeCFG pass has already manipulated the CFG, so just
6485            // use the false block of the branch instruction as the merge block.
6486 MergeBlocks[BB] = false_bb;
6487 }
6488 }
6489 }
6490 }
6491 }
6492}