// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
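      // For example, a 7-character string plus its null terminator occupies 8
      // bytes, and (7 + 4) / 4 = 2 words.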
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}

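// A sketch of the intended usage (not from the original source): operands are
// accumulated with the Mk* helpers and operator<< before an instruction is
// built, e.g.
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkNum(32) << MkString("some name");
// where result_type_id stands for an ID that has already been assigned.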
struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the single
  // operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};

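// A sketch (not part of the original file) of how these pieces fit together;
// result_type_id, kSomeLiteral, and nextID are placeholders:
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkNum(kSomeLiteral);
//   auto *Inst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
// The word count is derived from the operands, and the 16-bit bound is
// checked when the binary is emitted.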
struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GeneratePushConstantDescriptormapEntries(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate descriptor map entries for all push constants
  GeneratePushConstantDescriptormapEntries(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
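    // Reassemble the byte stream into little-endian 32-bit words. For
    // example, the leading bytes 0x03 0x02 0x23 0x07 become the SPIR-V magic
    // number word 0x07230203.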
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
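  // A SPIR-V module starts with a five-word header: the magic number, the
  // version, a generator word identifying the producer, the ID bound, and a
  // reserved schema word. The bound is not known yet, so a placeholder is
  // written here and patched later in patchHeader().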
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR for functions, such as global variables
  // for arguments, and constants and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. It runs
  // ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constant 0 and 1 so the constants are added
          // here.
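          // For example (a sketch), "zext i1 %b to i32" is later emitted
          // roughly as "OpSelect %uint %b %uint_1 %uint_0", so both constants
          // must already have IDs assigned.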

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          }

          if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_array_wo_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
1047
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001048void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
David Neto862b7d82018-06-14 18:48:37 -04001049 ResourceVarInfoList.clear();
1050 FunctionToResourceVarsMap.clear();
1051 ModuleOrderedResourceVars.reset();
1052 // Normally, there is one resource variable per clspv.resource.var.*
1053 // function, since that is unique'd by arg type and index. By design,
1054 // we can share these resource variables across kernels because all
1055 // kernels use the same descriptor set.
1056 //
1057 // But if the user requested distinct descriptor sets per kernel, then
1058 // the descriptor allocator has made different (set,binding) pairs for
1059 // the same (type,arg_index) pair. Since we can decorate a resource
1060 // variable with only exactly one DescriptorSet and Binding, we are
1061 // forced in this case to make distinct resource variables whenever
Kévin Petitbbbda972020-03-03 19:16:31 +00001062 // the same clspv.resource.var.X function is seen with disintct
David Neto862b7d82018-06-14 18:48:37 -04001063 // (set,binding) values.
1064 const bool always_distinct_sets =
1065 clspv::Option::DistinctKernelDescriptorSets();
1066 for (Function &F : M) {
1067 // Rely on the fact the resource var functions have a stable ordering
1068 // in the module.
Alan Baker202c8c72018-08-13 13:47:44 -04001069 if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001070 // Find all calls to this function with distinct set and binding pairs.
1071 // Save them in ResourceVarInfoList.
1072
1073 // Determine uniqueness of the (set,binding) pairs only within this
1074 // one resource-var builtin function.
1075 using SetAndBinding = std::pair<unsigned, unsigned>;
1076 // Maps set and binding to the resource var info.
1077 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1078 bool first_use = true;
1079 for (auto &U : F.uses()) {
1080 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1081 const auto set = unsigned(
1082 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1083 const auto binding = unsigned(
1084 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1085 const auto arg_kind = clspv::ArgKind(
1086 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1087 const auto arg_index = unsigned(
1088 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001089 const auto coherent = unsigned(
1090 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001091
1092 // Find or make the resource var info for this combination.
1093 ResourceVarInfo *rv = nullptr;
1094 if (always_distinct_sets) {
1095 // Make a new resource var any time we see a different
1096 // (set,binding) pair.
1097 SetAndBinding key{set, binding};
1098 auto where = set_and_binding_map.find(key);
1099 if (where == set_and_binding_map.end()) {
1100 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001101 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001102 ResourceVarInfoList.emplace_back(rv);
1103 set_and_binding_map[key] = rv;
1104 } else {
1105 rv = where->second;
1106 }
1107 } else {
1108 // The default is to make exactly one resource for each
1109 // clspv.resource.var.* function.
1110 if (first_use) {
1111 first_use = false;
1112 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001113 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001114 ResourceVarInfoList.emplace_back(rv);
1115 } else {
1116 rv = ResourceVarInfoList.back().get();
1117 }
1118 }
1119
1120 // Now populate FunctionToResourceVarsMap.
1121 auto &mapping =
1122 FunctionToResourceVarsMap[call->getParent()->getParent()];
1123 while (mapping.size() <= arg_index) {
1124 mapping.push_back(nullptr);
1125 }
1126 mapping[arg_index] = rv;
1127 }
1128 }
1129 }
1130 }
1131
1132 // Populate ModuleOrderedResourceVars.
1133 for (Function &F : M) {
1134 auto where = FunctionToResourceVarsMap.find(&F);
1135 if (where != FunctionToResourceVarsMap.end()) {
1136 for (auto &rv : where->second) {
1137 if (rv != nullptr) {
1138 ModuleOrderedResourceVars.insert(rv);
1139 }
1140 }
1141 }
1142 }
1143 if (ShowResourceVars) {
1144 for (auto *info : ModuleOrderedResourceVars) {
1145 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1146 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1147 << "\n";
1148 }
1149 }
1150}
1151
David Neto22f144c2017-06-12 14:26:21 -04001152bool SPIRVProducerPass::FindExtInst(Module &M) {
1153 LLVMContext &Context = M.getContext();
1154 bool HasExtInst = false;
1155
1156 for (Function &F : M) {
1157 for (BasicBlock &BB : F) {
1158 for (Instruction &I : BB) {
1159 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1160 Function *Callee = Call->getCalledFunction();
1161 // Check whether this call is for extended instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001162 auto callee_name = Callee->getName();
1163 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1164 const glsl::ExtInst IndirectEInst =
1165 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001166
David Neto3fbb4072017-10-16 11:28:14 -04001167 HasExtInst |=
1168 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1169
1170 if (IndirectEInst) {
1171 // Register extra constants if needed.
1172
1173 // Registers a type and constant for computing the result of the
1174 // given instruction. If the result of the instruction is a vector,
1175 // then make a splat vector constant with the same number of
1176 // elements.
1177 auto register_constant = [this, &I](Constant *constant) {
1178 FindType(constant->getType());
1179 FindConstant(constant);
1180 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1181 // Register the splat vector of the value with the same
1182 // width as the result of the instruction.
1183 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001184 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001185 constant);
1186 FindConstant(vec_constant);
1187 FindType(vec_constant->getType());
1188 }
1189 };
1190 switch (IndirectEInst) {
1191 case glsl::ExtInstFindUMsb:
1192 // clz needs OpExtInst and OpISub with constant 31, or splat
1193 // vector of 31. Add it to the constant list here.
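// For example (sketch): for a 32-bit x, clz(x) is lowered as
// 31 - FindUMsb(x), so the 31 registered here is that subtraction operand.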
1194 register_constant(
1195 ConstantInt::get(Type::getInt32Ty(Context), 31));
1196 break;
1197 case glsl::ExtInstAcos:
1198 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001199 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001200 case glsl::ExtInstAtan2:
1201 // We need 1/pi for acospi, asinpi, atan2pi.
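// For example (sketch): acospi(x) is lowered roughly as acos(x) * (1/pi),
// which is why kOneOverPi is registered here.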
1202 register_constant(
1203 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1204 break;
1205 default:
1206 assert(false && "internally inconsistent");
1207 }
David Neto22f144c2017-06-12 14:26:21 -04001208 }
1209 }
1210 }
1211 }
1212 }
1213
1214 return HasExtInst;
1215}
1216
1217void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1218 // Investigate global variable's type.
1219 FindType(GV.getType());
1220}
1221
1222void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1223 // Investigate function's type.
1224 FunctionType *FTy = F.getFunctionType();
1225
1226 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1227 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001228 // Handle a regular function with global constant parameters.
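// Illustrative only (hypothetical helper, not from this module): for
//   void helper(__constant float *lut);   // OpenCL C
// the function type recorded below gives that pointer parameter the
// ModuleScopePrivate address space, matching the rewritten __constant global.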
David Neto22f144c2017-06-12 14:26:21 -04001229 if (GlobalConstFuncTyMap.count(FTy)) {
1230 uint32_t GVCstArgIdx = GlobalConstFuncTyMap[FTy].second;
1231 SmallVector<Type *, 4> NewFuncParamTys;
1232 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1233 Type *ParamTy = FTy->getParamType(i);
1234 if (i == GVCstArgIdx) {
1235 Type *EleTy = ParamTy->getPointerElementType();
1236 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1237 }
1238
1239 NewFuncParamTys.push_back(ParamTy);
1240 }
1241
1242 FunctionType *NewFTy =
1243 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1244 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1245 FTy = NewFTy;
1246 }
1247
1248 FindType(FTy);
1249 } else {
1250 // Kernel functions take no SPIR-V parameters, so create a new
1251 // parameterless function type and add it to the type map.
1252 SmallVector<Type *, 4> NewFuncParamTys;
1253 FunctionType *NewFTy =
1254 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1255 FindType(NewFTy);
1256 }
1257
1258 // Investigate instructions' type in function body.
1259 for (BasicBlock &BB : F) {
1260 for (Instruction &I : BB) {
1261 if (isa<ShuffleVectorInst>(I)) {
1262 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1263 // Ignore type for mask of shuffle vector instruction.
1264 if (i == 2) {
1265 continue;
1266 }
1267
1268 Value *Op = I.getOperand(i);
1269 if (!isa<MetadataAsValue>(Op)) {
1270 FindType(Op->getType());
1271 }
1272 }
1273
1274 FindType(I.getType());
1275 continue;
1276 }
1277
David Neto862b7d82018-06-14 18:48:37 -04001278 CallInst *Call = dyn_cast<CallInst>(&I);
1279
1280 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001281 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001282 // This is a fake call representing access to a resource variable.
1283 // We handle that elsewhere.
1284 continue;
1285 }
1286
Alan Baker202c8c72018-08-13 13:47:44 -04001287 if (Call && Call->getCalledFunction()->getName().startswith(
1288 clspv::WorkgroupAccessorFunction())) {
1289 // This is a fake call representing access to a workgroup variable.
1290 // We handle that elsewhere.
1291 continue;
1292 }
1293
alan-bakerf083bed2020-01-29 08:15:42 -05001294 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1295 // OpCompositeExtract which takes literal values for indices. As a result
1296 // don't map the type of indices.
1297 if (I.getOpcode() == Instruction::ExtractValue) {
1298 FindType(I.getOperand(0)->getType());
1299 continue;
1300 }
1301 if (I.getOpcode() == Instruction::InsertValue) {
1302 FindType(I.getOperand(0)->getType());
1303 FindType(I.getOperand(1)->getType());
1304 continue;
1305 }
1306
1307 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1308 // the index is a constant. In such a case don't map the index type.
1309 if (I.getOpcode() == Instruction::ExtractElement) {
1310 FindType(I.getOperand(0)->getType());
1311 Value *op1 = I.getOperand(1);
1312 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1313 FindType(op1->getType());
1314 }
1315 continue;
1316 }
1317 if (I.getOpcode() == Instruction::InsertElement) {
1318 FindType(I.getOperand(0)->getType());
1319 FindType(I.getOperand(1)->getType());
1320 Value *op2 = I.getOperand(2);
1321 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1322 FindType(op2->getType());
1323 }
1324 continue;
1325 }
1326
David Neto22f144c2017-06-12 14:26:21 -04001327 // Work through the operands of the instruction.
1328 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1329 Value *const Op = I.getOperand(i);
1330 // If any of the operands is a constant, find the type!
1331 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1332 FindType(Op->getType());
1333 }
1334 }
1335
1336 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001337 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001338 // Avoid checking the call instruction's type.
1339 break;
1340 }
Alan Baker202c8c72018-08-13 13:47:44 -04001341 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1342 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1343 clspv::WorkgroupAccessorFunction())) {
1344 // This is a fake call representing access to a workgroup variable.
1345 // We handle that elsewhere.
1346 continue;
1347 }
1348 }
David Neto22f144c2017-06-12 14:26:21 -04001349 if (!isa<MetadataAsValue>(&Op)) {
1350 FindType(Op->getType());
1351 continue;
1352 }
1353 }
1354
David Neto22f144c2017-06-12 14:26:21 -04001355 // We don't want to track the type of this call as we are going to replace
1356 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001357 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001358 Call->getCalledFunction()->getName())) {
1359 continue;
1360 }
1361
1362 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1363 // If the gep's base operand has ModuleScopePrivate address space, give the
1364 // gep's result the ModuleScopePrivate address space as well.
1365 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1366 // Add pointer type with private address space for global constant to
1367 // type list.
1368 Type *EleTy = I.getType()->getPointerElementType();
1369 Type *NewPTy =
1370 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1371
1372 FindType(NewPTy);
1373 continue;
1374 }
1375 }
1376
1377 FindType(I.getType());
1378 }
1379 }
1380}
1381
David Neto862b7d82018-06-14 18:48:37 -04001382void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1383 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001384 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001385 0 < getSamplerMap().size()) {
1386 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1387 if (!SamplerStructTy) {
1388 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1389 }
1390
1391 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1392
1393 FindType(SamplerTy);
1394 }
1395}
1396
1397void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1398 // Record types so they are generated.
1399 TypesNeedingLayout.reset();
1400 StructTypesNeedingBlock.reset();
1401
1402 // To match older clspv codegen, generate the float type first if required
1403 // for images.
1404 for (const auto *info : ModuleOrderedResourceVars) {
1405 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1406 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001407 if (IsIntImageType(info->var_fn->getReturnType())) {
1408 // Nothing for now...
1409 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1410 FindType(Type::getInt32Ty(M.getContext()));
1411 }
1412
1413 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001414 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001415 }
1416 }
1417
1418 for (const auto *info : ModuleOrderedResourceVars) {
1419 Type *type = info->var_fn->getReturnType();
1420
1421 switch (info->arg_kind) {
1422 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001423 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001424 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1425 StructTypesNeedingBlock.insert(sty);
1426 } else {
1427 errs() << *type << "\n";
1428 llvm_unreachable("Buffer arguments must map to structures!");
1429 }
1430 break;
1431 case clspv::ArgKind::Pod:
1432 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1433 StructTypesNeedingBlock.insert(sty);
1434 } else {
1435 errs() << *type << "\n";
1436 llvm_unreachable("POD arguments must map to structures!");
1437 }
1438 break;
1439 case clspv::ArgKind::ReadOnlyImage:
1440 case clspv::ArgKind::WriteOnlyImage:
1441 case clspv::ArgKind::Sampler:
1442 // Sampler and image types map to the pointee type but
1443 // in the uniform constant address space.
1444 type = PointerType::get(type->getPointerElementType(),
1445 clspv::AddressSpace::UniformConstant);
1446 break;
1447 default:
1448 break;
1449 }
1450
1451 // The converted type is the type of the OpVariable we will generate.
1452 // If the pointee type is an array of size zero, FindType will convert it
1453 // to a runtime array.
1454 FindType(type);
1455 }
1456
alan-bakerdcd97412019-09-16 15:32:30 -04001457 // If module constants are clustered in a storage buffer then that struct
1458 // needs layout decorations.
1459 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1460 for (GlobalVariable &GV : M.globals()) {
1461 PointerType *PTy = cast<PointerType>(GV.getType());
1462 const auto AS = PTy->getAddressSpace();
1463 const bool module_scope_constant_external_init =
1464 (AS == AddressSpace::Constant) && GV.hasInitializer();
1465 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1466 if (module_scope_constant_external_init &&
1467 spv::BuiltInMax == BuiltinType) {
1468 StructTypesNeedingBlock.insert(
1469 cast<StructType>(PTy->getPointerElementType()));
1470 }
1471 }
1472 }
1473
Kévin Petitbbbda972020-03-03 19:16:31 +00001474 for (const GlobalVariable &GV : M.globals()) {
1475 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1476 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1477 assert(Ty->isStructTy() && "Push constants have to be structures.");
1478 auto STy = cast<StructType>(Ty);
1479 StructTypesNeedingBlock.insert(STy);
1480 }
1481 }
1482
David Neto862b7d82018-06-14 18:48:37 -04001483 // Traverse the arrays and structures underneath each Block, and
1484 // mark them as needing layout.
1485 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1486 StructTypesNeedingBlock.end());
1487 while (!work_list.empty()) {
1488 Type *type = work_list.back();
1489 work_list.pop_back();
1490 TypesNeedingLayout.insert(type);
1491 switch (type->getTypeID()) {
1492 case Type::ArrayTyID:
1493 work_list.push_back(type->getArrayElementType());
1494 if (!Hack_generate_runtime_array_stride_early) {
1495 // Remember this array type for deferred decoration.
1496 TypesNeedingArrayStride.insert(type);
1497 }
1498 break;
1499 case Type::StructTyID:
1500 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1501 work_list.push_back(elem_ty);
1502 }
1503 default:
1504 // This type and its contained types don't get layout.
1505 break;
1506 }
1507 }
1508}
1509
Alan Baker202c8c72018-08-13 13:47:44 -04001510void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1511 // The SpecId assignment for pointer-to-local arguments is recorded in
1512 // module-level metadata. Translate that information into local argument
1513 // information.
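// Illustrative only (shape inferred from the operand accesses below): each
// metadata tuple looks roughly like
//   !{<kernel function>, i32 <arg_index>, i32 <spec_id>}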
1514 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001515 if (!nmd)
1516 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001517 for (auto operand : nmd->operands()) {
1518 MDTuple *tuple = cast<MDTuple>(operand);
1519 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1520 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001521 ConstantAsMetadata *arg_index_md =
1522 cast<ConstantAsMetadata>(tuple->getOperand(1));
1523 int arg_index = static_cast<int>(
1524 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1525 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001526
1527 ConstantAsMetadata *spec_id_md =
1528 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001529 int spec_id = static_cast<int>(
1530 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001531
1532 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1533 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001534 if (LocalSpecIdInfoMap.count(spec_id))
1535 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001536
1537 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1538 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1539 nextID + 1, nextID + 2,
1540 nextID + 3, spec_id};
1541 LocalSpecIdInfoMap[spec_id] = info;
1542 nextID += 4;
1543
1544 // Ensure the types necessary for this argument get generated.
1545 Type *IdxTy = Type::getInt32Ty(M.getContext());
1546 FindConstant(ConstantInt::get(IdxTy, 0));
1547 FindType(IdxTy);
1548 FindType(arg->getType());
1549 }
1550}
1551
David Neto22f144c2017-06-12 14:26:21 -04001552void SPIRVProducerPass::FindType(Type *Ty) {
1553 TypeList &TyList = getTypeList();
1554
1555 if (0 != TyList.idFor(Ty)) {
1556 return;
1557 }
1558
1559 if (Ty->isPointerTy()) {
1560 auto AddrSpace = Ty->getPointerAddressSpace();
1561 if ((AddressSpace::Constant == AddrSpace) ||
1562 (AddressSpace::Global == AddrSpace)) {
1563 auto PointeeTy = Ty->getPointerElementType();
1564
1565 if (PointeeTy->isStructTy() &&
1566 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1567 FindType(PointeeTy);
1568 auto ActualPointerTy =
1569 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1570 FindType(ActualPointerTy);
1571 return;
1572 }
1573 }
1574 }
1575
David Neto862b7d82018-06-14 18:48:37 -04001576 // By convention, an LLVM array type with 0 elements maps to
1577 // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which
1578 // has a constant number of elements, so we also need the i32 type
1579 // used for the length constant.
1580 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1581 if (arrayTy->getNumElements() > 0) {
1582 LLVMContext &Context = Ty->getContext();
1583 FindType(Type::getInt32Ty(Context));
1584 }
David Neto22f144c2017-06-12 14:26:21 -04001585 }
1586
1587 for (Type *SubTy : Ty->subtypes()) {
1588 FindType(SubTy);
1589 }
1590
1591 TyList.insert(Ty);
1592}
1593
1594void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1595 // If the global variable has a (non undef) initializer.
1596 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001597 // Generate the constant if it's not the initializer to a module scope
1598 // constant that we will expect in a storage buffer.
1599 const bool module_scope_constant_external_init =
1600 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1601 clspv::Option::ModuleConstantsInStorageBuffer();
1602 if (!module_scope_constant_external_init) {
1603 FindConstant(GV.getInitializer());
1604 }
David Neto22f144c2017-06-12 14:26:21 -04001605 }
1606}
1607
1608void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1609 // Investigate constants in function body.
1610 for (BasicBlock &BB : F) {
1611 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001612 if (auto *call = dyn_cast<CallInst>(&I)) {
1613 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001614 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001615 // We've handled these constants elsewhere, so skip it.
1616 continue;
1617 }
Alan Baker202c8c72018-08-13 13:47:44 -04001618 if (name.startswith(clspv::ResourceAccessorFunction())) {
1619 continue;
1620 }
1621 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001622 continue;
1623 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001624 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1625 // Skip the first operand that has the SPIR-V Opcode
1626 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1627 if (isa<Constant>(I.getOperand(i)) &&
1628 !isa<GlobalValue>(I.getOperand(i))) {
1629 FindConstant(I.getOperand(i));
1630 }
1631 }
1632 continue;
1633 }
David Neto22f144c2017-06-12 14:26:21 -04001634 }
1635
1636 if (isa<AllocaInst>(I)) {
1637 // Alloca instructions have a constant for the number of elements. Ignore it.
1638 continue;
1639 } else if (isa<ShuffleVectorInst>(I)) {
1640 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1641 // Ignore constant for mask of shuffle vector instruction.
1642 if (i == 2) {
1643 continue;
1644 }
1645
1646 if (isa<Constant>(I.getOperand(i)) &&
1647 !isa<GlobalValue>(I.getOperand(i))) {
1648 FindConstant(I.getOperand(i));
1649 }
1650 }
1651
1652 continue;
1653 } else if (isa<InsertElementInst>(I)) {
1654 // Handle InsertElement with <4 x i8> specially.
1655 Type *CompositeTy = I.getOperand(0)->getType();
1656 if (is4xi8vec(CompositeTy)) {
1657 LLVMContext &Context = CompositeTy->getContext();
1658 if (isa<Constant>(I.getOperand(0))) {
1659 FindConstant(I.getOperand(0));
1660 }
1661
1662 if (isa<Constant>(I.getOperand(1))) {
1663 FindConstant(I.getOperand(1));
1664 }
1665
1666 // Add mask constant 0xFF.
1667 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1668 FindConstant(CstFF);
1669
1670 // Add shift amount constant.
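// Worked example: inserting into element 2 of a <4 x i8> packed in an i32
// uses shift amount 16 (= 2 * 8) together with the 0xFF mask above.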
1671 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1672 uint64_t Idx = CI->getZExtValue();
1673 Constant *CstShiftAmount =
1674 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1675 FindConstant(CstShiftAmount);
1676 }
1677
1678 continue;
1679 }
1680
1681 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1682 // Ignore constant for index of InsertElement instruction.
1683 if (i == 2) {
1684 continue;
1685 }
1686
1687 if (isa<Constant>(I.getOperand(i)) &&
1688 !isa<GlobalValue>(I.getOperand(i))) {
1689 FindConstant(I.getOperand(i));
1690 }
1691 }
1692
1693 continue;
1694 } else if (isa<ExtractElementInst>(I)) {
1695 // Handle ExtractElement with <4 x i8> specially.
1696 Type *CompositeTy = I.getOperand(0)->getType();
1697 if (is4xi8vec(CompositeTy)) {
1698 LLVMContext &Context = CompositeTy->getContext();
1699 if (isa<Constant>(I.getOperand(0))) {
1700 FindConstant(I.getOperand(0));
1701 }
1702
1703 // Add mask constant 0xFF.
1704 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1705 FindConstant(CstFF);
1706
1707 // Add shift amount constant.
1708 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1709 uint64_t Idx = CI->getZExtValue();
1710 Constant *CstShiftAmount =
1711 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1712 FindConstant(CstShiftAmount);
1713 } else {
1714 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1715 FindConstant(Cst8);
1716 }
1717
1718 continue;
1719 }
1720
1721 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1722 // Ignore constant for index of ExtractElement instruction.
1723 if (i == 1) {
1724 continue;
1725 }
1726
1727 if (isa<Constant>(I.getOperand(i)) &&
1728 !isa<GlobalValue>(I.getOperand(i))) {
1729 FindConstant(I.getOperand(i));
1730 }
1731 }
1732
1733 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001734 } else if ((Instruction::Xor == I.getOpcode()) &&
1735 I.getType()->isIntegerTy(1)) {
1736 // Special case Xor where the type is i1 and one of the arguments is the
1737 // constant 1 (true): this maps to OpLogicalNot in SPIR-V, so we don't
1738 // need the constant.
David Neto22f144c2017-06-12 14:26:21 -04001739 bool foundConstantTrue = false;
1740 for (Use &Op : I.operands()) {
1741 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1742 auto CI = cast<ConstantInt>(Op);
1743
1744 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001745 // If we already found the true constant, we might (probably only
1746 // on -O0) have an OpLogicalNot which is taking a constant
1747 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001748 FindConstant(Op);
1749 } else {
1750 foundConstantTrue = true;
1751 }
1752 }
1753 }
1754
1755 continue;
David Netod2de94a2017-08-28 17:27:47 -04001756 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001757 // Special case if i8 is not generally handled.
1758 if (!clspv::Option::Int8Support()) {
1759 // For truncation to i8 we mask against 255.
1760 Type *ToTy = I.getType();
1761 if (8u == ToTy->getPrimitiveSizeInBits()) {
1762 LLVMContext &Context = ToTy->getContext();
1763 Constant *Cst255 =
1764 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1765 FindConstant(Cst255);
1766 }
David Netod2de94a2017-08-28 17:27:47 -04001767 }
Neil Henning39672102017-09-29 14:33:13 +01001768 } else if (isa<AtomicRMWInst>(I)) {
1769 LLVMContext &Context = I.getContext();
1770
1771 FindConstant(
1772 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1773 FindConstant(ConstantInt::get(
1774 Type::getInt32Ty(Context),
1775 spv::MemorySemanticsUniformMemoryMask |
1776 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001777 }
1778
1779 for (Use &Op : I.operands()) {
1780 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1781 FindConstant(Op);
1782 }
1783 }
1784 }
1785 }
1786}
1787
1788void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001789 ValueList &CstList = getConstantList();
1790
David Netofb9a7972017-08-25 17:08:24 -04001791 // If V is already tracked, ignore it.
1792 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001793 return;
1794 }
1795
David Neto862b7d82018-06-14 18:48:37 -04001796 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1797 return;
1798 }
1799
David Neto22f144c2017-06-12 14:26:21 -04001800 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001801 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001802
1803 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001804 if (is4xi8vec(CstTy)) {
1805 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001806 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001807 }
1808 }
1809
1810 if (Cst->getNumOperands()) {
1811 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1812 ++I) {
1813 FindConstant(*I);
1814 }
1815
David Netofb9a7972017-08-25 17:08:24 -04001816 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001817 return;
1818 } else if (const ConstantDataSequential *CDS =
1819 dyn_cast<ConstantDataSequential>(Cst)) {
1820 // Add constants for each element to constant list.
1821 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1822 Constant *EleCst = CDS->getElementAsConstant(i);
1823 FindConstant(EleCst);
1824 }
1825 }
1826
1827 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001828 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001829 }
1830}
1831
1832spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1833 switch (AddrSpace) {
1834 default:
1835 llvm_unreachable("Unsupported OpenCL address space");
1836 case AddressSpace::Private:
1837 return spv::StorageClassFunction;
1838 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001839 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001840 case AddressSpace::Constant:
1841 return clspv::Option::ConstantArgsInUniformBuffer()
1842 ? spv::StorageClassUniform
1843 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001844 case AddressSpace::Input:
1845 return spv::StorageClassInput;
1846 case AddressSpace::Local:
1847 return spv::StorageClassWorkgroup;
1848 case AddressSpace::UniformConstant:
1849 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001850 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001851 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001852 case AddressSpace::ModuleScopePrivate:
1853 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001854 case AddressSpace::PushConstant:
1855 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001856 }
1857}
1858
David Neto862b7d82018-06-14 18:48:37 -04001859spv::StorageClass
1860SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1861 switch (arg_kind) {
1862 case clspv::ArgKind::Buffer:
1863 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001864 case clspv::ArgKind::BufferUBO:
1865 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001866 case clspv::ArgKind::Pod:
1867 return clspv::Option::PodArgsInUniformBuffer()
1868 ? spv::StorageClassUniform
1869 : spv::StorageClassStorageBuffer;
1870 case clspv::ArgKind::Local:
1871 return spv::StorageClassWorkgroup;
1872 case clspv::ArgKind::ReadOnlyImage:
1873 case clspv::ArgKind::WriteOnlyImage:
1874 case clspv::ArgKind::Sampler:
1875 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001876 default:
1877 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001878 }
1879}
1880
David Neto22f144c2017-06-12 14:26:21 -04001881spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1882 return StringSwitch<spv::BuiltIn>(Name)
1883 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1884 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1885 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1886 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1887 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1888 .Default(spv::BuiltInMax);
1889}
1890
1891void SPIRVProducerPass::GenerateExtInstImport() {
1892 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1893 uint32_t &ExtInstImportID = getOpExtInstImportID();
1894
1895 //
1896 // Generate OpExtInstImport.
1897 //
1898 // Ops[0] ... Ops[n] = Name (Literal String)
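// In SPIR-V assembly this corresponds to something like (illustrative id):
//   %1 = OpExtInstImport "GLSL.std.450"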
David Neto22f144c2017-06-12 14:26:21 -04001899 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001900 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1901 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001902}
1903
alan-bakerb6b09dc2018-11-08 16:59:28 -05001904void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1905 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001906 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1907 ValueMapType &VMap = getValueMap();
1908 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001909 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001910
1911 // Map for OpTypeRuntimeArray. If an argument has pointer type, two SPIR-V
1912 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1913 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1914
1915 for (Type *Ty : getTypeList()) {
1916 // Update TypeMap with nextID for reference later.
1917 TypeMap[Ty] = nextID;
1918
1919 switch (Ty->getTypeID()) {
1920 default: {
1921 Ty->print(errs());
1922 llvm_unreachable("Unsupported type???");
1923 break;
1924 }
1925 case Type::MetadataTyID:
1926 case Type::LabelTyID: {
1927 // Ignore these types.
1928 break;
1929 }
1930 case Type::PointerTyID: {
1931 PointerType *PTy = cast<PointerType>(Ty);
1932 unsigned AddrSpace = PTy->getAddressSpace();
1933
1934 // For the purposes of our Vulkan SPIR-V type system, constant and global
1935 // are conflated.
1936 bool UseExistingOpTypePointer = false;
1937 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001938 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1939 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001940 // Check whether we already created this type (for instance, if we
1941 // had a constant <type>* and a global <type>*, the type would be
1942 // created for whichever was seen first, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001943 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1944 if (0 < TypeMap.count(GlobalTy)) {
1945 TypeMap[PTy] = TypeMap[GlobalTy];
1946 UseExistingOpTypePointer = true;
1947 break;
1948 }
David Neto22f144c2017-06-12 14:26:21 -04001949 }
1950 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001951 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1952 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001953
alan-bakerb6b09dc2018-11-08 16:59:28 -05001954 // Check whether we already created this type (for instance, if we
1955 // had a constant <type>* and a global <type>*, the type would be
1956 // created for whichever was seen first, and shared by both).
1957 auto ConstantTy =
1958 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001959 if (0 < TypeMap.count(ConstantTy)) {
1960 TypeMap[PTy] = TypeMap[ConstantTy];
1961 UseExistingOpTypePointer = true;
1962 }
David Neto22f144c2017-06-12 14:26:21 -04001963 }
1964 }
1965
David Neto862b7d82018-06-14 18:48:37 -04001966 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001967
David Neto862b7d82018-06-14 18:48:37 -04001968 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001969 //
1970 // Generate OpTypePointer.
1971 //
1972
1973 // OpTypePointer
1974 // Ops[0] = Storage Class
1975 // Ops[1] = Element Type ID
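// For example (illustrative ids): a global float* becomes roughly
//   %ptr = OpTypePointer StorageBuffer %float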
1976 SPIRVOperandList Ops;
1977
David Neto257c3892018-04-11 13:19:45 -04001978 Ops << MkNum(GetStorageClass(AddrSpace))
1979 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001980
David Neto87846742018-04-11 17:36:22 -04001981 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001982 SPIRVInstList.push_back(Inst);
1983 }
David Neto22f144c2017-06-12 14:26:21 -04001984 break;
1985 }
1986 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001987 StructType *STy = cast<StructType>(Ty);
1988
1989 // Handle sampler type.
1990 if (STy->isOpaque()) {
1991 if (STy->getName().equals("opencl.sampler_t")) {
1992 //
1993 // Generate OpTypeSampler
1994 //
1995 // Empty Ops.
1996 SPIRVOperandList Ops;
1997
David Neto87846742018-04-11 17:36:22 -04001998 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001999 SPIRVInstList.push_back(Inst);
2000 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05002001 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2002 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002003 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2004 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002005 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002006 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002007 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2008 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002009 STy->getName().startswith("opencl.image3d_ro_t") ||
2010 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002011 //
2012 // Generate OpTypeImage
2013 //
2014 // Ops[0] = Sampled Type ID
2015 // Ops[1] = Dim ID
2016 // Ops[2] = Depth (Literal Number)
2017 // Ops[3] = Arrayed (Literal Number)
2018 // Ops[4] = MS (Literal Number)
2019 // Ops[5] = Sampled (Literal Number)
2020 // Ops[6] = Image Format ID
2021 //
2022 SPIRVOperandList Ops;
2023
alan-bakerf67468c2019-11-25 15:51:49 -05002024 uint32_t ImageTyID = nextID++;
2025 uint32_t SampledTyID = 0;
2026 if (STy->getName().contains(".float")) {
2027 SampledTyID = lookupType(Type::getFloatTy(Context));
2028 } else if (STy->getName().contains(".uint")) {
2029 SampledTyID = lookupType(Type::getInt32Ty(Context));
2030 } else if (STy->getName().contains(".int")) {
2031 // Generate a signed 32-bit integer if necessary.
2032 if (int32ID == 0) {
2033 int32ID = nextID++;
2034 SPIRVOperandList intOps;
2035 intOps << MkNum(32);
2036 intOps << MkNum(1);
2037 auto signed_int =
2038 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2039 SPIRVInstList.push_back(signed_int);
2040 }
2041 SampledTyID = int32ID;
2042
2043 // Generate a vec4 of the signed int if necessary.
2044 if (v4int32ID == 0) {
2045 v4int32ID = nextID++;
2046 SPIRVOperandList vecOps;
2047 vecOps << MkId(int32ID);
2048 vecOps << MkNum(4);
2049 auto int_vec =
2050 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2051 SPIRVInstList.push_back(int_vec);
2052 }
2053 } else {
2054 // This was likely an UndefValue.
2055 SampledTyID = lookupType(Type::getFloatTy(Context));
2056 }
David Neto257c3892018-04-11 13:19:45 -04002057 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002058
2059 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002060 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002061 STy->getName().startswith("opencl.image1d_wo_t") ||
2062 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2063 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002064 DimID = spv::Dim1D;
2065 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2066 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002067 DimID = spv::Dim3D;
2068 }
David Neto257c3892018-04-11 13:19:45 -04002069 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002070
2071 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002072 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002073
alan-baker7150a1d2020-02-25 08:31:06 -05002074 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2075 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002076
2077 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002078 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002079
alan-baker7150a1d2020-02-25 08:31:06 -05002080 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002081 //
2082 // From Spec
2083 //
2084 // 0 indicates this is only known at run time, not at compile time
2085 // 1 indicates will be used with sampler
2086 // 2 indicates will be used without a sampler (a storage image)
2087 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002088 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002089 Sampled = 2;
2090 }
David Neto257c3892018-04-11 13:19:45 -04002091 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002092
2093 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002094 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002095
alan-bakerf67468c2019-11-25 15:51:49 -05002096 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002097 SPIRVInstList.push_back(Inst);
2098 break;
2099 }
2100 }
2101
2102 //
2103 // Generate OpTypeStruct
2104 //
2105 // Ops[0] ... Ops[n] = Member IDs
2106 SPIRVOperandList Ops;
2107
2108 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002109 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002110 }
2111
David Neto22f144c2017-06-12 14:26:21 -04002112 uint32_t STyID = nextID;
2113
alan-bakerb6b09dc2018-11-08 16:59:28 -05002114 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002115 SPIRVInstList.push_back(Inst);
2116
2117 // Generate OpMemberDecorate.
2118 auto DecoInsertPoint =
2119 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2120 [](SPIRVInstruction *Inst) -> bool {
2121 return Inst->getOpcode() != spv::OpDecorate &&
2122 Inst->getOpcode() != spv::OpMemberDecorate &&
2123 Inst->getOpcode() != spv::OpExtInstImport;
2124 });
2125
Kévin Petitbbbda972020-03-03 19:16:31 +00002126 if (TypesNeedingLayout.idFor(STy)) {
2127 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2128 MemberIdx++) {
2129 // Ops[0] = Structure Type ID
2130 // Ops[1] = Member Index(Literal Number)
2131 // Ops[2] = Decoration (Offset)
2132 // Ops[3] = Byte Offset (Literal Number)
2133 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002134
Kévin Petitbbbda972020-03-03 19:16:31 +00002135 Ops << MkId(STyID) << MkNum(MemberIdx)
2136 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002137
Kévin Petitbbbda972020-03-03 19:16:31 +00002138 const auto ByteOffset =
2139 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002140
Kévin Petitbbbda972020-03-03 19:16:31 +00002141 Ops << MkNum(ByteOffset);
2142
2143 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
2144 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002145 }
David Neto22f144c2017-06-12 14:26:21 -04002146 }
2147
2148 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002149 if (StructTypesNeedingBlock.idFor(STy)) {
2150 Ops.clear();
2151 // Use Block decorations with StorageBuffer storage class.
2152 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002153
David Neto862b7d82018-06-14 18:48:37 -04002154 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2155 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002156 }
2157 break;
2158 }
2159 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002160 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002161
2162 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002163 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002164 SPIRVInstList.push_back(Inst);
2165 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002166 if (!clspv::Option::Int8Support()) {
2167 // i8 is added to TypeMap as i32.
2168 // No matter what LLVM type is requested first, always alias the
2169 // second one's SPIR-V type to be the same as the one we generated
2170 // first.
2171 unsigned aliasToWidth = 0;
2172 if (BitWidth == 8) {
2173 aliasToWidth = 32;
2174 BitWidth = 32;
2175 } else if (BitWidth == 32) {
2176 aliasToWidth = 8;
2177 }
2178 if (aliasToWidth) {
2179 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2180 auto where = TypeMap.find(otherType);
2181 if (where == TypeMap.end()) {
2182 // Go ahead and make it, but also map the other type to it.
2183 TypeMap[otherType] = nextID;
2184 } else {
2185 // Alias this SPIR-V type the existing type.
2186 TypeMap[Ty] = where->second;
2187 break;
2188 }
David Neto391aeb12017-08-26 15:51:58 -04002189 }
David Neto22f144c2017-06-12 14:26:21 -04002190 }
2191
David Neto257c3892018-04-11 13:19:45 -04002192 SPIRVOperandList Ops;
2193 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002194
2195 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002196 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002197 }
2198 break;
2199 }
2200 case Type::HalfTyID:
2201 case Type::FloatTyID:
2202 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002203 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002204 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002205
2206 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002207 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002208 break;
2209 }
2210 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002211 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002212 const uint64_t Length = ArrTy->getArrayNumElements();
2213 if (Length == 0) {
2214 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002215
David Neto862b7d82018-06-14 18:48:37 -04002216 // Only generate the type once.
2217 // TODO(dneto): Can it ever be generated more than once?
2218 // Doesn't LLVM type uniqueness guarantee we'll only see this
2219 // once?
2220 Type *EleTy = ArrTy->getArrayElementType();
2221 if (OpRuntimeTyMap.count(EleTy) == 0) {
2222 uint32_t OpTypeRuntimeArrayID = nextID;
2223 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002224
David Neto862b7d82018-06-14 18:48:37 -04002225 //
2226 // Generate OpTypeRuntimeArray.
2227 //
David Neto22f144c2017-06-12 14:26:21 -04002228
David Neto862b7d82018-06-14 18:48:37 -04002229 // OpTypeRuntimeArray
2230 // Ops[0] = Element Type ID
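// For example (illustrative ids): a zero-length [0 x float] maps to roughly
//   %rarr = OpTypeRuntimeArray %float
// with an ArrayStride decoration added either just below or deferred.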
2231 SPIRVOperandList Ops;
2232 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002233
David Neto862b7d82018-06-14 18:48:37 -04002234 SPIRVInstList.push_back(
2235 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002236
David Neto862b7d82018-06-14 18:48:37 -04002237 if (Hack_generate_runtime_array_stride_early) {
2238 // Generate OpDecorate.
2239 auto DecoInsertPoint = std::find_if(
2240 SPIRVInstList.begin(), SPIRVInstList.end(),
2241 [](SPIRVInstruction *Inst) -> bool {
2242 return Inst->getOpcode() != spv::OpDecorate &&
2243 Inst->getOpcode() != spv::OpMemberDecorate &&
2244 Inst->getOpcode() != spv::OpExtInstImport;
2245 });
David Neto22f144c2017-06-12 14:26:21 -04002246
David Neto862b7d82018-06-14 18:48:37 -04002247 // Ops[0] = Target ID
2248 // Ops[1] = Decoration (ArrayStride)
2249 // Ops[2] = Stride Number(Literal Number)
2250 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002251
David Neto862b7d82018-06-14 18:48:37 -04002252 Ops << MkId(OpTypeRuntimeArrayID)
2253 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002254 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002255
David Neto862b7d82018-06-14 18:48:37 -04002256 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2257 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2258 }
2259 }
David Neto22f144c2017-06-12 14:26:21 -04002260
David Neto862b7d82018-06-14 18:48:37 -04002261 } else {
David Neto22f144c2017-06-12 14:26:21 -04002262
David Neto862b7d82018-06-14 18:48:37 -04002263 //
2264 // Generate OpConstant and OpTypeArray.
2265 //
2266
2267 //
2268 // Generate OpConstant for array length.
2269 //
2270 // Ops[0] = Result Type ID
2271 // Ops[1] .. Ops[n] = Values LiteralNumber
2272 SPIRVOperandList Ops;
2273
2274 Type *LengthTy = Type::getInt32Ty(Context);
2275 uint32_t ResTyID = lookupType(LengthTy);
2276 Ops << MkId(ResTyID);
2277
2278 assert(Length < UINT32_MAX);
2279 Ops << MkNum(static_cast<uint32_t>(Length));
2280
2281 // Add constant for length to constant list.
2282 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2283 AllocatedVMap[CstLength] = nextID;
2284 VMap[CstLength] = nextID;
2285 uint32_t LengthID = nextID;
2286
2287 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2288 SPIRVInstList.push_back(CstInst);
2289
2290 // Remember to generate ArrayStride later
2291 getTypesNeedingArrayStride().insert(Ty);
2292
2293 //
2294 // Generate OpTypeArray.
2295 //
2296 // Ops[0] = Element Type ID
2297 // Ops[1] = Array Length Constant ID
2298 Ops.clear();
2299
2300 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2301 Ops << MkId(EleTyID) << MkId(LengthID);
2302
2303 // Update TypeMap with nextID.
2304 TypeMap[Ty] = nextID;
2305
2306 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2307 SPIRVInstList.push_back(ArrayInst);
2308 }
David Neto22f144c2017-06-12 14:26:21 -04002309 break;
2310 }
2311 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002312 // <4 x i8> is changed to i32 if i8 is not generally supported.
2313 if (!clspv::Option::Int8Support() &&
2314 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002315 if (Ty->getVectorNumElements() == 4) {
2316 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2317 break;
2318 } else {
2319 Ty->print(errs());
2320 llvm_unreachable("Support above i8 vector type");
2321 }
2322 }
2323
2324 // Ops[0] = Component Type ID
2325 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002326 SPIRVOperandList Ops;
2327 Ops << MkId(lookupType(Ty->getVectorElementType()))
2328 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002329
alan-bakerb6b09dc2018-11-08 16:59:28 -05002330 SPIRVInstruction *inst =
2331 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002332 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002333 break;
2334 }
2335 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002336 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002337 SPIRVInstList.push_back(Inst);
2338 break;
2339 }
2340 case Type::FunctionTyID: {
2341 // Generate SPIRV instruction for function type.
2342 FunctionType *FTy = cast<FunctionType>(Ty);
2343
2344 // Ops[0] = Return Type ID
2345 // Ops[1] ... Ops[n] = Parameter Type IDs
2346 SPIRVOperandList Ops;
2347
2348 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002349 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002350
2351 // Find SPIRV instructions for parameter types
2352 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2353 // Find SPIRV instruction for parameter type.
2354 auto ParamTy = FTy->getParamType(k);
2355 if (ParamTy->isPointerTy()) {
2356 auto PointeeTy = ParamTy->getPointerElementType();
2357 if (PointeeTy->isStructTy() &&
2358 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2359 ParamTy = PointeeTy;
2360 }
2361 }
2362
David Netoc6f3ab22018-04-06 18:02:31 -04002363 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002364 }
2365
David Neto87846742018-04-11 17:36:22 -04002366 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002367 SPIRVInstList.push_back(Inst);
2368 break;
2369 }
2370 }
2371 }
2372
2373 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002374 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002375 //
2376 // Generate OpTypeSampledImage.
2377 //
2378 // Ops[0] = Image Type ID
2379 //
2380 SPIRVOperandList Ops;
2381
David Netoc6f3ab22018-04-06 18:02:31 -04002382 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002383
alan-bakerabd82722019-12-03 17:14:51 -05002384 // Update the image type map.
2385 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002386
David Neto87846742018-04-11 17:36:22 -04002387 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002388 SPIRVInstList.push_back(Inst);
2389 }
David Netoc6f3ab22018-04-06 18:02:31 -04002390
2391 // Generate types for pointer-to-local arguments.
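// Illustrative only (ids made up): for each local spec id this emits roughly
//   %size   = OpSpecConstant %uint 1
//   %arr    = OpTypeArray %elem %size
//   %ptrarr = OpTypePointer Workgroup %arr
// with the SpecId decoration presumably applied elsewhere.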
Alan Baker202c8c72018-08-13 13:47:44 -04002392 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2393 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002394 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002395
2396 // Generate the spec constant.
2397 SPIRVOperandList Ops;
2398 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002399 SPIRVInstList.push_back(
2400 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002401
2402 // Generate the array type.
2403 Ops.clear();
2404 // The element type must have been created.
2405 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2406 assert(elem_ty_id);
2407 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2408
2409 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002410 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002411
2412 Ops.clear();
2413 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002414 SPIRVInstList.push_back(new SPIRVInstruction(
2415 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002416 }
David Neto22f144c2017-06-12 14:26:21 -04002417}
2418
2419void SPIRVProducerPass::GenerateSPIRVConstants() {
2420 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2421 ValueMapType &VMap = getValueMap();
2422 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2423 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002424 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002425
2426 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002427 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002428 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002429
2430 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002431 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002432 continue;
2433 }
2434
David Netofb9a7972017-08-25 17:08:24 -04002435 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002436 VMap[Cst] = nextID;
2437
2438 //
2439 // Generate OpConstant.
2440 //
2441
2442 // Ops[0] = Result Type ID
2443 // Ops[1] .. Ops[n] = Values LiteralNumber
2444 SPIRVOperandList Ops;
2445
David Neto257c3892018-04-11 13:19:45 -04002446 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002447
2448 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002449 spv::Op Opcode = spv::OpNop;
2450
2451 if (isa<UndefValue>(Cst)) {
2452 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002453 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002454 if (hack_undef && IsTypeNullable(Cst->getType())) {
2455 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002456 }
David Neto22f144c2017-06-12 14:26:21 -04002457 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2458 unsigned BitWidth = CI->getBitWidth();
2459 if (BitWidth == 1) {
2460 // If the bitwidth of constant is 1, generate OpConstantTrue or
2461 // OpConstantFalse.
2462 if (CI->getZExtValue()) {
2463 // Ops[0] = Result Type ID
2464 Opcode = spv::OpConstantTrue;
2465 } else {
2466 // Ops[0] = Result Type ID
2467 Opcode = spv::OpConstantFalse;
2468 }
David Neto22f144c2017-06-12 14:26:21 -04002469 } else {
2470 auto V = CI->getZExtValue();
2471 LiteralNum.push_back(V & 0xFFFFFFFF);
2472
2473 if (BitWidth > 32) {
2474 LiteralNum.push_back(V >> 32);
2475 }
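// e.g. a 64-bit constant 0x0000000100000002 is emitted as two literal words:
// the low word 0x00000002 first, then the high word 0x00000001.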
2476
2477 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002478
David Neto257c3892018-04-11 13:19:45 -04002479 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002480 }
2481 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2482 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2483 Type *CFPTy = CFP->getType();
2484 if (CFPTy->isFloatTy()) {
2485 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002486 } else if (CFPTy->isDoubleTy()) {
2487 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2488 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002489 } else if (CFPTy->isHalfTy()) {
2490 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002491 } else {
2492 CFPTy->print(errs());
2493 llvm_unreachable("Implement this ConstantFP Type");
2494 }
2495
2496 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002497
David Neto257c3892018-04-11 13:19:45 -04002498 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002499 } else if (isa<ConstantDataSequential>(Cst) &&
2500 cast<ConstantDataSequential>(Cst)->isString()) {
2501 Cst->print(errs());
2502 llvm_unreachable("Implement this Constant");
2503
2504 } else if (const ConstantDataSequential *CDS =
2505 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002506 // Let's convert <4 x i8> constant to int constant specially.
2507 // This case occurs when all the values are specified as constant
2508 // ints.
2509 Type *CstTy = Cst->getType();
2510 if (is4xi8vec(CstTy)) {
2511 LLVMContext &Context = CstTy->getContext();
2512
2513 //
2514 // Generate OpConstant with OpTypeInt 32 0.
2515 //
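// For example, <4 x i8> <1, 2, 3, 4> packs to 0x01020304; element 0 lands in
// the most significant byte.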
Neil Henning39672102017-09-29 14:33:13 +01002516 uint32_t IntValue = 0;
2517 for (unsigned k = 0; k < 4; k++) {
2518 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002519 IntValue = (IntValue << 8) | (Val & 0xffu);
2520 }
2521
2522 Type *i32 = Type::getInt32Ty(Context);
2523 Constant *CstInt = ConstantInt::get(i32, IntValue);
2524 // If this constant is already registered on VMap, use it.
2525 if (VMap.count(CstInt)) {
2526 uint32_t CstID = VMap[CstInt];
2527 VMap[Cst] = CstID;
2528 continue;
2529 }
2530
David Neto257c3892018-04-11 13:19:45 -04002531 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002532
David Neto87846742018-04-11 17:36:22 -04002533 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002534 SPIRVInstList.push_back(CstInst);
2535
2536 continue;
2537 }
2538
2539 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002540 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2541 Constant *EleCst = CDS->getElementAsConstant(k);
2542 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002543 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002544 }
2545
2546 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002547 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2548 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002549 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002550 Type *CstTy = Cst->getType();
2551 if (is4xi8vec(CstTy)) {
2552 LLVMContext &Context = CstTy->getContext();
2553
2554 //
2555 // Generate OpConstant with OpTypeInt 32 0.
2556 //
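// Undef lanes contribute 0x00, e.g. <i8 1, i8 undef, i8 3, i8 4> packs to
// 0x01000304.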
Neil Henning39672102017-09-29 14:33:13 +01002557 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002558 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2559 I != E; ++I) {
2560 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002561 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002562 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2563 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002564 }
David Neto49351ac2017-08-26 17:32:20 -04002565 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002566 }
2567
David Neto49351ac2017-08-26 17:32:20 -04002568 Type *i32 = Type::getInt32Ty(Context);
2569 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002570 // If this constant is already registered on VMap, use it.
2571 if (VMap.count(CstInt)) {
2572 uint32_t CstID = VMap[CstInt];
2573 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002574 continue;
David Neto22f144c2017-06-12 14:26:21 -04002575 }
2576
David Neto257c3892018-04-11 13:19:45 -04002577 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002578
David Neto87846742018-04-11 17:36:22 -04002579 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002580 SPIRVInstList.push_back(CstInst);
2581
David Neto19a1bad2017-08-25 15:01:41 -04002582 continue;
David Neto22f144c2017-06-12 14:26:21 -04002583 }
2584
2585 // We use a constant composite in SPIR-V for our constant aggregate in
2586 // LLVM.
2587 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002588
2589 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2590 // Look up the ID of the element of this aggregate (for which we will
2591 // have previously created a constant).
2592 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2593
2594 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002595 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002596 }
2597 } else if (Cst->isNullValue()) {
2598 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002599 } else {
2600 Cst->print(errs());
2601 llvm_unreachable("Unsupported Constant???");
2602 }
2603
alan-baker5b86ed72019-02-15 08:26:50 -05002604 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2605 // Null pointer requires variable pointers.
2606 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2607 }
2608
David Neto87846742018-04-11 17:36:22 -04002609 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002610 SPIRVInstList.push_back(CstInst);
2611 }
2612}
2613
2614void SPIRVProducerPass::GenerateSamplers(Module &M) {
2615 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002616
alan-bakerb6b09dc2018-11-08 16:59:28 -05002617 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002618 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002619 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2620 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002621
David Neto862b7d82018-06-14 18:48:37 -04002622 // We might have samplers in the sampler map that are not used
2623 // in the translation unit. We need to allocate variables
2624 // for them and bindings too.
2625 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002626
Kévin Petitdf71de32019-04-09 14:09:50 +01002627 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002628 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002629 if (!var_fn)
2630 return;
alan-baker09cb9802019-12-10 13:16:27 -05002631
David Neto862b7d82018-06-14 18:48:37 -04002632 for (auto user : var_fn->users()) {
2633 // Populate SamplerLiteralToDescriptorSetMap and
2634 // SamplerLiteralToBindingMap.
2635 //
2636 // Look for calls like
2637 // call %opencl.sampler_t addrspace(2)*
2638 // @clspv.sampler.var.literal(
2639 // i32 descriptor,
2640 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002641 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002642 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002643 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002644 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002645 auto sampler_value = third_param;
2646 if (clspv::Option::UseSamplerMap()) {
2647 if (third_param >= sampler_map.size()) {
2648 errs() << "Out of bounds index to sampler map: " << third_param;
2649 llvm_unreachable("bad sampler init: out of bounds");
2650 }
2651 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002652 }
2653
David Neto862b7d82018-06-14 18:48:37 -04002654 const auto descriptor_set = static_cast<unsigned>(
2655 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2656 const auto binding = static_cast<unsigned>(
2657 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2658
2659 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2660 SamplerLiteralToBindingMap[sampler_value] = binding;
2661 used_bindings.insert(binding);
2662 }
2663 }
2664
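// For each distinct sampler literal the loop below emits, schematically
// (names are illustrative only):
//   %s = OpVariable %ptr_UniformConstant_sampler UniformConstant
//   OpDecorate %s DescriptorSet <set>
//   OpDecorate %s Binding <binding>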
alan-baker09cb9802019-12-10 13:16:27 -05002665 DenseSet<size_t> seen;
2666 for (auto user : var_fn->users()) {
2667 if (!isa<CallInst>(user))
2668 continue;
2669
2670 auto call = cast<CallInst>(user);
2671 const unsigned third_param = static_cast<unsigned>(
2672 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2673
2674 // Already allocated a variable for this value.
2675 if (!seen.insert(third_param).second)
2676 continue;
2677
2678 auto sampler_value = third_param;
2679 if (clspv::Option::UseSamplerMap()) {
2680 sampler_value = sampler_map[third_param].first;
2681 }
2682
David Neto22f144c2017-06-12 14:26:21 -04002683 // Generate OpVariable.
2684 //
2685 // GIDOps[0] : Result Type ID
2686 // GIDOps[1] : Storage Class
2687 SPIRVOperandList Ops;
2688
David Neto257c3892018-04-11 13:19:45 -04002689 Ops << MkId(lookupType(SamplerTy))
2690 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002691
David Neto862b7d82018-06-14 18:48:37 -04002692 auto sampler_var_id = nextID++;
2693 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002694 SPIRVInstList.push_back(Inst);
2695
alan-baker09cb9802019-12-10 13:16:27 -05002696 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002697
2698 // Find Insert Point for OpDecorate.
2699 auto DecoInsertPoint =
2700 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2701 [](SPIRVInstruction *Inst) -> bool {
2702 return Inst->getOpcode() != spv::OpDecorate &&
2703 Inst->getOpcode() != spv::OpMemberDecorate &&
2704 Inst->getOpcode() != spv::OpExtInstImport;
2705 });
2706
2707 // Ops[0] = Target ID
2708 // Ops[1] = Decoration (DescriptorSet)
2709 // Ops[2] = LiteralNumber according to Decoration
2710 Ops.clear();
2711
David Neto862b7d82018-06-14 18:48:37 -04002712 unsigned descriptor_set;
2713 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002714 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002715 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002716 // This sampler is not actually used. Find the next one.
2717 for (binding = 0; used_bindings.count(binding); binding++)
2718 ;
2719 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2720 used_bindings.insert(binding);
2721 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002722 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2723 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002724
alan-baker09cb9802019-12-10 13:16:27 -05002725 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002726 descriptorMapEntries->emplace_back(std::move(sampler_data),
2727 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002728 }
2729
2730 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2731 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002732
David Neto87846742018-04-11 17:36:22 -04002733 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002734 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2735
2736 // Ops[0] = Target ID
2737 // Ops[1] = Decoration (Binding)
2738 // Ops[2] = LiteralNumber according to Decoration
2739 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002740 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2741 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002742
David Neto87846742018-04-11 17:36:22 -04002743 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002744 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2745 }
David Neto862b7d82018-06-14 18:48:37 -04002746}
David Neto22f144c2017-06-12 14:26:21 -04002747
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002748void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002749 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2750 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002751
David Neto862b7d82018-06-14 18:48:37 -04002752 // Generate variables. Make one for each resource var info object.
2753 for (auto *info : ModuleOrderedResourceVars) {
2754 Type *type = info->var_fn->getReturnType();
2755 // Remap the address space for opaque types.
2756 switch (info->arg_kind) {
2757 case clspv::ArgKind::Sampler:
2758 case clspv::ArgKind::ReadOnlyImage:
2759 case clspv::ArgKind::WriteOnlyImage:
2760 type = PointerType::get(type->getPointerElementType(),
2761 clspv::AddressSpace::UniformConstant);
2762 break;
2763 default:
2764 break;
2765 }
David Neto22f144c2017-06-12 14:26:21 -04002766
David Neto862b7d82018-06-14 18:48:37 -04002767 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002768
David Neto862b7d82018-06-14 18:48:37 -04002769 const auto type_id = lookupType(type);
2770 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2771 SPIRVOperandList Ops;
2772 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002773
David Neto862b7d82018-06-14 18:48:37 -04002774 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2775 SPIRVInstList.push_back(Inst);
2776
2777 // Map calls to the variable-builtin-function.
2778 for (auto &U : info->var_fn->uses()) {
2779 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2780 const auto set = unsigned(
2781 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2782 const auto binding = unsigned(
2783 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2784 if (set == info->descriptor_set && binding == info->binding) {
2785 switch (info->arg_kind) {
2786 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002787 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002788 case clspv::ArgKind::Pod:
2789 // The call maps to the variable directly.
2790 VMap[call] = info->var_id;
2791 break;
2792 case clspv::ArgKind::Sampler:
2793 case clspv::ArgKind::ReadOnlyImage:
2794 case clspv::ArgKind::WriteOnlyImage:
2795 // The call maps to a load we generate later.
2796 ResourceVarDeferredLoadCalls[call] = info->var_id;
2797 break;
2798 default:
2799 llvm_unreachable("Unhandled arg kind");
2800 }
2801 }
David Neto22f144c2017-06-12 14:26:21 -04002802 }
David Neto862b7d82018-06-14 18:48:37 -04002803 }
2804 }
David Neto22f144c2017-06-12 14:26:21 -04002805
David Neto862b7d82018-06-14 18:48:37 -04002806 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002807
David Neto862b7d82018-06-14 18:48:37 -04002808 // Find Insert Point for OpDecorate.
2809 auto DecoInsertPoint =
2810 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2811 [](SPIRVInstruction *Inst) -> bool {
2812 return Inst->getOpcode() != spv::OpDecorate &&
2813 Inst->getOpcode() != spv::OpMemberDecorate &&
2814 Inst->getOpcode() != spv::OpExtInstImport;
2815 });
2816
2817 SPIRVOperandList Ops;
2818 for (auto *info : ModuleOrderedResourceVars) {
2819 // Decorate with DescriptorSet and Binding.
2820 Ops.clear();
2821 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2822 << MkNum(info->descriptor_set);
2823 SPIRVInstList.insert(DecoInsertPoint,
2824 new SPIRVInstruction(spv::OpDecorate, Ops));
2825
2826 Ops.clear();
2827 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2828 << MkNum(info->binding);
2829 SPIRVInstList.insert(DecoInsertPoint,
2830 new SPIRVInstruction(spv::OpDecorate, Ops));
2831
alan-bakere9308012019-03-15 10:25:13 -04002832 if (info->coherent) {
2833 // Decorate with Coherent if required for the variable.
2834 Ops.clear();
2835 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2836 SPIRVInstList.insert(DecoInsertPoint,
2837 new SPIRVInstruction(spv::OpDecorate, Ops));
2838 }
2839
David Neto862b7d82018-06-14 18:48:37 -04002840 // Generate NonWritable and NonReadable
2841 switch (info->arg_kind) {
2842 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002843 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002844 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2845 clspv::AddressSpace::Constant) {
2846 Ops.clear();
2847 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2848 SPIRVInstList.insert(DecoInsertPoint,
2849 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002850 }
David Neto862b7d82018-06-14 18:48:37 -04002851 break;
David Neto862b7d82018-06-14 18:48:37 -04002852 case clspv::ArgKind::WriteOnlyImage:
2853 Ops.clear();
2854 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2855 SPIRVInstList.insert(DecoInsertPoint,
2856 new SPIRVInstruction(spv::OpDecorate, Ops));
2857 break;
2858 default:
2859 break;
David Neto22f144c2017-06-12 14:26:21 -04002860 }
2861 }
2862}
2863
Kévin Petitbbbda972020-03-03 19:16:31 +00002864namespace {
2865
2866bool isScalarType(Type *type) {
2867 return type->isIntegerTy() || type->isFloatTy();
2868}
2869
2870uint64_t structAlignment(StructType *type,
2871 std::function<uint64_t(Type *)> alignFn) {
2872 uint64_t maxAlign = 1;
2873 for (unsigned i = 0; i < type->getStructNumElements(); i++) {
2874 uint64_t align = alignFn(type->getStructElementType(i));
2875 maxAlign = std::max(align, maxAlign);
2876 }
2877 return maxAlign;
2878}
2879
2880uint64_t scalarAlignment(Type *type) {
2881 // A scalar of size N has a scalar alignment of N.
2882 if (isScalarType(type)) {
2883 return type->getScalarSizeInBits() / 8;
2884 }
2885
2886 // A vector or matrix type has a scalar alignment equal to that of its
2887 // component type.
2888 if (type->isVectorTy()) {
2889 return scalarAlignment(type->getVectorElementType());
2890 }
2891
2892 // An array type has a scalar alignment equal to that of its element type.
2893 if (type->isArrayTy()) {
2894 return scalarAlignment(type->getArrayElementType());
2895 }
2896
2897 // A structure has a scalar alignment equal to the largest scalar alignment of
2898 // any of its members.
2899 if (type->isStructTy()) {
2900 return structAlignment(cast<StructType>(type), scalarAlignment);
2901 }
2902
2903 llvm_unreachable("Unsupported type");
2904}
2905
2906uint64_t baseAlignment(Type *type) {
2907 // A scalar has a base alignment equal to its scalar alignment.
2908 if (isScalarType(type)) {
2909 return scalarAlignment(type);
2910 }
2911
2912 if (type->isVectorTy()) {
2913 unsigned numElems = type->getVectorNumElements();
2914
2915 // A two-component vector has a base alignment equal to twice its scalar
2916 // alignment.
2917 if (numElems == 2) {
2918 return 2 * scalarAlignment(type);
2919 }
2920 // A three- or four-component vector has a base alignment equal to four
2921 // times its scalar alignment.
2922 if ((numElems == 3) || (numElems == 4)) {
2923 return 4 * scalarAlignment(type);
2924 }
2925 }
2926
2927 // An array has a base alignment equal to the base alignment of its element
2928 // type.
2929 if (type->isArrayTy()) {
2930 return baseAlignment(type->getArrayElementType());
2931 }
2932
2933 // A structure has a base alignment equal to the largest base alignment of any
2934 // of its members.
2935 if (type->isStructTy()) {
2936 return structAlignment(cast<StructType>(type), baseAlignment);
2937 }
2938
2939 // TODO A row-major matrix of C columns has a base alignment equal to the base
2940 // alignment of a vector of C matrix components.
2941 // TODO A column-major matrix has a base alignment equal to the base alignment
2942 // of the matrix column type.
2943
2944 llvm_unreachable("Unsupported type");
2945}
2946
2947uint64_t extendedAlignment(Type *type) {
2948 // A scalar, vector or matrix type has an extended alignment equal to its base
2949 // alignment.
2950 // TODO matrix type
2951 if (isScalarType(type) || type->isVectorTy()) {
2952 return baseAlignment(type);
2953 }
2954
2955 // An array or structure type has an extended alignment equal to the largest
2956 // extended alignment of any of its members, rounded up to a multiple of 16
2957 if (type->isStructTy()) {
2958 auto salign = structAlignment(cast<StructType>(type), extendedAlignment);
2959 return alignTo(salign, 16);
2960 }
2961
2962 if (type->isArrayTy()) {
2963 auto salign = extendedAlignment(type->getArrayElementType());
2964 return alignTo(salign, 16);
2965 }
2966
2967 llvm_unreachable("Unsupported type");
2968}
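// Worked examples of the rules above: a <3 x float> vector has scalar
// alignment 4, base alignment 16 and extended alignment 16; a struct holding
// a float2 followed by a float has base alignment 8 and extended alignment
// rounded up to 16.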
2969
2970uint64_t standardAlignment(Type *type, spv::StorageClass sclass) {
2971 // If the scalarBlockLayout feature is enabled on the device then every member
2972 // must be aligned according to its scalar alignment
2973 if (clspv::Option::ScalarBlockLayout()) {
2974 return scalarAlignment(type);
2975 }
2976
2977 // All vectors must be aligned according to their scalar alignment
2978 if (type->isVectorTy()) {
2979 return scalarAlignment(type);
2980 }
2981
2982 // If the uniformBufferStandardLayout feature is not enabled on the device,
2983 // then any member of an OpTypeStruct with a storage class of Uniform and a
2984 // decoration of Block must be aligned according to its extended alignment.
2985 if (!clspv::Option::Std430UniformBufferLayout() &&
2986 sclass == spv::StorageClassUniform) {
2987 return extendedAlignment(type);
2988 }
2989
2990 // Every other member must be aligned according to its base alignment
2991 return baseAlignment(type);
2992}
2993
2994bool improperlyStraddles(const DataLayout &DL, Type *type, unsigned offset) {
2995 assert(type->isVectorTy());
2996
2997 auto size = DL.getTypeStoreSize(type);
2998
2999 // It is a vector with total size less than or equal to 16 bytes, and has
3000 // Offset decorations placing its first byte at F and its last byte at L,
3001 // where floor(F / 16) != floor(L / 16).
3002 if ((size <= 16) && (offset % 16 + size > 16)) {
3003 return true;
3004 }
3005
3006 // It is a vector with total size greater than 16 bytes and has its Offset
3007 // decorations placing its first byte at a non-integer multiple of 16
3008 if ((size > 16) && (offset % 16 != 0)) {
3009 return true;
3010 }
3011
3012 return false;
3013}
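// e.g. a 16-byte float4 at offset 8 improperly straddles (it spans bytes
// 8..23, crossing a 16-byte boundary), while the same vector at offset 0 or
// 16 does not.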
3014
3015// See 14.5 Shader Resource Interface in Vulkan spec
3016bool isValidExplicitLayout(Module &M, StructType *STy, unsigned Member,
3017 spv::StorageClass SClass, unsigned Offset,
3018 unsigned PreviousMemberOffset) {
3019
3020 auto MemberType = STy->getElementType(Member);
3021 auto Align = standardAlignment(MemberType, SClass);
3022 auto &DL = M.getDataLayout();
3023
3024 // The Offset decoration of any member must be a multiple of its alignment
3025 if (Offset % Align != 0) {
3026 return false;
3027 }
3028
3029 // TODO Any ArrayStride or MatrixStride decoration must be a multiple of the
3030 // alignment of the array or matrix as defined above
3031
3032 if (!clspv::Option::ScalarBlockLayout()) {
3033 // Vectors must not improperly straddle, as defined above
3034 if (MemberType->isVectorTy() &&
3035 improperlyStraddles(DL, MemberType, Offset)) {
3036 return false;
3037 }
3038
3039 // The Offset decoration of a member must not place it between the end
3040 // of a structure or an array and the next multiple of the alignment of that
3041 // structure or array
3042 if (Member > 0) {
3043 auto PType = STy->getElementType(Member - 1);
3044 if (PType->isStructTy() || PType->isArrayTy()) {
3045 auto PAlign = standardAlignment(PType, SClass);
3046 if (Offset - PreviousMemberOffset < PAlign) {
3047 return false;
3048 }
3049 }
3050 }
3051 }
3052
3053 return true;
3054}
3055
3056} // namespace
3057
3058void SPIRVProducerPass::GeneratePushConstantDescriptormapEntries(Module &M) {
3059
3060 if (auto GV = M.getGlobalVariable(clspv::PushConstantsVariableName())) {
3061 auto const &DL = M.getDataLayout();
3062 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
3063 auto STy = cast<StructType>(GV->getValueType());
3064
3065 for (unsigned i = 0; i < STy->getNumElements(); i++) {
3066 auto pc = static_cast<clspv::PushConstant>(
3067 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
3068 auto memberType = STy->getElementType(i);
3069 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
3070 unsigned previousOffset = 0;
3071 if (i > 0) {
3072 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
3073 }
3074 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
3075 assert(isValidExplicitLayout(M, STy, i, spv::StorageClassPushConstant,
3076 offset, previousOffset));
3077 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
3078 descriptorMapEntries->emplace_back(std::move(data));
3079 }
3080 }
3081}
3082
David Neto22f144c2017-06-12 14:26:21 -04003083void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003084 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04003085 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3086 ValueMapType &VMap = getValueMap();
3087 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07003088 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04003089
3090 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
3091 Type *Ty = GV.getType();
3092 PointerType *PTy = cast<PointerType>(Ty);
3093
3094 uint32_t InitializerID = 0;
3095
3096 // Workgroup size is handled differently (it goes into a constant)
3097 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3098 std::vector<bool> HasMDVec;
3099 uint32_t PrevXDimCst = 0xFFFFFFFF;
3100 uint32_t PrevYDimCst = 0xFFFFFFFF;
3101 uint32_t PrevZDimCst = 0xFFFFFFFF;
3102 for (Function &Func : *GV.getParent()) {
3103 if (Func.isDeclaration()) {
3104 continue;
3105 }
3106
3107 // We only need to check kernels.
3108 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
3109 continue;
3110 }
3111
3112 if (const MDNode *MD =
3113 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
3114 uint32_t CurXDimCst = static_cast<uint32_t>(
3115 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3116 uint32_t CurYDimCst = static_cast<uint32_t>(
3117 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3118 uint32_t CurZDimCst = static_cast<uint32_t>(
3119 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3120
3121 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
3122 PrevZDimCst == 0xFFFFFFFF) {
3123 PrevXDimCst = CurXDimCst;
3124 PrevYDimCst = CurYDimCst;
3125 PrevZDimCst = CurZDimCst;
3126 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
3127 CurZDimCst != PrevZDimCst) {
3128 llvm_unreachable(
3129 "reqd_work_group_size must be the same across all kernels");
3130 } else {
3131 continue;
3132 }
3133
3134 //
3135 // Generate OpConstantComposite.
3136 //
3137 // Ops[0] : Result Type ID
3138 // Ops[1] : Constant size for x dimension.
3139 // Ops[2] : Constant size for y dimension.
3140 // Ops[3] : Constant size for z dimension.
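// e.g. reqd_work_group_size(8, 4, 2) yields, schematically:
//   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_2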
3141 SPIRVOperandList Ops;
3142
3143 uint32_t XDimCstID =
3144 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
3145 uint32_t YDimCstID =
3146 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
3147 uint32_t ZDimCstID =
3148 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
3149
3150 InitializerID = nextID;
3151
David Neto257c3892018-04-11 13:19:45 -04003152 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3153 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003154
David Neto87846742018-04-11 17:36:22 -04003155 auto *Inst =
3156 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003157 SPIRVInstList.push_back(Inst);
3158
3159 HasMDVec.push_back(true);
3160 } else {
3161 HasMDVec.push_back(false);
3162 }
3163 }
3164
3165 // Check that all kernels have the same reqd_work_group_size definition.
3166 bool HasMD = false;
3167 if (!HasMDVec.empty()) {
3168 HasMD = HasMDVec[0];
3169 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3170 if (HasMD != HasMDVec[i]) {
3171 llvm_unreachable(
3172 "Kernels should have consistent work group size definition");
3173 }
3174 }
3175 }
3176
3177 // If the kernels do not have reqd_work_group_size metadata, generate
3178 // OpSpecConstants for the x/y/z dimensions.
3179 if (!HasMD) {
3180 //
3181 // Generate OpSpecConstants for x/y/z dimension.
3182 //
3183 // Ops[0] : Result Type ID
3184 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3185 uint32_t XDimCstID = 0;
3186 uint32_t YDimCstID = 0;
3187 uint32_t ZDimCstID = 0;
3188
David Neto22f144c2017-06-12 14:26:21 -04003189 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04003190 uint32_t result_type_id =
3191 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04003192
David Neto257c3892018-04-11 13:19:45 -04003193 // X Dimension
3194 Ops << MkId(result_type_id) << MkNum(1);
3195 XDimCstID = nextID++;
3196 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003197 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003198
3199 // Y Dimension
3200 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003201 Ops << MkId(result_type_id) << MkNum(1);
3202 YDimCstID = nextID++;
3203 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003204 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003205
3206 // Z Dimension
3207 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003208 Ops << MkId(result_type_id) << MkNum(1);
3209 ZDimCstID = nextID++;
3210 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003211 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003212
David Neto257c3892018-04-11 13:19:45 -04003213 BuiltinDimVec.push_back(XDimCstID);
3214 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003215 BuiltinDimVec.push_back(ZDimCstID);
3216
David Neto22f144c2017-06-12 14:26:21 -04003217 //
3218 // Generate OpSpecConstantComposite.
3219 //
3220 // Ops[0] : Result Type ID
3221 // Ops[1] : Constant size for x dimension.
3222 // Ops[2] : Constant size for y dimension.
3223 // Ops[3] : Constant size for z dimension.
3224 InitializerID = nextID;
3225
3226 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003227 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3228 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003229
David Neto87846742018-04-11 17:36:22 -04003230 auto *Inst =
3231 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003232 SPIRVInstList.push_back(Inst);
3233 }
3234 }
3235
David Neto22f144c2017-06-12 14:26:21 -04003236 VMap[&GV] = nextID;
3237
3238 //
3239 // Generate OpVariable.
3240 //
3241 // GIDOps[0] : Result Type ID
3242 // GIDOps[1] : Storage Class
3243 SPIRVOperandList Ops;
3244
David Neto85082642018-03-24 06:55:20 -07003245 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003246 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003247
David Neto85082642018-03-24 06:55:20 -07003248 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003249 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003250 clspv::Option::ModuleConstantsInStorageBuffer();
3251
Kévin Petit23d5f182019-08-13 16:21:29 +01003252 if (GV.hasInitializer()) {
3253 auto GVInit = GV.getInitializer();
3254 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3255 assert(VMap.count(GVInit) == 1);
3256 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003257 }
3258 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003259
3260 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003261 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003262 Ops << MkId(InitializerID);
3263 }
David Neto85082642018-03-24 06:55:20 -07003264 const uint32_t var_id = nextID++;
3265
David Neto87846742018-04-11 17:36:22 -04003266 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003267 SPIRVInstList.push_back(Inst);
3268
3269 // If we have a builtin.
3270 if (spv::BuiltInMax != BuiltinType) {
3271 // Find Insert Point for OpDecorate.
3272 auto DecoInsertPoint =
3273 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3274 [](SPIRVInstruction *Inst) -> bool {
3275 return Inst->getOpcode() != spv::OpDecorate &&
3276 Inst->getOpcode() != spv::OpMemberDecorate &&
3277 Inst->getOpcode() != spv::OpExtInstImport;
3278 });
3279 //
3280 // Generate OpDecorate.
3281 //
3282 // DOps[0] = Target ID
3283 // DOps[1] = Decoration (Builtin)
3284 // DOps[2] = BuiltIn ID
3285 uint32_t ResultID;
3286
3287 // WorkgroupSize is different: we decorate the constant composite that has
3288 // its value, rather than the variable that we use to access the value.
3289 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3290 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003291 // Save both the value and variable IDs for later.
3292 WorkgroupSizeValueID = InitializerID;
3293 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003294 } else {
3295 ResultID = VMap[&GV];
3296 }
3297
3298 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003299 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3300 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003301
David Neto87846742018-04-11 17:36:22 -04003302 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003303 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003304 } else if (module_scope_constant_external_init) {
3305 // This module scope constant is initialized from a storage buffer with data
3306 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003307 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003308
David Neto862b7d82018-06-14 18:48:37 -04003309 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003310 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3311 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003312 std::string hexbytes;
3313 llvm::raw_string_ostream str(hexbytes);
3314 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003315 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3316 str.str()};
3317 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3318 0);
David Neto85082642018-03-24 06:55:20 -07003319
3320 // Find Insert Point for OpDecorate.
3321 auto DecoInsertPoint =
3322 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3323 [](SPIRVInstruction *Inst) -> bool {
3324 return Inst->getOpcode() != spv::OpDecorate &&
3325 Inst->getOpcode() != spv::OpMemberDecorate &&
3326 Inst->getOpcode() != spv::OpExtInstImport;
3327 });
3328
David Neto257c3892018-04-11 13:19:45 -04003329 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003330 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003331 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3332 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003333 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003334
3335 // OpDecorate %var DescriptorSet <descriptor_set>
3336 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003337 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3338 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003339 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003340 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003341 }
3342}
3343
David Netoc6f3ab22018-04-06 18:02:31 -04003344void SPIRVProducerPass::GenerateWorkgroupVars() {
3345 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003346 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3347 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003348 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003349
3350 // Generate OpVariable.
3351 //
3352 // GIDOps[0] : Result Type ID
3353 // GIDOps[1] : Storage Class
3354 SPIRVOperandList Ops;
3355 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3356
3357 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003358 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003359 }
3360}
3361
David Neto862b7d82018-06-14 18:48:37 -04003362void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3363 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003364 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3365 return;
3366 }
David Neto862b7d82018-06-14 18:48:37 -04003367 // Gather the list of resources that are used by this function's arguments.
3368 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3369
alan-bakerf5e5f692018-11-27 08:33:24 -05003370 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3371 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003372 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003373 std::string kind =
3374 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3375 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003376 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003377 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003378 };
3379
3380 auto *fty = F.getType()->getPointerElementType();
3381 auto *func_ty = dyn_cast<FunctionType>(fty);
3382
alan-baker038e9242019-04-19 22:14:41 -04003383 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003384 // If an argument maps to a resource variable, then get descriptor set and
3385 // binding from the resource variable. Other info comes from the metadata.
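// Each kernel_arg_map operand is a 7-entry tuple, roughly:
//   !{name, old_index, new_index, offset, size, arg_kind, spec_id}
// (the field names here are descriptive labels, not part of the metadata).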
3386 const auto *arg_map = F.getMetadata("kernel_arg_map");
3387 if (arg_map) {
3388 for (const auto &arg : arg_map->operands()) {
3389 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003390 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003391 const auto name =
3392 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3393 const auto old_index =
3394 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3395 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003396 const size_t new_index = static_cast<size_t>(
3397 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003398 const auto offset =
3399 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003400 const auto arg_size =
3401 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003402 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003403 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003404 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003405 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003406
3407 uint32_t descriptor_set = 0;
3408 uint32_t binding = 0;
3409 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003410 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3411 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003412 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003413 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003414 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003415 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3416 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3417 DL));
David Neto862b7d82018-06-14 18:48:37 -04003418 } else {
3419 auto *info = resource_var_at_index[new_index];
3420 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003421 descriptor_set = info->descriptor_set;
3422 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003423 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003424 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3425 binding);
David Neto862b7d82018-06-14 18:48:37 -04003426 }
3427 } else {
3428 // There is no argument map.
3429 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003430 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003431
3432 SmallVector<Argument *, 4> arguments;
3433 for (auto &arg : F.args()) {
3434 arguments.push_back(&arg);
3435 }
3436
3437 unsigned arg_index = 0;
3438 for (auto *info : resource_var_at_index) {
3439 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003440 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003441 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003442 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003443 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003444 }
3445
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003446 // Local pointer arguments are unused in this case. Offset is always
3447 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003448 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003449 F.getName().str(),
3450 arg->getName().str(),
3451 arg_index,
3452 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3453 0,
3454 0,
3455 0,
3456 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003457 descriptorMapEntries->emplace_back(std::move(kernel_data),
3458 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003459 }
3460 arg_index++;
3461 }
3462 // Generate mappings for pointer-to-local arguments.
3463 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3464 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003465 auto where = LocalArgSpecIds.find(arg);
3466 if (where != LocalArgSpecIds.end()) {
3467 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003468 // Pod argument members are unused in this case.
3469 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003470 F.getName().str(),
3471 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003472 arg_index,
3473 ArgKind::Local,
3474 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003475 static_cast<uint32_t>(
3476 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003477 0,
3478 0};
3479 // Pointer-to-local arguments do not utilize descriptor set and binding.
3480 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003481 }
3482 }
3483 }
3484}
3485
David Neto22f144c2017-06-12 14:26:21 -04003486void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3487 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3488 ValueMapType &VMap = getValueMap();
3489 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003490 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3491 auto &GlobalConstArgSet = getGlobalConstArgSet();
3492
3493 FunctionType *FTy = F.getFunctionType();
3494
3495 //
David Neto22f144c2017-06-12 14:26:21 -04003496 // Generate OpFunction.
3497 //
3498
3499 // FOps[0] : Result Type ID
3500 // FOps[1] : Function Control
3501 // FOps[2] : Function Type ID
3502 SPIRVOperandList FOps;
3503
3504 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003505 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003506
3507 // Check function attributes for SPIRV Function Control.
3508 uint32_t FuncControl = spv::FunctionControlMaskNone;
3509 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3510 FuncControl |= spv::FunctionControlInlineMask;
3511 }
3512 if (F.hasFnAttribute(Attribute::NoInline)) {
3513 FuncControl |= spv::FunctionControlDontInlineMask;
3514 }
3515 // TODO: Check llvm attribute for Function Control Pure.
3516 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3517 FuncControl |= spv::FunctionControlPureMask;
3518 }
3519 // TODO: Check llvm attribute for Function Control Const.
3520 if (F.hasFnAttribute(Attribute::ReadNone)) {
3521 FuncControl |= spv::FunctionControlConstMask;
3522 }
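// e.g. a function carrying both AlwaysInline and ReadNone ends up with
// FuncControl == (FunctionControlInlineMask | FunctionControlConstMask).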
3523
David Neto257c3892018-04-11 13:19:45 -04003524 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003525
3526 uint32_t FTyID;
3527 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3528 SmallVector<Type *, 4> NewFuncParamTys;
3529 FunctionType *NewFTy =
3530 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3531 FTyID = lookupType(NewFTy);
3532 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003533 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003534 if (GlobalConstFuncTyMap.count(FTy)) {
3535 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3536 } else {
3537 FTyID = lookupType(FTy);
3538 }
3539 }
3540
David Neto257c3892018-04-11 13:19:45 -04003541 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003542
3543 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3544 EntryPoints.push_back(std::make_pair(&F, nextID));
3545 }
3546
3547 VMap[&F] = nextID;
3548
David Neto482550a2018-03-24 05:21:07 -07003549 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003550 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3551 }
David Neto22f144c2017-06-12 14:26:21 -04003552 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003553 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003554 SPIRVInstList.push_back(FuncInst);
3555
3556 //
3557 // Generate OpFunctionParameter for Normal function.
3558 //
3559
3560 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003561
3562 // Find Insert Point for OpDecorate.
3563 auto DecoInsertPoint =
3564 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3565 [](SPIRVInstruction *Inst) -> bool {
3566 return Inst->getOpcode() != spv::OpDecorate &&
3567 Inst->getOpcode() != spv::OpMemberDecorate &&
3568 Inst->getOpcode() != spv::OpExtInstImport;
3569 });
3570
David Neto22f144c2017-06-12 14:26:21 -04003571 // Iterate over the arguments (to get names) rather than the function type's param types.
3572 unsigned ArgIdx = 0;
3573 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003574 uint32_t param_id = nextID++;
3575 VMap[&Arg] = param_id;
3576
3577 if (CalledWithCoherentResource(Arg)) {
3578 // If the arg is passed a coherent resource ever, then decorate this
3579 // parameter with Coherent too.
3580 SPIRVOperandList decoration_ops;
3581 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003582 SPIRVInstList.insert(
3583 DecoInsertPoint,
3584 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003585 }
David Neto22f144c2017-06-12 14:26:21 -04003586
3587 // ParamOps[0] : Result Type ID
3588 SPIRVOperandList ParamOps;
3589
3590 // Find SPIRV instruction for parameter type.
3591 uint32_t ParamTyID = lookupType(Arg.getType());
3592 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3593 if (GlobalConstFuncTyMap.count(FTy)) {
3594 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3595 Type *EleTy = PTy->getPointerElementType();
3596 Type *ArgTy =
3597 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3598 ParamTyID = lookupType(ArgTy);
3599 GlobalConstArgSet.insert(&Arg);
3600 }
3601 }
3602 }
David Neto257c3892018-04-11 13:19:45 -04003603 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003604
3605 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003606 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003607 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003608 SPIRVInstList.push_back(ParamInst);
3609
3610 ArgIdx++;
3611 }
3612 }
3613}
3614
alan-bakerb6b09dc2018-11-08 16:59:28 -05003615void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003616 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3617 EntryPointVecType &EntryPoints = getEntryPointVec();
3618 ValueMapType &VMap = getValueMap();
3619 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3620 uint32_t &ExtInstImportID = getOpExtInstImportID();
3621 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3622
3623 // Set up insert point.
3624 auto InsertPoint = SPIRVInstList.begin();
3625
3626 //
3627 // Generate OpCapability
3628 //
3629 // TODO: Which llvm information is mapped to SPIRV Capability?
3630
3631 // Ops[0] = Capability
3632 SPIRVOperandList Ops;
3633
David Neto87846742018-04-11 17:36:22 -04003634 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003635 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003636 SPIRVInstList.insert(InsertPoint, CapInst);
3637
alan-bakerf906d2b2019-12-10 11:26:23 -05003638 bool write_without_format = false;
3639 bool sampled_1d = false;
3640 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003641 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003642 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3643 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003644 SPIRVInstList.insert(
3645 InsertPoint,
3646 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003647 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003648 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003649 SPIRVInstList.insert(
3650 InsertPoint,
3651 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003652 } else if (Ty->isIntegerTy(64)) {
3653 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003654 SPIRVInstList.insert(
3655 InsertPoint,
3656 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003657 } else if (Ty->isHalfTy()) {
3658 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003659 SPIRVInstList.insert(InsertPoint,
3660 new SPIRVInstruction(spv::OpCapability,
3661 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003662 } else if (Ty->isDoubleTy()) {
3663 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003664 SPIRVInstList.insert(InsertPoint,
3665 new SPIRVInstruction(spv::OpCapability,
3666 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003667 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3668 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003669 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003670 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003671 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003672 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003673 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003674 write_without_format = true;
3675 }
3676 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003677 STy->getName().startswith("opencl.image1d_wo_t") ||
3678 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3679 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003680 if (STy->getName().contains(".sampled"))
3681 sampled_1d = true;
3682 else
3683 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003684 }
3685 }
3686 }
3687 }
3688
alan-bakerf906d2b2019-12-10 11:26:23 -05003689 if (write_without_format) {
3690 // Generate OpCapability for write only image type.
3691 SPIRVInstList.insert(
3692 InsertPoint,
3693 new SPIRVInstruction(
3694 spv::OpCapability,
3695 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3696 }
3697 if (image_1d) {
3698 // Generate OpCapability for unsampled 1D image type.
3699 SPIRVInstList.insert(InsertPoint,
3700 new SPIRVInstruction(spv::OpCapability,
3701 {MkNum(spv::CapabilityImage1D)}));
3702 } else if (sampled_1d) {
3703 // Generate OpCapability for sampled 1D image type.
3704 SPIRVInstList.insert(
3705 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3706 {MkNum(spv::CapabilitySampled1D)}));
3707 }
3708
David Neto5c22a252018-03-15 16:07:41 -04003709 { // OpCapability ImageQuery
3710 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003711 for (const auto &SymVal : module.getValueSymbolTable()) {
3712 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
SJW173c7e92020-03-16 08:44:47 -05003713 if (IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003714 hasImageQuery = true;
3715 break;
3716 }
David Neto5c22a252018-03-15 16:07:41 -04003717 }
3718 }
alan-bakerf67468c2019-11-25 15:51:49 -05003719
David Neto5c22a252018-03-15 16:07:41 -04003720 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003721 auto *ImageQueryCapInst = new SPIRVInstruction(
3722 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003723 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3724 }
3725 }
3726
David Neto22f144c2017-06-12 14:26:21 -04003727 if (hasVariablePointers()) {
3728 //
David Neto22f144c2017-06-12 14:26:21 -04003729 // Generate OpCapability.
3730 //
3731 // Ops[0] = Capability
3732 //
3733 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003734 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003735
David Neto87846742018-04-11 17:36:22 -04003736 SPIRVInstList.insert(InsertPoint,
3737 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003738 } else if (hasVariablePointersStorageBuffer()) {
3739 //
3740 // Generate OpCapability.
3741 //
3742 // Ops[0] = Capability
3743 //
3744 Ops.clear();
3745 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003746
alan-baker5b86ed72019-02-15 08:26:50 -05003747 SPIRVInstList.insert(InsertPoint,
3748 new SPIRVInstruction(spv::OpCapability, Ops));
3749 }
3750
3751 // Always add the storage buffer extension
3752 {
David Neto22f144c2017-06-12 14:26:21 -04003753 //
3754 // Generate OpExtension.
3755 //
3756 // Ops[0] = Name (Literal String)
3757 //
alan-baker5b86ed72019-02-15 08:26:50 -05003758 auto *ExtensionInst = new SPIRVInstruction(
3759 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3760 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3761 }
David Neto22f144c2017-06-12 14:26:21 -04003762
alan-baker5b86ed72019-02-15 08:26:50 -05003763 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3764 //
3765 // Generate OpExtension.
3766 //
3767 // Ops[0] = Name (Literal String)
3768 //
3769 auto *ExtensionInst = new SPIRVInstruction(
3770 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3771 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003772 }
3773
3774 if (ExtInstImportID) {
3775 ++InsertPoint;
3776 }
3777
3778 //
3779 // Generate OpMemoryModel
3780 //
3781 // Memory model for Vulkan will always be GLSL450.
3782
3783 // Ops[0] = Addressing Model
3784 // Ops[1] = Memory Model
3785 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003786 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003787
David Neto87846742018-04-11 17:36:22 -04003788 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003789 SPIRVInstList.insert(InsertPoint, MemModelInst);
3790
3791 //
3792 // Generate OpEntryPoint
3793 //
3794 for (auto EntryPoint : EntryPoints) {
3795 // Ops[0] = Execution Model
3796 // Ops[1] = EntryPoint ID
3797 // Ops[2] = Name (Literal String)
3798 // ...
3799 //
3800 // TODO: Do we need to consider Interface ID for forward references???
3801 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003802 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003803 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3804 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003805
David Neto22f144c2017-06-12 14:26:21 -04003806 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003807 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003808 }
3809
David Neto87846742018-04-11 17:36:22 -04003810 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003811 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3812 }
3813
3814 for (auto EntryPoint : EntryPoints) {
3815 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3816 ->getMetadata("reqd_work_group_size")) {
3817
3818 if (!BuiltinDimVec.empty()) {
3819 llvm_unreachable(
3820 "Kernels should have consistent work group size definition");
3821 }
3822
3823 //
3824 // Generate OpExecutionMode
3825 //
3826
3827 // Ops[0] = Entry Point ID
3828 // Ops[1] = Execution Mode
3829 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3830 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003831 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003832
3833 uint32_t XDim = static_cast<uint32_t>(
3834 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3835 uint32_t YDim = static_cast<uint32_t>(
3836 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3837 uint32_t ZDim = static_cast<uint32_t>(
3838 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3839
David Neto257c3892018-04-11 13:19:45 -04003840 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003841
David Neto87846742018-04-11 17:36:22 -04003842 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003843 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3844 }
3845 }
3846
3847 //
3848 // Generate OpSource.
3849 //
3850 // Ops[0] = SourceLanguage ID
3851 // Ops[1] = Version (LiteralNum)
3852 //
3853 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003854 switch (clspv::Option::Language()) {
3855 case clspv::Option::SourceLanguage::OpenCL_C_10:
3856 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3857 break;
3858 case clspv::Option::SourceLanguage::OpenCL_C_11:
3859 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3860 break;
3861 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003862 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003863 break;
3864 case clspv::Option::SourceLanguage::OpenCL_C_20:
3865 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3866 break;
3867 case clspv::Option::SourceLanguage::OpenCL_CPP:
3868 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3869 break;
3870 default:
3871 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3872 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003873 }
David Neto22f144c2017-06-12 14:26:21 -04003874
David Neto87846742018-04-11 17:36:22 -04003875 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003876 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3877
3878 if (!BuiltinDimVec.empty()) {
3879 //
3880 // Generate OpDecorates for x/y/z dimension.
3881 //
3882 // Ops[0] = Target ID
3883 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003884 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003885
3886 // X Dimension
3887 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003888 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003889 SPIRVInstList.insert(InsertPoint,
3890 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003891
3892 // Y Dimension
3893 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003894 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003895 SPIRVInstList.insert(InsertPoint,
3896 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003897
3898 // Z Dimension
3899 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003900 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003901 SPIRVInstList.insert(InsertPoint,
3902 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003903 }
3904}
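
// For reference, a rough hand-written sketch of the module prelude emitted
// above for a hypothetical kernel "foo" compiled as OpenCL C 1.2 with
// reqd_work_group_size(8, 1, 1). Capability instructions are omitted and the
// %names are placeholders, not IDs this pass actually assigns:
//
//   OpExtension "SPV_KHR_storage_buffer_storage_class"
//   OpMemoryModel Logical GLSL450
//   OpEntryPoint GLCompute %foo "foo" %interface_var
//   OpExecutionMode %foo LocalSize 8 1 1
//   OpSource OpenCL_C 120
//
// Without reqd_work_group_size, the LocalSize execution mode is omitted and
// the x/y/z workgroup-size spec constants are decorated with SpecId 0, 1 and
// 2 instead.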
3905
David Netob6e2e062018-04-25 10:32:06 -04003906void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3907 // Work around a driver bug. Initializers on Private variables might not
3908 // work. So the start of the kernel should store the initializer value to the
3909 // variables. Yes, *every* entry point pays this cost if *any* entry point
3910 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3911 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003912 // TODO(dneto): Remove this at some point once fixed drivers are widely
3913 // available.
David Netob6e2e062018-04-25 10:32:06 -04003914 if (WorkgroupSizeVarID) {
3915 assert(WorkgroupSizeValueID);
3916
3917 SPIRVOperandList Ops;
3918 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3919
3920 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3921 getSPIRVInstList().push_back(Inst);
3922 }
3923}
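
// Illustrative only (placeholder names): when the workgroup-size builtin is
// used, the store emitted above amounts to
//   OpStore %workgroup_size_var %workgroup_size_initializer
// at the top of every entry point.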
3924
David Neto22f144c2017-06-12 14:26:21 -04003925void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3926 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3927 ValueMapType &VMap = getValueMap();
3928
David Netob6e2e062018-04-25 10:32:06 -04003929 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003930
3931 for (BasicBlock &BB : F) {
3932 // Register BasicBlock to ValueMap.
3933 VMap[&BB] = nextID;
3934
3935 //
3936 // Generate OpLabel for Basic Block.
3937 //
3938 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003939 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003940 SPIRVInstList.push_back(Inst);
3941
David Neto6dcd4712017-06-23 11:06:47 -04003942 // OpVariable instructions must come first.
3943 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003944 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3945 // Allocating a pointer requires variable pointers.
3946 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003947 setVariablePointersCapabilities(
3948 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003949 }
David Neto6dcd4712017-06-23 11:06:47 -04003950 GenerateInstruction(I);
3951 }
3952 }
3953
David Neto22f144c2017-06-12 14:26:21 -04003954 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003955 if (clspv::Option::HackInitializers()) {
3956 GenerateEntryPointInitialStores();
3957 }
David Neto22f144c2017-06-12 14:26:21 -04003958 }
3959
3960 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003961 if (!isa<AllocaInst>(I)) {
3962 GenerateInstruction(I);
3963 }
David Neto22f144c2017-06-12 14:26:21 -04003964 }
3965 }
3966}
3967
3968spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3969 const std::map<CmpInst::Predicate, spv::Op> Map = {
3970 {CmpInst::ICMP_EQ, spv::OpIEqual},
3971 {CmpInst::ICMP_NE, spv::OpINotEqual},
3972 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3973 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3974 {CmpInst::ICMP_ULT, spv::OpULessThan},
3975 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3976 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3977 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3978 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3979 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3980 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3981 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3982 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3983 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3984 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3985 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3986 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3987 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3988 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3989 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3990 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3991 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3992
3993 assert(0 != Map.count(I->getPredicate()));
3994
3995 return Map.at(I->getPredicate());
3996}
3997
3998spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3999 const std::map<unsigned, spv::Op> Map{
4000 {Instruction::Trunc, spv::OpUConvert},
4001 {Instruction::ZExt, spv::OpUConvert},
4002 {Instruction::SExt, spv::OpSConvert},
4003 {Instruction::FPToUI, spv::OpConvertFToU},
4004 {Instruction::FPToSI, spv::OpConvertFToS},
4005 {Instruction::UIToFP, spv::OpConvertUToF},
4006 {Instruction::SIToFP, spv::OpConvertSToF},
4007 {Instruction::FPTrunc, spv::OpFConvert},
4008 {Instruction::FPExt, spv::OpFConvert},
4009 {Instruction::BitCast, spv::OpBitcast}};
4010
4011 assert(0 != Map.count(I.getOpcode()));
4012
4013 return Map.at(I.getOpcode());
4014}
4015
4016spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00004017 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004018 switch (I.getOpcode()) {
4019 default:
4020 break;
4021 case Instruction::Or:
4022 return spv::OpLogicalOr;
4023 case Instruction::And:
4024 return spv::OpLogicalAnd;
4025 case Instruction::Xor:
4026 return spv::OpLogicalNotEqual;
4027 }
4028 }
4029
alan-bakerb6b09dc2018-11-08 16:59:28 -05004030 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04004031 {Instruction::Add, spv::OpIAdd},
4032 {Instruction::FAdd, spv::OpFAdd},
4033 {Instruction::Sub, spv::OpISub},
4034 {Instruction::FSub, spv::OpFSub},
4035 {Instruction::Mul, spv::OpIMul},
4036 {Instruction::FMul, spv::OpFMul},
4037 {Instruction::UDiv, spv::OpUDiv},
4038 {Instruction::SDiv, spv::OpSDiv},
4039 {Instruction::FDiv, spv::OpFDiv},
4040 {Instruction::URem, spv::OpUMod},
4041 {Instruction::SRem, spv::OpSRem},
4042 {Instruction::FRem, spv::OpFRem},
4043 {Instruction::Or, spv::OpBitwiseOr},
4044 {Instruction::Xor, spv::OpBitwiseXor},
4045 {Instruction::And, spv::OpBitwiseAnd},
4046 {Instruction::Shl, spv::OpShiftLeftLogical},
4047 {Instruction::LShr, spv::OpShiftRightLogical},
4048 {Instruction::AShr, spv::OpShiftRightArithmetic}};
4049
4050 assert(0 != Map.count(I.getOpcode()));
4051
4052 return Map.at(I.getOpcode());
4053}
4054
4055void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
4056 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4057 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04004058 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4059 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
4060
4061 // Register Instruction to ValueMap.
4062 if (0 == VMap[&I]) {
4063 VMap[&I] = nextID;
4064 }
4065
4066 switch (I.getOpcode()) {
4067 default: {
4068 if (Instruction::isCast(I.getOpcode())) {
4069 //
4070 // Generate SPIRV instructions for cast operators.
4071 //
4072
David Netod2de94a2017-08-28 17:27:47 -04004073 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004074 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04004075 auto toI8 = Ty == Type::getInt8Ty(Context);
4076 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04004077 // Handle zext, sext and uitofp with i1 type specially.
4078 if ((I.getOpcode() == Instruction::ZExt ||
4079 I.getOpcode() == Instruction::SExt ||
4080 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05004081 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004082 //
4083 // Generate OpSelect.
4084 //
4085
4086 // Ops[0] = Result Type ID
4087 // Ops[1] = Condition ID
4088 // Ops[2] = True Constant ID
4089 // Ops[3] = False Constant ID
4090 SPIRVOperandList Ops;
4091
David Neto257c3892018-04-11 13:19:45 -04004092 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004093
David Neto22f144c2017-06-12 14:26:21 -04004094 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004095 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04004096
4097 uint32_t TrueID = 0;
4098 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004099 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04004100 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004101 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04004102 } else {
4103 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
4104 }
David Neto257c3892018-04-11 13:19:45 -04004105 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04004106
4107 uint32_t FalseID = 0;
4108 if (I.getOpcode() == Instruction::ZExt) {
4109 FalseID = VMap[Constant::getNullValue(I.getType())];
4110 } else if (I.getOpcode() == Instruction::SExt) {
4111 FalseID = VMap[Constant::getNullValue(I.getType())];
4112 } else {
4113 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
4114 }
David Neto257c3892018-04-11 13:19:45 -04004115 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04004116
David Neto87846742018-04-11 17:36:22 -04004117 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004118 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05004119 } else if (!clspv::Option::Int8Support() &&
4120 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004121 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4122 // 8 bits.
4123 // Before:
4124 // %result = trunc i32 %a to i8
 4125 // After:
4126 // %result = OpBitwiseAnd %uint %a %uint_255
4127
4128 SPIRVOperandList Ops;
4129
David Neto257c3892018-04-11 13:19:45 -04004130 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04004131
4132 Type *UintTy = Type::getInt32Ty(Context);
4133 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04004134 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04004135
David Neto87846742018-04-11 17:36:22 -04004136 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04004137 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004138 } else {
4139 // Ops[0] = Result Type ID
4140 // Ops[1] = Source Value ID
4141 SPIRVOperandList Ops;
4142
David Neto257c3892018-04-11 13:19:45 -04004143 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004144
David Neto87846742018-04-11 17:36:22 -04004145 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004146 SPIRVInstList.push_back(Inst);
4147 }
4148 } else if (isa<BinaryOperator>(I)) {
4149 //
4150 // Generate SPIRV instructions for binary operators.
4151 //
4152
4153 // Handle xor with i1 type specially.
4154 if (I.getOpcode() == Instruction::Xor &&
4155 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004156 ((isa<ConstantInt>(I.getOperand(0)) &&
4157 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4158 (isa<ConstantInt>(I.getOperand(1)) &&
4159 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004160 //
4161 // Generate OpLogicalNot.
4162 //
4163 // Ops[0] = Result Type ID
4164 // Ops[1] = Operand
4165 SPIRVOperandList Ops;
4166
David Neto257c3892018-04-11 13:19:45 -04004167 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004168
4169 Value *CondV = I.getOperand(0);
4170 if (isa<Constant>(I.getOperand(0))) {
4171 CondV = I.getOperand(1);
4172 }
David Neto257c3892018-04-11 13:19:45 -04004173 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04004174
David Neto87846742018-04-11 17:36:22 -04004175 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004176 SPIRVInstList.push_back(Inst);
4177 } else {
4178 // Ops[0] = Result Type ID
4179 // Ops[1] = Operand 0
4180 // Ops[2] = Operand 1
4181 SPIRVOperandList Ops;
4182
David Neto257c3892018-04-11 13:19:45 -04004183 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4184 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004185
David Neto87846742018-04-11 17:36:22 -04004186 auto *Inst =
4187 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004188 SPIRVInstList.push_back(Inst);
4189 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004190 } else if (I.getOpcode() == Instruction::FNeg) {
4191 // The only unary operator.
4192 //
4193 // Ops[0] = Result Type ID
4194 // Ops[1] = Operand 0
4195 SPIRVOperandList ops;
4196
4197 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
4198 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
4199 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004200 } else {
4201 I.print(errs());
4202 llvm_unreachable("Unsupported instruction???");
4203 }
4204 break;
4205 }
4206 case Instruction::GetElementPtr: {
4207 auto &GlobalConstArgSet = getGlobalConstArgSet();
4208
4209 //
4210 // Generate OpAccessChain.
4211 //
4212 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4217
4218 // Ops[0] = Result Type ID
4219 // Ops[1] = Base ID
4220 // Ops[2] ... Ops[n] = Indexes ID
4221 SPIRVOperandList Ops;
4222
alan-bakerb6b09dc2018-11-08 16:59:28 -05004223 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004224 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4225 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4226 // Use pointer type with private address space for global constant.
4227 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004228 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004229 }
David Neto257c3892018-04-11 13:19:45 -04004230
4231 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004232
David Neto862b7d82018-06-14 18:48:37 -04004233 // Generate the base pointer.
4234 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004235
David Neto862b7d82018-06-14 18:48:37 -04004236 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004237
4238 //
 4239 // The gep is lowered according to the following rules (see the
 4240 // illustrative example below):
David Neto862b7d82018-06-14 18:48:37 -04004241 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
 4242 //    first index.
David Neto22f144c2017-06-12 14:26:21 -04004243 // 2. If gep's first index is a non-zero constant, generate OpPtrAccessChain
 4244 //    and use gep's first index.
 4245 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
 4246 //    use gep's first index.
 4247 // 4. If none of cases 1, 2 and 3 apply, generate OpAccessChain and use
 4248 //    gep's first index.
4249 //
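// Illustrative example (placeholder names): for
//   %p = getelementptr %struct.S, %struct.S* %base, i32 0, i32 1
// the leading zero index is dropped and we emit
//   %p = OpAccessChain %ptr_ty %base %uint_1
// while a non-zero or non-constant first index, as in
//   %q = getelementptr i32, i32* %base, i32 %n
// becomes
//   %q = OpPtrAccessChain %ptr_ty %base %n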
4250 spv::Op Opcode = spv::OpAccessChain;
4251 unsigned offset = 0;
4252 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004253 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004254 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004255 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004256 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004257 }
David Neto862b7d82018-06-14 18:48:37 -04004258 } else {
David Neto22f144c2017-06-12 14:26:21 -04004259 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004260 }
4261
4262 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004263 // Do we need to generate ArrayStride? Check against the GEP result type
4264 // rather than the pointer type of the base because when indexing into
4265 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4266 // for something else in the SPIR-V.
4267 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004268 auto address_space = ResultType->getAddressSpace();
4269 setVariablePointersCapabilities(address_space);
4270 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004271 case spv::StorageClassStorageBuffer:
4272 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004273 // Save the need to generate an ArrayStride decoration. But defer
4274 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004275 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004276 break;
4277 default:
4278 break;
David Neto1a1a0582017-07-07 12:01:44 -04004279 }
David Neto22f144c2017-06-12 14:26:21 -04004280 }
4281
4282 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004283 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004284 }
4285
David Neto87846742018-04-11 17:36:22 -04004286 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004287 SPIRVInstList.push_back(Inst);
4288 break;
4289 }
4290 case Instruction::ExtractValue: {
4291 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4292 // Ops[0] = Result Type ID
4293 // Ops[1] = Composite ID
4294 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4295 SPIRVOperandList Ops;
4296
David Neto257c3892018-04-11 13:19:45 -04004297 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004298
4299 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004300 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004301
4302 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004303 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004304 }
4305
David Neto87846742018-04-11 17:36:22 -04004306 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004307 SPIRVInstList.push_back(Inst);
4308 break;
4309 }
4310 case Instruction::InsertValue: {
4311 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4312 // Ops[0] = Result Type ID
4313 // Ops[1] = Object ID
4314 // Ops[2] = Composite ID
4315 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4316 SPIRVOperandList Ops;
4317
4318 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004319 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004320
4321 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004322 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004323
4324 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004325 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004326
4327 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004328 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004329 }
4330
David Neto87846742018-04-11 17:36:22 -04004331 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004332 SPIRVInstList.push_back(Inst);
4333 break;
4334 }
4335 case Instruction::Select: {
4336 //
4337 // Generate OpSelect.
4338 //
4339
4340 // Ops[0] = Result Type ID
4341 // Ops[1] = Condition ID
4342 // Ops[2] = True Constant ID
4343 // Ops[3] = False Constant ID
4344 SPIRVOperandList Ops;
4345
4346 // Find SPIRV instruction for parameter type.
4347 auto Ty = I.getType();
4348 if (Ty->isPointerTy()) {
4349 auto PointeeTy = Ty->getPointerElementType();
4350 if (PointeeTy->isStructTy() &&
4351 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4352 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004353 } else {
4354 // Selecting between pointers requires variable pointers.
4355 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4356 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4357 setVariablePointers(true);
4358 }
David Neto22f144c2017-06-12 14:26:21 -04004359 }
4360 }
4361
David Neto257c3892018-04-11 13:19:45 -04004362 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4363 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004364
David Neto87846742018-04-11 17:36:22 -04004365 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004366 SPIRVInstList.push_back(Inst);
4367 break;
4368 }
4369 case Instruction::ExtractElement: {
4370 // Handle <4 x i8> type manually.
4371 Type *CompositeTy = I.getOperand(0)->getType();
4372 if (is4xi8vec(CompositeTy)) {
4373 //
4374 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4375 // <4 x i8>.
4376 //
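// Illustrative only (placeholder names): extracting element 2 of a packed
// <4 x i8> value %v held in a 32-bit word becomes
//   %shifted = OpShiftRightLogical %uint %v %uint_16
//   %result  = OpBitwiseAnd %uint %shifted %uint_255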
4377
4378 //
4379 // Generate OpShiftRightLogical
4380 //
4381 // Ops[0] = Result Type ID
4382 // Ops[1] = Operand 0
4383 // Ops[2] = Operand 1
4384 //
4385 SPIRVOperandList Ops;
4386
David Neto257c3892018-04-11 13:19:45 -04004387 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004388
4389 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004390 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004391
4392 uint32_t Op1ID = 0;
4393 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4394 // Handle constant index.
4395 uint64_t Idx = CI->getZExtValue();
4396 Value *ShiftAmount =
4397 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4398 Op1ID = VMap[ShiftAmount];
4399 } else {
4400 // Handle variable index.
4401 SPIRVOperandList TmpOps;
4402
David Neto257c3892018-04-11 13:19:45 -04004403 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4404 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004405
4406 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004407 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004408
4409 Op1ID = nextID;
4410
David Neto87846742018-04-11 17:36:22 -04004411 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004412 SPIRVInstList.push_back(TmpInst);
4413 }
David Neto257c3892018-04-11 13:19:45 -04004414 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004415
4416 uint32_t ShiftID = nextID;
4417
David Neto87846742018-04-11 17:36:22 -04004418 auto *Inst =
4419 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004420 SPIRVInstList.push_back(Inst);
4421
4422 //
4423 // Generate OpBitwiseAnd
4424 //
4425 // Ops[0] = Result Type ID
4426 // Ops[1] = Operand 0
4427 // Ops[2] = Operand 1
4428 //
4429 Ops.clear();
4430
David Neto257c3892018-04-11 13:19:45 -04004431 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004432
4433 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004434 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004435
David Neto9b2d6252017-09-06 15:47:37 -04004436 // Reset mapping for this value to the result of the bitwise and.
4437 VMap[&I] = nextID;
4438
David Neto87846742018-04-11 17:36:22 -04004439 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004440 SPIRVInstList.push_back(Inst);
4441 break;
4442 }
4443
4444 // Ops[0] = Result Type ID
4445 // Ops[1] = Composite ID
4446 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4447 SPIRVOperandList Ops;
4448
David Neto257c3892018-04-11 13:19:45 -04004449 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004450
4451 spv::Op Opcode = spv::OpCompositeExtract;
4452 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004453 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004454 } else {
David Neto257c3892018-04-11 13:19:45 -04004455 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004456 Opcode = spv::OpVectorExtractDynamic;
4457 }
4458
David Neto87846742018-04-11 17:36:22 -04004459 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004460 SPIRVInstList.push_back(Inst);
4461 break;
4462 }
4463 case Instruction::InsertElement: {
4464 // Handle <4 x i8> type manually.
4465 Type *CompositeTy = I.getOperand(0)->getType();
4466 if (is4xi8vec(CompositeTy)) {
4467 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4468 uint32_t CstFFID = VMap[CstFF];
4469
4470 uint32_t ShiftAmountID = 0;
4471 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4472 // Handle constant index.
4473 uint64_t Idx = CI->getZExtValue();
4474 Value *ShiftAmount =
4475 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4476 ShiftAmountID = VMap[ShiftAmount];
4477 } else {
4478 // Handle variable index.
4479 SPIRVOperandList TmpOps;
4480
David Neto257c3892018-04-11 13:19:45 -04004481 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4482 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004483
4484 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004485 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004486
4487 ShiftAmountID = nextID;
4488
David Neto87846742018-04-11 17:36:22 -04004489 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004490 SPIRVInstList.push_back(TmpInst);
4491 }
4492
4493 //
4494 // Generate mask operations.
4495 //
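// Illustrative only (placeholder names): inserting byte %b at element
// index 2 of a packed <4 x i8> value %v amounts to
//   mask    = 0xFF << 16          ; OpShiftLeftLogical
//   cleared = %v & ~mask          ; OpNot, OpBitwiseAnd
//   shifted = %b << 16            ; OpShiftLeftLogical
//   result  = cleared | shifted   ; OpBitwiseOr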
4496
 4497 // Shift the 0xFF mask left according to the insertelement index.
4498 SPIRVOperandList Ops;
4499
David Neto257c3892018-04-11 13:19:45 -04004500 const uint32_t ResTyID = lookupType(CompositeTy);
4501 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004502
4503 uint32_t MaskID = nextID;
4504
David Neto87846742018-04-11 17:36:22 -04004505 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004506 SPIRVInstList.push_back(Inst);
4507
4508 // Inverse mask.
4509 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004510 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004511
4512 uint32_t InvMaskID = nextID;
4513
David Neto87846742018-04-11 17:36:22 -04004514 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004515 SPIRVInstList.push_back(Inst);
4516
4517 // Apply mask.
4518 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004519 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004520
4521 uint32_t OrgValID = nextID;
4522
David Neto87846742018-04-11 17:36:22 -04004523 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004524 SPIRVInstList.push_back(Inst);
4525
 4526 // Shift the inserted value into position according to the insertelement index.
4527 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004528 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4529 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004530
4531 uint32_t InsertValID = nextID;
4532
David Neto87846742018-04-11 17:36:22 -04004533 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004534 SPIRVInstList.push_back(Inst);
4535
 4536 // OR the shifted value into the masked original value.
4537 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004538 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004539
David Netoa394f392017-08-26 20:45:29 -04004540 VMap[&I] = nextID;
4541
David Neto87846742018-04-11 17:36:22 -04004542 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004543 SPIRVInstList.push_back(Inst);
4544
4545 break;
4546 }
4547
David Neto22f144c2017-06-12 14:26:21 -04004548 SPIRVOperandList Ops;
4549
James Priced26efea2018-06-09 23:28:32 +01004550 // Ops[0] = Result Type ID
4551 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004552
4553 spv::Op Opcode = spv::OpCompositeInsert;
4554 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004555 const auto value = CI->getZExtValue();
4556 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004557 // Ops[1] = Object ID
4558 // Ops[2] = Composite ID
4559 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004560 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004561 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004562 } else {
James Priced26efea2018-06-09 23:28:32 +01004563 // Ops[1] = Composite ID
4564 // Ops[2] = Object ID
4565 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004566 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004567 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004568 Opcode = spv::OpVectorInsertDynamic;
4569 }
4570
David Neto87846742018-04-11 17:36:22 -04004571 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004572 SPIRVInstList.push_back(Inst);
4573 break;
4574 }
4575 case Instruction::ShuffleVector: {
4576 // Ops[0] = Result Type ID
4577 // Ops[1] = Vector 1 ID
4578 // Ops[2] = Vector 2 ID
4579 // Ops[3] ... Ops[n] = Components (Literal Number)
4580 SPIRVOperandList Ops;
4581
David Neto257c3892018-04-11 13:19:45 -04004582 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4583 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004584
4585 uint64_t NumElements = 0;
4586 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4587 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4588
4589 if (Cst->isNullValue()) {
4590 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004591 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004592 }
4593 } else if (const ConstantDataSequential *CDS =
4594 dyn_cast<ConstantDataSequential>(Cst)) {
4595 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4596 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004597 const auto value = CDS->getElementAsInteger(i);
4598 assert(value <= UINT32_MAX);
4599 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004600 }
4601 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4602 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4603 auto Op = CV->getOperand(i);
4604
4605 uint32_t literal = 0;
4606
4607 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4608 literal = static_cast<uint32_t>(CI->getZExtValue());
4609 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4610 literal = 0xFFFFFFFFu;
4611 } else {
4612 Op->print(errs());
4613 llvm_unreachable("Unsupported element in ConstantVector!");
4614 }
4615
David Neto257c3892018-04-11 13:19:45 -04004616 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004617 }
4618 } else {
4619 Cst->print(errs());
4620 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4621 }
4622 }
4623
David Neto87846742018-04-11 17:36:22 -04004624 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004625 SPIRVInstList.push_back(Inst);
4626 break;
4627 }
4628 case Instruction::ICmp:
4629 case Instruction::FCmp: {
4630 CmpInst *CmpI = cast<CmpInst>(&I);
4631
David Netod4ca2e62017-07-06 18:47:35 -04004632 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004633 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004634 if (isa<PointerType>(ArgTy)) {
4635 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004636 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004637 errs()
4638 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4639 << "in function " << name << "\n";
4640 llvm_unreachable("Pointer equality check is invalid");
4641 break;
4642 }
4643
David Neto257c3892018-04-11 13:19:45 -04004644 // Ops[0] = Result Type ID
4645 // Ops[1] = Operand 1 ID
4646 // Ops[2] = Operand 2 ID
4647 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004648
David Neto257c3892018-04-11 13:19:45 -04004649 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4650 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004651
4652 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004653 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004654 SPIRVInstList.push_back(Inst);
4655 break;
4656 }
4657 case Instruction::Br: {
4658 // Branch instrucion is deferred because it needs label's ID. Record slot's
4659 // location on SPIRVInstructionList.
4660 DeferredInsts.push_back(
4661 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4662 break;
4663 }
4664 case Instruction::Switch: {
4665 I.print(errs());
4666 llvm_unreachable("Unsupported instruction???");
4667 break;
4668 }
4669 case Instruction::IndirectBr: {
4670 I.print(errs());
4671 llvm_unreachable("Unsupported instruction???");
4672 break;
4673 }
4674 case Instruction::PHI: {
4675 // Branch instrucion is deferred because it needs label's ID. Record slot's
4676 // location on SPIRVInstructionList.
4677 DeferredInsts.push_back(
4678 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4679 break;
4680 }
4681 case Instruction::Alloca: {
4682 //
4683 // Generate OpVariable.
4684 //
4685 // Ops[0] : Result Type ID
4686 // Ops[1] : Storage Class
4687 SPIRVOperandList Ops;
4688
David Neto257c3892018-04-11 13:19:45 -04004689 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004690
David Neto87846742018-04-11 17:36:22 -04004691 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004692 SPIRVInstList.push_back(Inst);
4693 break;
4694 }
4695 case Instruction::Load: {
4696 LoadInst *LD = cast<LoadInst>(&I);
4697 //
4698 // Generate OpLoad.
4699 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004700
alan-baker5b86ed72019-02-15 08:26:50 -05004701 if (LD->getType()->isPointerTy()) {
4702 // Loading a pointer requires variable pointers.
4703 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4704 }
David Neto22f144c2017-06-12 14:26:21 -04004705
David Neto0a2f98d2017-09-15 19:38:40 -04004706 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004707 uint32_t PointerID = VMap[LD->getPointerOperand()];
4708
4709 // This is a hack to work around what looks like a driver bug.
4710 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004711 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4712 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004713 // TODO(dneto): Remove this awful hack once drivers are fixed.
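// Illustrative only (placeholder names): instead of
//   %size = OpLoad %v3uint %workgroup_size_var
// we emit
//   %size = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value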
David Netoa60b00b2017-09-15 16:34:09 -04004714 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004715 // Generate a bitwise-and of the original value with itself.
4716 // We should have been able to get away with just an OpCopyObject,
4717 // but we need something more complex to get past certain driver bugs.
4718 // This is ridiculous, but necessary.
4719 // TODO(dneto): Revisit this once drivers fix their bugs.
4720
4721 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004722 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4723 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004724
David Neto87846742018-04-11 17:36:22 -04004725 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004726 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004727 break;
4728 }
4729
4730 // This is the normal path. Generate a load.
4731
David Neto22f144c2017-06-12 14:26:21 -04004732 // Ops[0] = Result Type ID
4733 // Ops[1] = Pointer ID
4734 // Ops[2] ... Ops[n] = Optional Memory Access
4735 //
4736 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004737
David Neto22f144c2017-06-12 14:26:21 -04004738 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004739 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004740
David Neto87846742018-04-11 17:36:22 -04004741 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004742 SPIRVInstList.push_back(Inst);
4743 break;
4744 }
4745 case Instruction::Store: {
4746 StoreInst *ST = cast<StoreInst>(&I);
4747 //
4748 // Generate OpStore.
4749 //
4750
alan-baker5b86ed72019-02-15 08:26:50 -05004751 if (ST->getValueOperand()->getType()->isPointerTy()) {
4752 // Storing a pointer requires variable pointers.
4753 setVariablePointersCapabilities(
4754 ST->getValueOperand()->getType()->getPointerAddressSpace());
4755 }
4756
David Neto22f144c2017-06-12 14:26:21 -04004757 // Ops[0] = Pointer ID
4758 // Ops[1] = Object ID
4759 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4760 //
4761 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004762 SPIRVOperandList Ops;
4763 Ops << MkId(VMap[ST->getPointerOperand()])
4764 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004765
David Neto87846742018-04-11 17:36:22 -04004766 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004767 SPIRVInstList.push_back(Inst);
4768 break;
4769 }
4770 case Instruction::AtomicCmpXchg: {
4771 I.print(errs());
4772 llvm_unreachable("Unsupported instruction???");
4773 break;
4774 }
4775 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004776 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4777
4778 spv::Op opcode;
4779
4780 switch (AtomicRMW->getOperation()) {
4781 default:
4782 I.print(errs());
4783 llvm_unreachable("Unsupported instruction???");
4784 case llvm::AtomicRMWInst::Add:
4785 opcode = spv::OpAtomicIAdd;
4786 break;
4787 case llvm::AtomicRMWInst::Sub:
4788 opcode = spv::OpAtomicISub;
4789 break;
4790 case llvm::AtomicRMWInst::Xchg:
4791 opcode = spv::OpAtomicExchange;
4792 break;
4793 case llvm::AtomicRMWInst::Min:
4794 opcode = spv::OpAtomicSMin;
4795 break;
4796 case llvm::AtomicRMWInst::Max:
4797 opcode = spv::OpAtomicSMax;
4798 break;
4799 case llvm::AtomicRMWInst::UMin:
4800 opcode = spv::OpAtomicUMin;
4801 break;
4802 case llvm::AtomicRMWInst::UMax:
4803 opcode = spv::OpAtomicUMax;
4804 break;
4805 case llvm::AtomicRMWInst::And:
4806 opcode = spv::OpAtomicAnd;
4807 break;
4808 case llvm::AtomicRMWInst::Or:
4809 opcode = spv::OpAtomicOr;
4810 break;
4811 case llvm::AtomicRMWInst::Xor:
4812 opcode = spv::OpAtomicXor;
4813 break;
4814 }
4815
4816 //
4817 // Generate OpAtomic*.
4818 //
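// Illustrative only (placeholder names): an LLVM instruction such as
//   %old = atomicrmw add i32* %p, i32 %v seq_cst
// is emitted roughly as
//   %old = OpAtomicIAdd %uint %p %scope_device %semantics %v
// where the scope and semantics operands are the constants added below.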
4819 SPIRVOperandList Ops;
4820
David Neto257c3892018-04-11 13:19:45 -04004821 Ops << MkId(lookupType(I.getType()))
4822 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004823
4824 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004825 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004826 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004827
4828 const auto ConstantMemorySemantics = ConstantInt::get(
4829 IntTy, spv::MemorySemanticsUniformMemoryMask |
4830 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004831 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004832
David Neto257c3892018-04-11 13:19:45 -04004833 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004834
4835 VMap[&I] = nextID;
4836
David Neto87846742018-04-11 17:36:22 -04004837 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004838 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004839 break;
4840 }
4841 case Instruction::Fence: {
4842 I.print(errs());
4843 llvm_unreachable("Unsupported instruction???");
4844 break;
4845 }
4846 case Instruction::Call: {
4847 CallInst *Call = dyn_cast<CallInst>(&I);
4848 Function *Callee = Call->getCalledFunction();
4849
Alan Baker202c8c72018-08-13 13:47:44 -04004850 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004851 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4852 // Generate an OpLoad
4853 SPIRVOperandList Ops;
4854 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004855
David Neto862b7d82018-06-14 18:48:37 -04004856 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4857 << MkId(ResourceVarDeferredLoadCalls[Call]);
4858
4859 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4860 SPIRVInstList.push_back(Inst);
4861 VMap[Call] = load_id;
4862 break;
4863
4864 } else {
4865 // This maps to an OpVariable we've already generated.
4866 // No code is generated for the call.
4867 }
4868 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004869 } else if (Callee->getName().startswith(
4870 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004871 // Don't codegen an instruction here, but instead map this call directly
4872 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004873 int spec_id = static_cast<int>(
4874 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004875 const auto &info = LocalSpecIdInfoMap[spec_id];
4876 VMap[Call] = info.variable_id;
4877 break;
David Neto862b7d82018-06-14 18:48:37 -04004878 }
4879
4880 // Sampler initializers become a load of the corresponding sampler.
4881
Kévin Petitdf71de32019-04-09 14:09:50 +01004882 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004883 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004884 const auto third_param = static_cast<unsigned>(
4885 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4886 auto sampler_value = third_param;
4887 if (clspv::Option::UseSamplerMap()) {
4888 sampler_value = getSamplerMap()[third_param].first;
4889 }
David Neto862b7d82018-06-14 18:48:37 -04004890
4891 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004892 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004893 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004894
David Neto257c3892018-04-11 13:19:45 -04004895 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004896 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004897
David Neto862b7d82018-06-14 18:48:37 -04004898 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004899 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004900 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004901 break;
4902 }
4903
Kévin Petit349c9502019-03-28 17:24:14 +00004904 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004905 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4906 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4907 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004908
Kévin Petit617a76d2019-04-04 13:54:16 +01004909 // If the switch above didn't find an entry, the intrinsic may be using
 4910 // the name mangling logic.
4911 bool usesMangler = false;
4912 if (opcode == spv::OpNop) {
4913 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4914 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4915 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4916 usesMangler = true;
4917 }
4918 }
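// Illustrative only: a call to the "spirv.atomic_xor" helper named in the
// StringSwitch above is lowered directly to an OpAtomicXor whose operands
// are the call's arguments, passed through in order.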
4919
Kévin Petit349c9502019-03-28 17:24:14 +00004920 if (opcode != spv::OpNop) {
4921
David Neto22f144c2017-06-12 14:26:21 -04004922 SPIRVOperandList Ops;
4923
Kévin Petit349c9502019-03-28 17:24:14 +00004924 if (!I.getType()->isVoidTy()) {
4925 Ops << MkId(lookupType(I.getType()));
4926 }
David Neto22f144c2017-06-12 14:26:21 -04004927
Kévin Petit617a76d2019-04-04 13:54:16 +01004928 unsigned firstOperand = usesMangler ? 1 : 0;
4929 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004930 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004931 }
4932
Kévin Petit349c9502019-03-28 17:24:14 +00004933 if (!I.getType()->isVoidTy()) {
4934 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004935 }
4936
Kévin Petit349c9502019-03-28 17:24:14 +00004937 SPIRVInstruction *Inst;
4938 if (!I.getType()->isVoidTy()) {
4939 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4940 } else {
4941 Inst = new SPIRVInstruction(opcode, Ops);
4942 }
Kévin Petit8a560882019-03-21 15:24:34 +00004943 SPIRVInstList.push_back(Inst);
4944 break;
4945 }
4946
David Neto22f144c2017-06-12 14:26:21 -04004947 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4948 if (Callee->getName().startswith("spirv.copy_memory")) {
4949 //
4950 // Generate OpCopyMemory.
4951 //
4952
4953 // Ops[0] = Dst ID
4954 // Ops[1] = Src ID
4955 // Ops[2] = Memory Access
4956 // Ops[3] = Alignment
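// Illustrative only (placeholder names): a volatile copy with 4-byte
// alignment is emitted as
//   OpCopyMemory %dst %src Volatile|Aligned 4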
4957
4958 auto IsVolatile =
4959 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4960
4961 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4962 : spv::MemoryAccessMaskNone;
4963
4964 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4965
4966 auto Alignment =
4967 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4968
David Neto257c3892018-04-11 13:19:45 -04004969 SPIRVOperandList Ops;
4970 Ops << MkId(VMap[Call->getArgOperand(0)])
4971 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4972 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004973
David Neto87846742018-04-11 17:36:22 -04004974 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004975
4976 SPIRVInstList.push_back(Inst);
4977
4978 break;
4979 }
4980
alan-baker75090e42020-02-20 11:21:04 -05004981 // read_image (with a sampler) is converted to OpSampledImage and
4982 // OpImageSampleExplicitLod. Additionally, OpTypeSampledImage is
4983 // generated.
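// Illustrative only (placeholder names): the generated pair is roughly
//   %si    = OpSampledImage %sampled_image_ty %image %sampler
//   %texel = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0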
SJW173c7e92020-03-16 08:44:47 -05004984 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004985 //
4986 // Generate OpSampledImage.
4987 //
4988 // Ops[0] = Result Type ID
4989 // Ops[1] = Image ID
4990 // Ops[2] = Sampler ID
4991 //
4992 SPIRVOperandList Ops;
4993
4994 Value *Image = Call->getArgOperand(0);
4995 Value *Sampler = Call->getArgOperand(1);
4996 Value *Coordinate = Call->getArgOperand(2);
4997
4998 TypeMapType &OpImageTypeMap = getImageTypeMap();
4999 Type *ImageTy = Image->getType()->getPointerElementType();
5000 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04005001 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005002 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04005003
5004 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04005005
5006 uint32_t SampledImageID = nextID;
5007
David Neto87846742018-04-11 17:36:22 -04005008 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005009 SPIRVInstList.push_back(Inst);
5010
5011 //
5012 // Generate OpImageSampleExplicitLod.
5013 //
5014 // Ops[0] = Result Type ID
5015 // Ops[1] = Sampled Image ID
5016 // Ops[2] = Coordinate ID
5017     // Ops[3] = Image Operands Type (Literal Number)
5018     // Ops[4] ... Ops[n] = Image Operand IDs
5019 //
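    // Emits, for example (illustrative names):
    //   %texel = OpImageSampleExplicitLod %v4float %sampledImage %coord
    //            Lod %float_0
    // For integer images the result is sampled as a 4-component integer
    // vector and bitcast below.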
5020 Ops.clear();
5021
alan-bakerf67468c2019-11-25 15:51:49 -05005022 const bool is_int_image = IsIntImageType(Image->getType());
5023 uint32_t result_type = 0;
5024 if (is_int_image) {
5025 result_type = v4int32ID;
5026 } else {
5027 result_type = lookupType(Call->getType());
5028 }
5029
5030 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
5031 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04005032
5033 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04005034 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04005035
alan-bakerf67468c2019-11-25 15:51:49 -05005036 uint32_t final_id = nextID++;
5037 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04005038
alan-bakerf67468c2019-11-25 15:51:49 -05005039 uint32_t image_id = final_id;
5040 if (is_int_image) {
5041 // Int image requires a bitcast from v4int to v4uint.
5042 image_id = nextID++;
5043 }
5044
5045 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005046 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05005047
5048 if (is_int_image) {
5049 // Generate the bitcast.
5050 Ops.clear();
5051 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5052 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5053 SPIRVInstList.push_back(Inst);
5054 }
David Neto22f144c2017-06-12 14:26:21 -04005055 break;
5056 }
5057
alan-baker75090e42020-02-20 11:21:04 -05005058 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05005059 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05005060 Value *Image = Call->getArgOperand(0);
5061 Value *Coordinate = Call->getArgOperand(1);
5062
5063 //
5064 // Generate OpImageFetch
5065 //
5066 // Ops[0] = Result Type ID
5067 // Ops[1] = Image ID
5068 // Ops[2] = Coordinate ID
5069     // Ops[3] = Image Operands Type (Literal Number)
5070     // Ops[4] = Lod Operand ID (constant 0)
5071 //
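    // Emits, for example (illustrative names):
    //   %texel = OpImageFetch %v4float %image %coord Lod %uint_0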
5072 SPIRVOperandList Ops;
5073
5074 const bool is_int_image = IsIntImageType(Image->getType());
5075 uint32_t result_type = 0;
5076 if (is_int_image) {
5077 result_type = v4int32ID;
5078 } else {
5079 result_type = lookupType(Call->getType());
5080 }
5081
5082 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
5083 << MkNum(spv::ImageOperandsLodMask);
5084
5085 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5086 Ops << MkId(VMap[CstInt0]);
5087
5088 uint32_t final_id = nextID++;
5089 VMap[&I] = final_id;
5090
5091 uint32_t image_id = final_id;
5092 if (is_int_image) {
5093 // Int image requires a bitcast from v4int to v4uint.
5094 image_id = nextID++;
5095 }
5096
5097 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5098 SPIRVInstList.push_back(Inst);
5099
5100 if (is_int_image) {
5101 // Generate the bitcast.
5102 Ops.clear();
5103 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5104 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5105 SPIRVInstList.push_back(Inst);
5106 }
5107 break;
5108 }
5109
alan-bakerf67468c2019-11-25 15:51:49 -05005110 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05005111 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005112 //
5113 // Generate OpImageWrite.
5114 //
5115 // Ops[0] = Image ID
5116 // Ops[1] = Coordinate ID
5117 // Ops[2] = Texel ID
5118 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5119 // Ops[4] ... Ops[n] = (Optional) Operands ID
5120 //
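    // Emits, for example (illustrative names):
    //   OpImageWrite %image %coord %texel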
5121 SPIRVOperandList Ops;
5122
5123 Value *Image = Call->getArgOperand(0);
5124 Value *Coordinate = Call->getArgOperand(1);
5125 Value *Texel = Call->getArgOperand(2);
5126
5127 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005128 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005129 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005130
5131 const bool is_int_image = IsIntImageType(Image->getType());
5132 if (is_int_image) {
5133 // Generate a bitcast to v4int and use it as the texel value.
5134 uint32_t castID = nextID++;
5135 Ops << MkId(v4int32ID) << MkId(TexelID);
5136 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5137 SPIRVInstList.push_back(cast);
5138 Ops.clear();
5139 TexelID = castID;
5140 }
David Neto257c3892018-04-11 13:19:45 -04005141 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005142
David Neto87846742018-04-11 17:36:22 -04005143 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005144 SPIRVInstList.push_back(Inst);
5145 break;
5146 }
5147
alan-bakerce179f12019-12-06 19:02:22 -05005148 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05005149 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005150 //
alan-bakerce179f12019-12-06 19:02:22 -05005151 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005152 //
5153 // Ops[0] = Image ID
5154 //
alan-bakerce179f12019-12-06 19:02:22 -05005155 // Result type has components equal to the dimensionality of the image,
5156 // plus 1 if the image is arrayed.
5157 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005158 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005159 SPIRVOperandList Ops;
5160
5161 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005162 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5163 uint32_t SizesTypeID = 0;
5164
David Neto5c22a252018-03-15 16:07:41 -04005165 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005166 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005167 const uint32_t components =
5168 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005169 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005170 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5171 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005172 SizesTypeID =
5173 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005174 }
David Neto5c22a252018-03-15 16:07:41 -04005175 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005176 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005177 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05005178 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05005179 query_opcode = spv::OpImageQuerySizeLod;
5180 // Need explicit 0 for Lod operand.
5181 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5182 Ops << MkId(VMap[CstInt0]);
5183 }
David Neto5c22a252018-03-15 16:07:41 -04005184
5185 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005186 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005187 SPIRVInstList.push_back(QueryInst);
5188
alan-bakerce179f12019-12-06 19:02:22 -05005189 // May require an extra instruction to create the appropriate result of
5190 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05005191 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05005192 if (dim == 3) {
5193 // get_image_dim returns an int4 for 3D images.
5194 //
5195 // Reset value map entry since we generated an intermediate
5196 // instruction.
5197 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005198
alan-bakerce179f12019-12-06 19:02:22 -05005199 // Implement:
5200 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5201 Ops.clear();
5202 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5203 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005204
alan-bakerce179f12019-12-06 19:02:22 -05005205 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5206 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005207
alan-bakerce179f12019-12-06 19:02:22 -05005208 auto *Inst =
5209 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5210 SPIRVInstList.push_back(Inst);
5211 } else if (dim != components) {
5212         // get_image_dim returns an int2 regardless of the arrayedness of the
5213         // image. If the image is arrayed, an element must be dropped from the
5214 // query result.
5215 //
5216 // Reset value map entry since we generated an intermediate
5217 // instruction.
5218 VMap[&I] = nextID;
5219
5220 // Implement:
5221 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5222 Ops.clear();
5223 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5224 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5225
5226 auto *Inst =
5227 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5228 SPIRVInstList.push_back(Inst);
5229 }
5230 } else if (components > 1) {
5231 // Reset value map entry since we generated an intermediate instruction.
5232 VMap[&I] = nextID;
5233
5234 // Implement:
5235 // %result = OpCompositeExtract %uint %sizes <component number>
5236 Ops.clear();
5237 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5238
5239 uint32_t component = 0;
5240 if (IsGetImageHeight(Callee))
5241 component = 1;
5242 else if (IsGetImageDepth(Callee))
5243 component = 2;
5244 Ops << MkNum(component);
5245
5246 auto *Inst =
5247 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5248 SPIRVInstList.push_back(Inst);
5249 }
David Neto5c22a252018-03-15 16:07:41 -04005250 break;
5251 }
5252
David Neto22f144c2017-06-12 14:26:21 -04005253     // Call instruction is deferred because it needs the function's ID. Record
 5254     // the slot's location on the SPIRVInstructionList.
5255 DeferredInsts.push_back(
5256 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5257
David Neto3fbb4072017-10-16 11:28:14 -04005258 // Check whether the implementation of this call uses an extended
5259 // instruction plus one more value-producing instruction. If so, then
5260 // reserve the id for the extra value-producing slot.
5261 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5262 if (EInst != kGlslExtInstBad) {
5263 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005264 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005265 VMap[&I] = nextID;
5266 nextID++;
5267 }
5268 break;
5269 }
5270 case Instruction::Ret: {
5271 unsigned NumOps = I.getNumOperands();
5272 if (NumOps == 0) {
5273 //
5274 // Generate OpReturn.
5275 //
David Netoef5ba2b2019-12-20 08:35:54 -05005276 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005277 } else {
5278 //
5279 // Generate OpReturnValue.
5280 //
5281
5282 // Ops[0] = Return Value ID
5283 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005284
5285 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005286
David Neto87846742018-04-11 17:36:22 -04005287 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005288 SPIRVInstList.push_back(Inst);
5289 break;
5290 }
5291 break;
5292 }
5293 }
5294}
5295
5296void SPIRVProducerPass::GenerateFuncEpilogue() {
5297 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5298
5299 //
5300 // Generate OpFunctionEnd
5301 //
5302
David Netoef5ba2b2019-12-20 08:35:54 -05005303 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005304 SPIRVInstList.push_back(Inst);
5305}
5306
5307bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005308 // Don't specialize <4 x i8> if i8 is generally supported.
5309 if (clspv::Option::Int8Support())
5310 return false;
5311
David Neto22f144c2017-06-12 14:26:21 -04005312 LLVMContext &Context = Ty->getContext();
5313 if (Ty->isVectorTy()) {
5314 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
5315 Ty->getVectorNumElements() == 4) {
5316 return true;
5317 }
5318 }
5319
5320 return false;
5321}
5322
5323void SPIRVProducerPass::HandleDeferredInstruction() {
5324 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5325 ValueMapType &VMap = getValueMap();
5326 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5327
5328 for (auto DeferredInst = DeferredInsts.rbegin();
5329 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5330 Value *Inst = std::get<0>(*DeferredInst);
5331 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5332 if (InsertPoint != SPIRVInstList.end()) {
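      // SPIR-V requires OpPhi instructions to appear at the start of a block,
      // so skip past any OpPhis before inserting the deferred instruction.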
5333 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5334 ++InsertPoint;
5335 }
5336 }
5337
5338 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005339       // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005340 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005341 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005342 //
5343 // Generate OpLoopMerge.
5344 //
5345 // Ops[0] = Merge Block ID
5346 // Ops[1] = Continue Target ID
5347         // Ops[2] = Loop Control
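        // Emits, for example (illustrative names):
        //   OpLoopMerge %merge_block %continue_block None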
5348 SPIRVOperandList Ops;
5349
alan-baker06cad652019-12-03 17:56:47 -05005350 auto MergeBB = MergeBlocks[BrBB];
5351 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005352 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005353 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005354 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005355 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005356
David Neto87846742018-04-11 17:36:22 -04005357 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005358 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005359 } else if (MergeBlocks.count(BrBB)) {
5360 //
5361 // Generate OpSelectionMerge.
5362 //
5363 // Ops[0] = Merge Block ID
5364 // Ops[1] = Selection Control
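        // Emits, for example (illustrative names):
        //   OpSelectionMerge %merge_block None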
5365 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005366
alan-baker06cad652019-12-03 17:56:47 -05005367 auto MergeBB = MergeBlocks[BrBB];
5368 uint32_t MergeBBID = VMap[MergeBB];
5369 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005370
alan-baker06cad652019-12-03 17:56:47 -05005371 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5372 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005373 }
5374
5375 if (Br->isConditional()) {
5376 //
5377 // Generate OpBranchConditional.
5378 //
5379 // Ops[0] = Condition ID
5380 // Ops[1] = True Label ID
5381 // Ops[2] = False Label ID
5382 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
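        // Emits, for example (illustrative names):
        //   OpBranchConditional %cond %true_label %false_label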
5383 SPIRVOperandList Ops;
5384
5385 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005386 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005387 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005388
5389 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005390
David Neto87846742018-04-11 17:36:22 -04005391 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005392 SPIRVInstList.insert(InsertPoint, BrInst);
5393 } else {
5394 //
5395 // Generate OpBranch.
5396 //
5397 // Ops[0] = Target Label ID
5398 SPIRVOperandList Ops;
5399
5400 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005401 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005402
David Neto87846742018-04-11 17:36:22 -04005403 SPIRVInstList.insert(InsertPoint,
5404 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005405 }
5406 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005407 if (PHI->getType()->isPointerTy()) {
5408 // OpPhi on pointers requires variable pointers.
5409 setVariablePointersCapabilities(
5410 PHI->getType()->getPointerAddressSpace());
5411 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5412 setVariablePointers(true);
5413 }
5414 }
5415
David Neto22f144c2017-06-12 14:26:21 -04005416 //
5417 // Generate OpPhi.
5418 //
5419 // Ops[0] = Result Type ID
5420 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
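      // Emits, for example (illustrative names):
      //   %result = OpPhi %type %val0 %parent0 %val1 %parent1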
5421 SPIRVOperandList Ops;
5422
David Neto257c3892018-04-11 13:19:45 -04005423 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005424
David Neto22f144c2017-06-12 14:26:21 -04005425 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5426 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005427 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005428 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005429 }
5430
5431 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005432 InsertPoint,
5433 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005434 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5435 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005436 auto callee_name = Callee->getName();
5437 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005438
5439 if (EInst) {
5440 uint32_t &ExtInstImportID = getOpExtInstImportID();
5441
5442 //
5443 // Generate OpExtInst.
5444 //
5445
5446 // Ops[0] = Result Type ID
5447 // Ops[1] = Set ID (OpExtInstImport ID)
5448 // Ops[2] = Instruction Number (Literal Number)
5449 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
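        // Emits, for example (illustrative names):
        //   %result = OpExtInst %float %glsl_ext_import Sqrt %x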
5450 SPIRVOperandList Ops;
5451
David Neto862b7d82018-06-14 18:48:37 -04005452 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5453 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005454
David Neto22f144c2017-06-12 14:26:21 -04005455 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5456 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005457 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005458 }
5459
David Neto87846742018-04-11 17:36:22 -04005460 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5461 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005462 SPIRVInstList.insert(InsertPoint, ExtInst);
5463
David Neto3fbb4072017-10-16 11:28:14 -04005464 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5465 if (IndirectExtInst != kGlslExtInstBad) {
5466 // Generate one more instruction that uses the result of the extended
5467 // instruction. Its result id is one more than the id of the
5468 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005469 LLVMContext &Context =
5470 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005471
David Neto3fbb4072017-10-16 11:28:14 -04005472 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5473 &VMap, &SPIRVInstList, &InsertPoint](
5474 spv::Op opcode, Constant *constant) {
5475 //
5476 // Generate instruction like:
5477 // result = opcode constant <extinst-result>
5478 //
5479 // Ops[0] = Result Type ID
5480 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5481 // Ops[2] = Operand 1 ;; the result of the extended instruction
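            // For example, clz(x) is implemented here as (illustrative names):
            //   %result = OpISub %uint %uint_31 %findUMsb_result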
5482 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005483
David Neto3fbb4072017-10-16 11:28:14 -04005484 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005485 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005486
5487 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5488 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005489 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5490 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005491 }
David Neto257c3892018-04-11 13:19:45 -04005492 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005493
5494 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005495 InsertPoint, new SPIRVInstruction(
5496 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005497 };
5498
5499 switch (IndirectExtInst) {
5500 case glsl::ExtInstFindUMsb: // Implementing clz
5501 generate_extra_inst(
5502 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5503 break;
5504 case glsl::ExtInstAcos: // Implementing acospi
5505 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005506 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005507 case glsl::ExtInstAtan2: // Implementing atan2pi
5508 generate_extra_inst(
5509 spv::OpFMul,
5510 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5511 break;
5512
5513 default:
5514 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005515 }
David Neto22f144c2017-06-12 14:26:21 -04005516 }
David Neto3fbb4072017-10-16 11:28:14 -04005517
alan-bakerb39c8262019-03-08 14:03:37 -05005518 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005519 //
5520 // Generate OpBitCount
5521 //
5522 // Ops[0] = Result Type ID
5523 // Ops[1] = Base ID
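        // Emits, for example (illustrative names):
        //   %result = OpBitCount %uint %base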
David Neto257c3892018-04-11 13:19:45 -04005524 SPIRVOperandList Ops;
5525 Ops << MkId(lookupType(Call->getType()))
5526 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005527
5528 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005529 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005530 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005531
David Neto862b7d82018-06-14 18:48:37 -04005532 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005533
5534 // Generate an OpCompositeConstruct
5535 SPIRVOperandList Ops;
5536
5537 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005538 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005539
5540 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005541 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005542 }
5543
5544 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005545 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5546 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005547
Alan Baker202c8c72018-08-13 13:47:44 -04005548 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5549
5550 // We have already mapped the call's result value to an ID.
5551 // Don't generate any code now.
5552
5553 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005554
5555 // We have already mapped the call's result value to an ID.
5556 // Don't generate any code now.
5557
David Neto22f144c2017-06-12 14:26:21 -04005558 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005559 if (Call->getType()->isPointerTy()) {
5560 // Functions returning pointers require variable pointers.
5561 setVariablePointersCapabilities(
5562 Call->getType()->getPointerAddressSpace());
5563 }
5564
David Neto22f144c2017-06-12 14:26:21 -04005565 //
5566 // Generate OpFunctionCall.
5567 //
5568
5569 // Ops[0] = Result Type ID
5570 // Ops[1] = Callee Function ID
5571 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
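        // Emits, for example (illustrative names):
        //   %result = OpFunctionCall %ret_type %callee %arg0 %arg1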
5572 SPIRVOperandList Ops;
5573
David Neto862b7d82018-06-14 18:48:37 -04005574 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005575
5576 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005577 if (CalleeID == 0) {
5578 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005579 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005580 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5581 // causes an infinite loop. Instead, go ahead and generate
5582 // the bad function call. A validator will catch the 0-Id.
5583 // llvm_unreachable("Can't translate function call");
5584 }
David Neto22f144c2017-06-12 14:26:21 -04005585
David Neto257c3892018-04-11 13:19:45 -04005586 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005587
David Neto22f144c2017-06-12 14:26:21 -04005588 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5589 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005590 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005591 auto *operand_type = operand->getType();
5592 // Images and samplers can be passed as function parameters without
5593 // variable pointers.
5594 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5595 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005596 auto sc =
5597 GetStorageClass(operand->getType()->getPointerAddressSpace());
5598 if (sc == spv::StorageClassStorageBuffer) {
5599 // Passing SSBO by reference requires variable pointers storage
5600 // buffer.
5601 setVariablePointersStorageBuffer(true);
5602 } else if (sc == spv::StorageClassWorkgroup) {
5603 // Workgroup references require variable pointers if they are not
5604 // memory object declarations.
5605 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5606 // Workgroup accessor represents a variable reference.
5607 if (!operand_call->getCalledFunction()->getName().startswith(
5608 clspv::WorkgroupAccessorFunction()))
5609 setVariablePointers(true);
5610 } else {
5611 // Arguments are function parameters.
5612 if (!isa<Argument>(operand))
5613 setVariablePointers(true);
5614 }
5615 }
5616 }
5617 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005618 }
5619
David Neto87846742018-04-11 17:36:22 -04005620 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5621 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005622 SPIRVInstList.insert(InsertPoint, CallInst);
5623 }
5624 }
5625 }
5626}
5627
David Neto1a1a0582017-07-07 12:01:44 -04005628void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005629 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005630 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005631 }
David Neto1a1a0582017-07-07 12:01:44 -04005632
5633 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005634
5635 // Find an iterator pointing just past the last decoration.
5636 bool seen_decorations = false;
5637 auto DecoInsertPoint =
5638 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5639 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5640 const bool is_decoration =
5641 Inst->getOpcode() == spv::OpDecorate ||
5642 Inst->getOpcode() == spv::OpMemberDecorate;
5643 if (is_decoration) {
5644 seen_decorations = true;
5645 return false;
5646 } else {
5647 return seen_decorations;
5648 }
5649 });
5650
David Netoc6f3ab22018-04-06 18:02:31 -04005651 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5652 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005653 for (auto *type : getTypesNeedingArrayStride()) {
5654 Type *elemTy = nullptr;
5655 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5656 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005657 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005658 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005659 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005660 elemTy = seqTy->getSequentialElementType();
5661 } else {
5662 errs() << "Unhandled strided type " << *type << "\n";
5663 llvm_unreachable("Unhandled strided type");
5664 }
David Neto1a1a0582017-07-07 12:01:44 -04005665
5666 // Ops[0] = Target ID
5667 // Ops[1] = Decoration (ArrayStride)
5668 // Ops[2] = Stride number (Literal Number)
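    // Emits, for example (illustrative names):
    //   OpDecorate %ptr_type ArrayStride 16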
5669 SPIRVOperandList Ops;
5670
David Neto85082642018-03-24 06:55:20 -07005671 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005672 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005673
5674 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5675 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005676
David Neto87846742018-04-11 17:36:22 -04005677 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005678 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5679 }
David Netoc6f3ab22018-04-06 18:02:31 -04005680
5681 // Emit SpecId decorations targeting the array size value.
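  // Emits, for example (illustrative names and values):
  //   OpDecorate %array_size_id SpecId 3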
Alan Baker202c8c72018-08-13 13:47:44 -04005682 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5683 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005684 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005685 SPIRVOperandList Ops;
5686 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5687 << MkNum(arg_info.spec_id);
5688 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005689 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005690 }
David Neto1a1a0582017-07-07 12:01:44 -04005691}
5692
David Neto22f144c2017-06-12 14:26:21 -04005693glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5694 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005695 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5696 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5697 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5698 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005699 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5700 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5701 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5702 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005703 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5704 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5705 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5706 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005707 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5708 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5709 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5710 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005711 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5712 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5713 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5714 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5715 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5716 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5717 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5718 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005719 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5720 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5721 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5722 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5723 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5724 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5725 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5726 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005727 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5728 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5729 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5730 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5731 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5732 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5733 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5734 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005735 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5736 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5737 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5738 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5739 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5740 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5741 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5742 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005743 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5744 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5745 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5746 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-baker49bb5fb2020-01-15 08:22:13 -05005747 .Case("_Z5clampDhDhDh", glsl::ExtInst::ExtInstFClamp)
5748 .Case("_Z5clampDv2_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5749 .Case("_Z5clampDv3_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5750 .Case("_Z5clampDv4_DhS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005751 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5752 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5753 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5754 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5755 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5756 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5757 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5758 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005759 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5760 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5761 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5762 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5763 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5764 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5765 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5766 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005767 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5768 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5769 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5770 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5771 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5772 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5773 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5774 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005775 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5776 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5777 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5778 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5779 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5780 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5781 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5782 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005783 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5784 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5785 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5786 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
alan-baker49bb5fb2020-01-15 08:22:13 -05005787 .Case("_Z3maxDhDh", glsl::ExtInst::ExtInstFMax)
5788 .Case("_Z3maxDv2_DhS_", glsl::ExtInst::ExtInstFMax)
5789 .Case("_Z3maxDv3_DhS_", glsl::ExtInst::ExtInstFMax)
5790 .Case("_Z3maxDv4_DhS_", glsl::ExtInst::ExtInstFMax)
David Neto22f144c2017-06-12 14:26:21 -04005791 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005792 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5793 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5794 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5795 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5796 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5797 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5798 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5799 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005800 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5801 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5802 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5803 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5804 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5805 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5806 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5807 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005808 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5809 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5810 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5811 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5812 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5813 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5814 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5815 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005816 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5817 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5818 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5819 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5820 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5821 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5822 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5823 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005824 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5825 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5826 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5827 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
alan-baker49bb5fb2020-01-15 08:22:13 -05005828 .Case("_Z3minDhDh", glsl::ExtInst::ExtInstFMin)
5829 .Case("_Z3minDv2_DhS_", glsl::ExtInst::ExtInstFMin)
5830 .Case("_Z3minDv3_DhS_", glsl::ExtInst::ExtInstFMin)
5831 .Case("_Z3minDv4_DhS_", glsl::ExtInst::ExtInstFMin)
David Neto22f144c2017-06-12 14:26:21 -04005832 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5833 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5834 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5835 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5836 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5837 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5838 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5839 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5840 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5841 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5842 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5843 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5844 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5845 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5846 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5847 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5848 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5849 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5850 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5851 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5852 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5853 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5854 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5855 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5856 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5857 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5858 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5859 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5860 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5861 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5862 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5863 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5864 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5865 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5866 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5867 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5868 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005869 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005870 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5871 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5872 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5873 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5874 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5875 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5876 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5877 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5878 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5879 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5880 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5881 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5882 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5883 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5884 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5885 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5886 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005887 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005888 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005889 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005890 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005891 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005892 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5893 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005894 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005895 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5896 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5897 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005898 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5899 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5900 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5901 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005902 .Default(kGlslExtInstBad);
5903}
5904
5905glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5906 // Check indirect cases.
5907 return StringSwitch<glsl::ExtInst>(Name)
5908 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5909 // Use exact match on float arg because these need a multiply
5910 // of a constant of the right floating point type.
5911 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5912 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5913 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5914 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5915 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5916 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5917 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5918 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005919 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5920 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5921 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5922 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005923 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5924 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5925 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5926 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5927 .Default(kGlslExtInstBad);
5928}
5929
alan-bakerb6b09dc2018-11-08 16:59:28 -05005930glsl::ExtInst
5931SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005932 auto direct = getExtInstEnum(Name);
5933 if (direct != kGlslExtInstBad)
5934 return direct;
5935 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005936}
5937
David Neto22f144c2017-06-12 14:26:21 -04005938void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005939 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005940}
5941
5942void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5943 WriteOneWord(Inst->getResultID());
5944}
5945
5946void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5947   // High 16 bits: Word Count
5948   // Low 16 bits : Opcode
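  // For example, OpTypeFloat (opcode 22) emitted with 3 words encodes as
  // (3 << 16) | 22 == 0x00030016.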
5949 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005950 const uint32_t count = Inst->getWordCount();
5951 if (count > 65535) {
5952 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5953 llvm_unreachable("Word count too high");
5954 }
David Neto22f144c2017-06-12 14:26:21 -04005955 Word |= Inst->getWordCount() << 16;
5956 WriteOneWord(Word);
5957}
5958
David Netoef5ba2b2019-12-20 08:35:54 -05005959void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005960 SPIRVOperandType OpTy = Op->getType();
5961 switch (OpTy) {
5962 default: {
5963 llvm_unreachable("Unsupported SPIRV Operand Type???");
5964 break;
5965 }
5966 case SPIRVOperandType::NUMBERID: {
5967 WriteOneWord(Op->getNumID());
5968 break;
5969 }
5970 case SPIRVOperandType::LITERAL_STRING: {
5971 std::string Str = Op->getLiteralStr();
5972 const char *Data = Str.c_str();
5973 size_t WordSize = Str.size() / 4;
5974 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5975 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5976 }
5977
5978 uint32_t Remainder = Str.size() % 4;
5979 uint32_t LastWord = 0;
5980 if (Remainder) {
5981 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5982 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5983 }
5984 }
5985
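    // SPIR-V literal strings are nul-terminated and padded to a word
    // boundary. When the string length is an exact multiple of four, the
    // word written below is all zeros and supplies the terminator.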
5986 WriteOneWord(LastWord);
5987 break;
5988 }
5989 case SPIRVOperandType::LITERAL_INTEGER:
5990 case SPIRVOperandType::LITERAL_FLOAT: {
5991 auto LiteralNum = Op->getLiteralNum();
5992 // TODO: Handle LiteranNum carefully.
5993 for (auto Word : LiteralNum) {
5994 WriteOneWord(Word);
5995 }
5996 break;
5997 }
5998 }
5999}
6000
6001void SPIRVProducerPass::WriteSPIRVBinary() {
6002 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
6003
6004 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05006005 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04006006 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
6007
6008 switch (Opcode) {
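    // Instructions fall into three encoding groups: opcode + operands only,
    // opcode + result ID + operands (type/label/import declarations), and
    // opcode + result type + result ID + remaining operands (value-producing
    // instructions).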
6009 default: {
David Neto5c22a252018-03-15 16:07:41 -04006010 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04006011 llvm_unreachable("Unsupported SPIRV instruction");
6012 break;
6013 }
6014 case spv::OpCapability:
6015 case spv::OpExtension:
6016 case spv::OpMemoryModel:
6017 case spv::OpEntryPoint:
6018 case spv::OpExecutionMode:
6019 case spv::OpSource:
6020 case spv::OpDecorate:
6021 case spv::OpMemberDecorate:
6022 case spv::OpBranch:
6023 case spv::OpBranchConditional:
6024 case spv::OpSelectionMerge:
6025 case spv::OpLoopMerge:
6026 case spv::OpStore:
6027 case spv::OpImageWrite:
6028 case spv::OpReturnValue:
6029 case spv::OpControlBarrier:
6030 case spv::OpMemoryBarrier:
6031 case spv::OpReturn:
6032 case spv::OpFunctionEnd:
6033 case spv::OpCopyMemory: {
6034 WriteWordCountAndOpcode(Inst);
6035 for (uint32_t i = 0; i < Ops.size(); i++) {
6036 WriteOperand(Ops[i]);
6037 }
6038 break;
6039 }
6040 case spv::OpTypeBool:
6041 case spv::OpTypeVoid:
6042 case spv::OpTypeSampler:
6043 case spv::OpLabel:
6044 case spv::OpExtInstImport:
6045 case spv::OpTypePointer:
6046 case spv::OpTypeRuntimeArray:
6047 case spv::OpTypeStruct:
6048 case spv::OpTypeImage:
6049 case spv::OpTypeSampledImage:
6050 case spv::OpTypeInt:
6051 case spv::OpTypeFloat:
6052 case spv::OpTypeArray:
6053 case spv::OpTypeVector:
6054 case spv::OpTypeFunction: {
6055 WriteWordCountAndOpcode(Inst);
6056 WriteResultID(Inst);
6057 for (uint32_t i = 0; i < Ops.size(); i++) {
6058 WriteOperand(Ops[i]);
6059 }
6060 break;
6061 }
6062 case spv::OpFunction:
6063 case spv::OpFunctionParameter:
6064 case spv::OpAccessChain:
6065 case spv::OpPtrAccessChain:
6066 case spv::OpInBoundsAccessChain:
6067 case spv::OpUConvert:
6068 case spv::OpSConvert:
6069 case spv::OpConvertFToU:
6070 case spv::OpConvertFToS:
6071 case spv::OpConvertUToF:
6072 case spv::OpConvertSToF:
6073 case spv::OpFConvert:
6074 case spv::OpConvertPtrToU:
6075 case spv::OpConvertUToPtr:
6076 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05006077 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04006078 case spv::OpIAdd:
6079 case spv::OpFAdd:
6080 case spv::OpISub:
6081 case spv::OpFSub:
6082 case spv::OpIMul:
6083 case spv::OpFMul:
6084 case spv::OpUDiv:
6085 case spv::OpSDiv:
6086 case spv::OpFDiv:
6087 case spv::OpUMod:
6088 case spv::OpSRem:
6089 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00006090 case spv::OpUMulExtended:
6091 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04006092 case spv::OpBitwiseOr:
6093 case spv::OpBitwiseXor:
6094 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04006095 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04006096 case spv::OpShiftLeftLogical:
6097 case spv::OpShiftRightLogical:
6098 case spv::OpShiftRightArithmetic:
6099 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04006100 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04006101 case spv::OpCompositeExtract:
6102 case spv::OpVectorExtractDynamic:
6103 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04006104 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04006105 case spv::OpVectorInsertDynamic:
6106 case spv::OpVectorShuffle:
6107 case spv::OpIEqual:
6108 case spv::OpINotEqual:
6109 case spv::OpUGreaterThan:
6110 case spv::OpUGreaterThanEqual:
6111 case spv::OpULessThan:
6112 case spv::OpULessThanEqual:
6113 case spv::OpSGreaterThan:
6114 case spv::OpSGreaterThanEqual:
6115 case spv::OpSLessThan:
6116 case spv::OpSLessThanEqual:
6117 case spv::OpFOrdEqual:
6118 case spv::OpFOrdGreaterThan:
6119 case spv::OpFOrdGreaterThanEqual:
6120 case spv::OpFOrdLessThan:
6121 case spv::OpFOrdLessThanEqual:
6122 case spv::OpFOrdNotEqual:
6123 case spv::OpFUnordEqual:
6124 case spv::OpFUnordGreaterThan:
6125 case spv::OpFUnordGreaterThanEqual:
6126 case spv::OpFUnordLessThan:
6127 case spv::OpFUnordLessThanEqual:
6128 case spv::OpFUnordNotEqual:
6129 case spv::OpExtInst:
6130 case spv::OpIsInf:
6131 case spv::OpIsNan:
6132 case spv::OpAny:
6133 case spv::OpAll:
6134 case spv::OpUndef:
6135 case spv::OpConstantNull:
6136 case spv::OpLogicalOr:
6137 case spv::OpLogicalAnd:
6138 case spv::OpLogicalNot:
6139 case spv::OpLogicalNotEqual:
6140 case spv::OpConstantComposite:
6141 case spv::OpSpecConstantComposite:
6142 case spv::OpConstantTrue:
6143 case spv::OpConstantFalse:
6144 case spv::OpConstant:
6145 case spv::OpSpecConstant:
6146 case spv::OpVariable:
6147 case spv::OpFunctionCall:
6148 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05006149 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04006150 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04006151 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05006152 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04006153 case spv::OpSelect:
6154 case spv::OpPhi:
6155 case spv::OpLoad:
6156 case spv::OpAtomicIAdd:
6157 case spv::OpAtomicISub:
6158 case spv::OpAtomicExchange:
6159 case spv::OpAtomicIIncrement:
6160 case spv::OpAtomicIDecrement:
6161 case spv::OpAtomicCompareExchange:
6162 case spv::OpAtomicUMin:
6163 case spv::OpAtomicSMin:
6164 case spv::OpAtomicUMax:
6165 case spv::OpAtomicSMax:
6166 case spv::OpAtomicAnd:
6167 case spv::OpAtomicOr:
6168 case spv::OpAtomicXor:
6169 case spv::OpDot: {
6170 WriteWordCountAndOpcode(Inst);
6171 WriteOperand(Ops[0]);
6172 WriteResultID(Inst);
6173 for (uint32_t i = 1; i < Ops.size(); i++) {
6174 WriteOperand(Ops[i]);
6175 }
6176 break;
6177 }
6178 }
6179 }
6180}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006181
alan-bakerb6b09dc2018-11-08 16:59:28 -05006182bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04006183 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006184 case Type::HalfTyID:
6185 case Type::FloatTyID:
6186 case Type::DoubleTyID:
6187 case Type::IntegerTyID:
6188 case Type::VectorTyID:
6189 return true;
6190 case Type::PointerTyID: {
6191 const PointerType *pointer_type = cast<PointerType>(type);
6192 if (pointer_type->getPointerAddressSpace() !=
6193 AddressSpace::UniformConstant) {
6194 auto pointee_type = pointer_type->getPointerElementType();
6195 if (pointee_type->isStructTy() &&
6196 cast<StructType>(pointee_type)->isOpaque()) {
6197 // Images and samplers are not nullable.
6198 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006199 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006200 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006201 return true;
6202 }
6203 case Type::ArrayTyID:
alan-baker077517b2020-03-19 13:52:12 -04006204 return IsTypeNullable(cast<SequentialType>(type)->getElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05006205 case Type::StructTyID: {
6206 const StructType *struct_type = cast<StructType>(type);
6207 // Images and samplers are not nullable.
6208 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006209 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006210 for (const auto element : struct_type->elements()) {
6211 if (!IsTypeNullable(element))
6212 return false;
6213 }
6214 return true;
6215 }
6216 default:
6217 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006218 }
6219}
Alan Bakerfcda9482018-10-02 17:09:59 -04006220
6221void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
6222 if (auto *offsets_md =
6223 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
6224     // Metadata is stored as key-value pair operands. The first element of each
6225 // operand is the type and the second is a vector of offsets.
6226 for (const auto *operand : offsets_md->operands()) {
6227 const auto *pair = cast<MDTuple>(operand);
6228 auto *type =
6229 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6230 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6231 std::vector<uint32_t> offsets;
6232 for (const Metadata *offset_md : offset_vector->operands()) {
6233 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006234 offsets.push_back(static_cast<uint32_t>(
6235 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006236 }
6237 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6238 }
6239 }
6240
6241 if (auto *sizes_md =
6242 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
6243 // Metadata is stored as key-value pair operands. The first element of each
6244 // operand is the type and the second is a triple of sizes: type size in
6245 // bits, store size and alloc size.
6246 for (const auto *operand : sizes_md->operands()) {
6247 const auto *pair = cast<MDTuple>(operand);
6248 auto *type =
6249 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6250 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6251 uint64_t type_size_in_bits =
6252 cast<ConstantInt>(
6253 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6254 ->getZExtValue();
6255 uint64_t type_store_size =
6256 cast<ConstantInt>(
6257 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6258 ->getZExtValue();
6259 uint64_t type_alloc_size =
6260 cast<ConstantInt>(
6261 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6262 ->getZExtValue();
6263 RemappedUBOTypeSizes.insert(std::make_pair(
6264 type, std::make_tuple(type_size_in_bits, type_store_size,
6265 type_alloc_size)));
6266 }
6267 }
6268}
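// Rough sketch of the metadata shape consumed above; the names below are
// hypothetical and the real strings come from RemappedTypeOffsetMetadataName()
// and RemappedTypeSizesMetadataName():
//   !clspv.remapped.offsets = !{!1}
//   !1 = !{ %struct.S undef, !2 }      ; pair: remapped type, offset vector
//   !2 = !{ i32 0, i32 16, i32 32 }    ; byte offset of each member
//   !clspv.remapped.sizes = !{!3}
//   !3 = !{ %struct.S undef, !4 }      ; pair: remapped type, size triple
//   !4 = !{ i64 384, i64 48, i64 48 }  ; bits, store size, alloc size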
6269
6270uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6271 const DataLayout &DL) {
6272 auto iter = RemappedUBOTypeSizes.find(type);
6273 if (iter != RemappedUBOTypeSizes.end()) {
6274 return std::get<0>(iter->second);
6275 }
6276
6277 return DL.getTypeSizeInBits(type);
6278}
6279
6280uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6281 auto iter = RemappedUBOTypeSizes.find(type);
6282 if (iter != RemappedUBOTypeSizes.end()) {
6283 return std::get<1>(iter->second);
6284 }
6285
6286 return DL.getTypeStoreSize(type);
6287}
6288
6289uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6290 auto iter = RemappedUBOTypeSizes.find(type);
6291 if (iter != RemappedUBOTypeSizes.end()) {
6292 return std::get<2>(iter->second);
6293 }
6294
6295 return DL.getTypeAllocSize(type);
6296}
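// Illustrative comparison of the three queries (assuming the usual clspv data
// layout and a type that was not remapped): a <3 x float> vector reports
//   GetTypeSizeInBits -> 96   raw bit width
//   GetTypeStoreSize  -> 12   bytes written by a store
//   GetTypeAllocSize  -> 16   bytes including alignment padding
// Remapped UBO types skip DataLayout entirely and return the recorded triple.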
alan-baker5b86ed72019-02-15 08:26:50 -05006297
Kévin Petitbbbda972020-03-03 19:16:31 +00006298uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
6299 StructType *type, unsigned member, const DataLayout &DL) {
6300 const auto StructLayout = DL.getStructLayout(type);
6301 // Search for the correct offsets if this type was remapped.
6302 std::vector<uint32_t> *offsets = nullptr;
6303 auto iter = RemappedUBOTypeOffsets.find(type);
6304 if (iter != RemappedUBOTypeOffsets.end()) {
6305 offsets = &iter->second;
6306 }
6307 auto ByteOffset =
6308 static_cast<uint32_t>(StructLayout->getElementOffset(member));
6309 if (offsets) {
6310 ByteOffset = (*offsets)[member];
6311 }
6312
6313 return ByteOffset;
6314}
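// Example of why the remapped offsets take precedence (illustrative,
// std140-like numbers): for struct { float a; float b[2]; } the natural LLVM
// layout puts |b| at byte offset 4, but a UBO layout pass that enforces a
// 16-byte array stride records offsets {0, 16}, and those recorded values are
// what end up in the emitted Offset decorations.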
6315
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006316void SPIRVProducerPass::setVariablePointersCapabilities(
6317 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006318 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6319 setVariablePointersStorageBuffer(true);
6320 } else {
6321 setVariablePointers(true);
6322 }
6323}
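// Background note: SPV_KHR_variable_pointers defines two capabilities.
// VariablePointersStorageBuffer only permits variable pointers into
// StorageBuffer storage, while the stronger VariablePointers capability also
// covers Workgroup storage, which is why the helper above requests the weaker
// capability whenever the address space maps to StorageBuffer.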
6324
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006325Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006326 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6327 return GetBasePointer(gep->getPointerOperand());
6328 }
6329
6330 // Conservatively return |v|.
6331 return v;
6332}
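// Example (illustrative IR): given
//   %p = getelementptr %struct.S, %struct.S* %base, i32 0, i32 1
//   %q = getelementptr float, float* %p, i32 %i
// GetBasePointer(%q) walks both GEPs and returns %base; anything that is not a
// GEP (arguments, resource accessor calls, phis) is returned unchanged.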
6333
6334bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6335 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6336 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6337 if (lhs_call->getCalledFunction()->getName().startswith(
6338 clspv::ResourceAccessorFunction()) &&
6339 rhs_call->getCalledFunction()->getName().startswith(
6340 clspv::ResourceAccessorFunction())) {
6341 // For resource accessors, match descriptor set and binding.
6342 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6343 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6344 return true;
6345 } else if (lhs_call->getCalledFunction()->getName().startswith(
6346 clspv::WorkgroupAccessorFunction()) &&
6347 rhs_call->getCalledFunction()->getName().startswith(
6348 clspv::WorkgroupAccessorFunction())) {
6349 // For workgroup resources, match spec id.
6350 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6351 return true;
6352 }
6353 }
6354 }
6355
6356 return false;
6357}
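// Illustrative case (accessor names are hypothetical; the real prefixes come
// from ResourceAccessorFunction() and WorkgroupAccessorFunction()): two calls
//   %a = call %ssbo* @clspv.resource.var.0(i32 1, i32 2, ...)
//   %b = call %ssbo* @clspv.resource.var.3(i32 1, i32 2, ...)
// are treated as the same resource because descriptor set (1) and binding (2)
// match, even though they are distinct call instructions.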
6358
6359bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6360 assert(inst->getType()->isPointerTy());
6361 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6362 spv::StorageClassStorageBuffer);
6363 const bool hack_undef = clspv::Option::HackUndef();
6364 if (auto *select = dyn_cast<SelectInst>(inst)) {
6365 auto *true_base = GetBasePointer(select->getTrueValue());
6366 auto *false_base = GetBasePointer(select->getFalseValue());
6367
6368 if (true_base == false_base)
6369 return true;
6370
6371    // If either the true or false operand is null (or undef when HackUndef() is
6372    // enabled), the same-object constraint is satisfied.
6373 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6374 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6375 return true;
6376 }
6377
6378 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6379 if (false_cst->isNullValue() ||
6380 (hack_undef && isa<UndefValue>(false_base)))
6381 return true;
6382 }
6383
6384 if (sameResource(true_base, false_base))
6385 return true;
6386 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6387 Value *value = nullptr;
6388 bool ok = true;
6389 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6390 auto *base = GetBasePointer(phi->getIncomingValue(i));
6391      // Null values satisfy the constraint of selecting from the
6392      // same object.
6393 if (!value) {
6394 if (auto *cst = dyn_cast<Constant>(base)) {
6395 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6396 value = base;
6397 } else {
6398 value = base;
6399 }
6400 } else if (base != value) {
6401 if (auto *base_cst = dyn_cast<Constant>(base)) {
6402 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6403 continue;
6404 }
6405
6406 if (sameResource(value, base))
6407 continue;
6408
6409 // Values don't represent the same base.
6410 ok = false;
6411 }
6412 }
6413
6414 return ok;
6415 }
6416
6417 // Conservatively return false.
6418 return false;
6419}
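// Illustrative case: a select between two pointers into the same SSBO,
//   %p = select i1 %c, float* %gep_a, float* %gep_b
// passes this check when both operands trace back to a single base (or to a
// null/undef placeholder), while a select between two different storage
// buffers does not, and is presumably what forces the stronger variable-pointer
// capability elsewhere in this pass.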
alan-bakere9308012019-03-15 10:25:13 -04006420
6421bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6422 if (!Arg.getType()->isPointerTy() ||
6423 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6424 // Only SSBOs need to be annotated as coherent.
6425 return false;
6426 }
6427
6428 DenseSet<Value *> visited;
6429 std::vector<Value *> stack;
6430 for (auto *U : Arg.getParent()->users()) {
6431 if (auto *call = dyn_cast<CallInst>(U)) {
6432 stack.push_back(call->getOperand(Arg.getArgNo()));
6433 }
6434 }
6435
6436 while (!stack.empty()) {
6437 Value *v = stack.back();
6438 stack.pop_back();
6439
6440 if (!visited.insert(v).second)
6441 continue;
6442
6443 auto *resource_call = dyn_cast<CallInst>(v);
6444 if (resource_call &&
6445 resource_call->getCalledFunction()->getName().startswith(
6446 clspv::ResourceAccessorFunction())) {
6447 // If this is a resource accessor function, check if the coherent operand
6448 // is set.
6449 const auto coherent =
6450 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6451 ->getZExtValue());
6452 if (coherent == 1)
6453 return true;
6454 } else if (auto *arg = dyn_cast<Argument>(v)) {
6455 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006456      for (auto *U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006457 if (auto *call = dyn_cast<CallInst>(U)) {
6458 stack.push_back(call->getOperand(arg->getArgNo()));
6459 }
6460 }
6461 } else if (auto *user = dyn_cast<User>(v)) {
6462 // If this is a user, traverse all operands that could lead to resource
6463 // variables.
6464 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6465 Value *operand = user->getOperand(i);
6466 if (operand->getType()->isPointerTy() &&
6467 operand->getType()->getPointerAddressSpace() ==
6468 clspv::AddressSpace::Global) {
6469 stack.push_back(operand);
6470 }
6471 }
6472 }
6473 }
6474
6475 // No coherent resource variables encountered.
6476 return false;
6477}
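// Worked example (illustrative call chain): if a kernel passes a global
// pointer argument into a helper function, and somewhere up that chain the
// value originates from a resource accessor call whose operand at index 5 is
// the constant 1, the traversal above reports the argument as coherent so the
// caller can decorate the corresponding SPIR-V variable accordingly.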
alan-baker06cad652019-12-03 17:56:47 -05006478
6479void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6480 // First, track loop merges and continues.
6481 DenseSet<BasicBlock *> LoopMergesAndContinues;
6482 for (auto &F : module) {
6483 if (F.isDeclaration())
6484 continue;
6485
6486 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6487 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6488 std::deque<BasicBlock *> order;
6489 DenseSet<BasicBlock *> visited;
6490 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6491
6492 for (auto BB : order) {
6493 auto terminator = BB->getTerminator();
6494 auto branch = dyn_cast<BranchInst>(terminator);
6495 if (LI.isLoopHeader(BB)) {
6496 auto L = LI.getLoopFor(BB);
6497 BasicBlock *ContinueBB = nullptr;
6498 BasicBlock *MergeBB = nullptr;
6499
6500 MergeBB = L->getExitBlock();
6501 if (!MergeBB) {
6502          // The StructurizeCFG pass converts the CFG into a triangle shape whose
6503          // regions have a single entry and a single exit. As a result, a loop
6504          // should not have multiple exits.
6505 llvm_unreachable("Loop has multiple exits???");
6506 }
6507
6508 if (L->isLoopLatch(BB)) {
6509 ContinueBB = BB;
6510 } else {
6511 // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
6512          // Per section 2.11 of the SPIR-V spec, the Continue Target must dominate
6513          // the back-edge block.
6514 BasicBlock *Latch = L->getLoopLatch();
6515 for (auto *loop_block : L->blocks()) {
6516 if (loop_block == Header) {
6517 continue;
6518 }
6519
6520 // Check whether block dominates block with back-edge.
6521            // Check whether this block dominates the block with the back-edge.
6522            // The loop latch is the single block with a back-edge. If it was
6523            // possible, StructurizeCFG made the loop conform to this requirement;
6524            // otherwise |Latch| is nullptr.
6525 ContinueBB = loop_block;
6526 }
6527 }
6528
6529 if (!ContinueBB) {
6530 llvm_unreachable("Wrong continue block from loop");
6531 }
6532 }
6533
6534 // Record the continue and merge blocks.
6535 MergeBlocks[BB] = MergeBB;
6536 ContinueBlocks[BB] = ContinueBB;
6537 LoopMergesAndContinues.insert(MergeBB);
6538 LoopMergesAndContinues.insert(ContinueBB);
6539 } else if (branch && branch->isConditional()) {
6540 auto L = LI.getLoopFor(BB);
6541 bool HasBackedge = false;
6542 while (L && !HasBackedge) {
6543 if (L->isLoopLatch(BB)) {
6544 HasBackedge = true;
6545 }
6546 L = L->getParentLoop();
6547 }
6548
6549 if (!HasBackedge) {
6550 // Only need a merge if the branch doesn't include a loop break or
6551 // continue.
6552 auto true_bb = branch->getSuccessor(0);
6553 auto false_bb = branch->getSuccessor(1);
6554 if (!LoopMergesAndContinues.count(true_bb) &&
6555 !LoopMergesAndContinues.count(false_bb)) {
6556            // The StructurizeCFG pass has already restructured the CFG. Just use
6557            // the false block of the branch instruction as the merge block.
6558 MergeBlocks[BB] = false_bb;
6559 }
6560 }
6561 }
6562 }
6563 }
6564}
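// Illustrative result for a simple loop (informational comment only):
//
//   header -> body -> latch -> header     (back edge)
//   header ------------------> exit       (loop exit)
//
// records MergeBlocks[header] = exit and ContinueBlocks[header] = latch, which
// the producer later uses when emitting OpLoopMerge, while a conditional branch
// that is not a back edge gets MergeBlocks[bb] = its false successor for use
// with OpSelectionMerge.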