blob: b6d7007b2df189ae81adfd07295a87f8b3171476 [file] [log] [blame]
David Neto22f144c2017-06-12 14:26:21 -04001// Copyright 2017 The Clspv Authors. All rights reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15#ifdef _MSC_VER
16#pragma warning(push, 0)
17#endif
18
David Neto156783e2017-07-05 15:39:41 -040019#include <cassert>
David Neto257c3892018-04-11 13:19:45 -040020#include <cstring>
David Neto118188e2018-08-24 11:27:54 -040021#include <iomanip>
22#include <list>
David Neto862b7d82018-06-14 18:48:37 -040023#include <memory>
David Neto118188e2018-08-24 11:27:54 -040024#include <set>
25#include <sstream>
26#include <string>
27#include <tuple>
28#include <unordered_set>
29#include <utility>
David Neto862b7d82018-06-14 18:48:37 -040030
David Neto118188e2018-08-24 11:27:54 -040031#include "llvm/ADT/StringSwitch.h"
32#include "llvm/ADT/UniqueVector.h"
33#include "llvm/Analysis/LoopInfo.h"
34#include "llvm/IR/Constants.h"
35#include "llvm/IR/Dominators.h"
36#include "llvm/IR/Instructions.h"
37#include "llvm/IR/Metadata.h"
38#include "llvm/IR/Module.h"
alan-bakerf67468c2019-11-25 15:51:49 -050039#include "llvm/IR/ValueSymbolTable.h"
David Neto118188e2018-08-24 11:27:54 -040040#include "llvm/Pass.h"
41#include "llvm/Support/CommandLine.h"
Kévin Petitbbbda972020-03-03 19:16:31 +000042#include "llvm/Support/MathExtras.h"
David Neto118188e2018-08-24 11:27:54 -040043#include "llvm/Support/raw_ostream.h"
44#include "llvm/Transforms/Utils/Cloning.h"
David Neto22f144c2017-06-12 14:26:21 -040045
alan-bakere0902602020-03-23 08:43:40 -040046#include "spirv/unified1/spirv.hpp"
David Neto118188e2018-08-24 11:27:54 -040047
David Neto85082642018-03-24 06:55:20 -070048#include "clspv/AddressSpace.h"
alan-bakerf5e5f692018-11-27 08:33:24 -050049#include "clspv/DescriptorMap.h"
David Neto118188e2018-08-24 11:27:54 -040050#include "clspv/Option.h"
David Neto85082642018-03-24 06:55:20 -070051#include "clspv/spirv_c_strings.hpp"
52#include "clspv/spirv_glsl.hpp"
David Neto22f144c2017-06-12 14:26:21 -040053
David Neto4feb7a42017-10-06 17:29:42 -040054#include "ArgKind.h"
alan-bakerf67468c2019-11-25 15:51:49 -050055#include "Builtins.h"
alan-baker06cad652019-12-03 17:56:47 -050056#include "ComputeStructuredOrder.h"
David Neto85082642018-03-24 06:55:20 -070057#include "ConstantEmitter.h"
Alan Baker202c8c72018-08-13 13:47:44 -040058#include "Constants.h"
David Neto78383442018-06-15 20:31:56 -040059#include "DescriptorCounter.h"
alan-baker56f7aff2019-05-22 08:06:42 -040060#include "NormalizeGlobalVariable.h"
Diego Novilloa4c44fa2019-04-11 10:56:15 -040061#include "Passes.h"
alan-bakerce179f12019-12-06 19:02:22 -050062#include "Types.h"
David Neto48f56a42017-10-06 16:44:25 -040063
David Neto22f144c2017-06-12 14:26:21 -040064#if defined(_MSC_VER)
65#pragma warning(pop)
66#endif
67
68using namespace llvm;
69using namespace clspv;
SJW173c7e92020-03-16 08:44:47 -050070using namespace clspv::Builtins;
David Neto156783e2017-07-05 15:39:41 -040071using namespace mdconst;
David Neto22f144c2017-06-12 14:26:21 -040072
namespace {

// Debug flag: log each resource variable as it is created. Hidden from the
// normal --help listing.
cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// Debug flag: dump the module IR when SPIRVProducer starts running.
cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
// Sentinel meaning "no GLSL extended instruction" (0 is not a valid enum).
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// Name prefix of the synthetic composite-construct helper functions.
const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";
David Netoab03f432017-11-03 17:00:44 -040092
// Discriminates how a SPIRVOperand's payload is interpreted when the
// instruction stream is serialized.
enum SPIRVOperandType {
  NUMBERID,        // A SPIR-V result ID (always a single word).
  LITERAL_INTEGER, // An integer literal, one or more words.
  LITERAL_STRING,  // A string literal, null-terminated and word-padded.
  LITERAL_FLOAT    // A float literal encoded as one or more words.
};
99
100struct SPIRVOperand {
101 explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
102 : Type(Ty), LiteralNum(1, Num) {}
103 explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
104 : Type(Ty), LiteralStr(Str) {}
105 explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
106 : Type(Ty), LiteralStr(Str) {}
107 explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
108 : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}
109
James Price11010dc2019-12-19 13:53:09 -0500110 SPIRVOperandType getType() const { return Type; };
111 uint32_t getNumID() const { return LiteralNum[0]; };
112 std::string getLiteralStr() const { return LiteralStr; };
113 ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };
David Neto22f144c2017-06-12 14:26:21 -0400114
David Neto87846742018-04-11 17:36:22 -0400115 uint32_t GetNumWords() const {
116 switch (Type) {
117 case NUMBERID:
118 return 1;
119 case LITERAL_INTEGER:
120 case LITERAL_FLOAT:
David Netoee2660d2018-06-28 16:31:29 -0400121 return uint32_t(LiteralNum.size());
David Neto87846742018-04-11 17:36:22 -0400122 case LITERAL_STRING:
123 // Account for the terminating null character.
David Netoee2660d2018-06-28 16:31:29 -0400124 return uint32_t((LiteralStr.size() + 4) / 4);
David Neto87846742018-04-11 17:36:22 -0400125 }
126 llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
127 }
128
David Neto22f144c2017-06-12 14:26:21 -0400129private:
130 SPIRVOperandType Type;
131 std::string LiteralStr;
132 SmallVector<uint32_t, 4> LiteralNum;
133};
134
David Netoc6f3ab22018-04-06 18:02:31 -0400135class SPIRVOperandList {
136public:
David Netoef5ba2b2019-12-20 08:35:54 -0500137 typedef std::unique_ptr<SPIRVOperand> element_type;
138 typedef SmallVector<element_type, 8> container_type;
139 typedef container_type::iterator iterator;
David Netoc6f3ab22018-04-06 18:02:31 -0400140 SPIRVOperandList() {}
alan-bakerb6b09dc2018-11-08 16:59:28 -0500141 SPIRVOperandList(const SPIRVOperandList &other) = delete;
142 SPIRVOperandList(SPIRVOperandList &&other) {
David Netoc6f3ab22018-04-06 18:02:31 -0400143 contents_ = std::move(other.contents_);
144 other.contents_.clear();
145 }
David Netoef5ba2b2019-12-20 08:35:54 -0500146 iterator begin() { return contents_.begin(); }
147 iterator end() { return contents_.end(); }
148 operator ArrayRef<element_type>() { return contents_; }
149 void push_back(element_type op) { contents_.push_back(std::move(op)); }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500150 void clear() { contents_.clear(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400151 size_t size() const { return contents_.size(); }
James Price11010dc2019-12-19 13:53:09 -0500152 const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }
David Netoc6f3ab22018-04-06 18:02:31 -0400153
David Netoef5ba2b2019-12-20 08:35:54 -0500154 const container_type &getOperands() const { return contents_; }
David Neto87846742018-04-11 17:36:22 -0400155
David Netoc6f3ab22018-04-06 18:02:31 -0400156private:
David Netoef5ba2b2019-12-20 08:35:54 -0500157 container_type contents_;
David Netoc6f3ab22018-04-06 18:02:31 -0400158};
159
James Price11010dc2019-12-19 13:53:09 -0500160SPIRVOperandList &operator<<(SPIRVOperandList &list,
David Netoef5ba2b2019-12-20 08:35:54 -0500161 std::unique_ptr<SPIRVOperand> elem) {
162 list.push_back(std::move(elem));
David Netoc6f3ab22018-04-06 18:02:31 -0400163 return list;
164}
165
// Factory helpers wrapping a value in a heap-allocated SPIRVOperand.

// Makes a single-word integer literal operand.
std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
// Makes a (possibly multi-word) integer literal operand.
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
// Makes a (possibly multi-word) float literal operand.
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
// Makes a result-ID operand.
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
// Makes a string literal operand.
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
David Netoc6f3ab22018-04-06 18:02:31 -0400181
David Neto22f144c2017-06-12 14:26:21 -0400182struct SPIRVInstruction {
David Netoef5ba2b2019-12-20 08:35:54 -0500183 // Creates an instruction with an opcode and no result ID, and with the given
184 // operands. This computes its own word count. Takes ownership of the
185 // operands and clears |Ops|.
186 SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
187 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
James Price11010dc2019-12-19 13:53:09 -0500188 for (auto &operand : Ops) {
David Netoee2660d2018-06-28 16:31:29 -0400189 WordCount += uint16_t(operand->GetNumWords());
David Neto87846742018-04-11 17:36:22 -0400190 }
David Netoef5ba2b2019-12-20 08:35:54 -0500191 Operands.reserve(Ops.size());
192 for (auto &ptr : Ops) {
193 Operands.emplace_back(std::move(ptr));
194 ptr.reset(nullptr);
David Neto87846742018-04-11 17:36:22 -0400195 }
David Netoef5ba2b2019-12-20 08:35:54 -0500196 Ops.clear();
197 }
198 // Creates an instruction with an opcode and a no-zero result ID, and
199 // with the given operands. This computes its own word count. Takes ownership
200 // of the operands and clears |Ops|.
201 SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
202 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
James Price11010dc2019-12-19 13:53:09 -0500203 for (auto &operand : Ops) {
David Neto87846742018-04-11 17:36:22 -0400204 WordCount += operand->GetNumWords();
205 }
David Netoef5ba2b2019-12-20 08:35:54 -0500206 Operands.reserve(Ops.size());
207 for (auto &ptr : Ops) {
208 Operands.emplace_back(std::move(ptr));
209 ptr.reset(nullptr);
210 }
211 if (ResID == 0) {
212 llvm_unreachable("Result ID of 0 was provided");
213 }
214 Ops.clear();
David Neto87846742018-04-11 17:36:22 -0400215 }
David Neto22f144c2017-06-12 14:26:21 -0400216
David Netoef5ba2b2019-12-20 08:35:54 -0500217 // Creates an instruction with an opcode and no result ID, and with the single
218 // operand. This computes its own word count.
219 SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
220 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
221 WordCount += operand->GetNumWords();
222 Operands.emplace_back(std::move(operand));
223 operand.reset(nullptr);
224 }
225 // Creates an instruction with an opcode and a non-zero result ID, and
226 // with the single operand. This computes its own word count.
227 SPIRVInstruction(spv::Op Opc, uint32_t ResID,
228 SPIRVOperandList::element_type operand)
229 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
230 WordCount += operand->GetNumWords();
231 if (ResID == 0) {
232 llvm_unreachable("Result ID of 0 was provided");
233 }
234 Operands.emplace_back(std::move(operand));
235 operand.reset(nullptr);
236 }
237 // Creates an instruction with an opcode and a no-zero result ID, and no
238 // operands.
239 SPIRVInstruction(spv::Op Opc, uint32_t ResID)
240 : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
241 if (ResID == 0) {
242 llvm_unreachable("Result ID of 0 was provided");
243 }
244 }
245 // Creates an instruction with an opcode, no result ID, no type ID, and no
246 // operands.
247 SPIRVInstruction(spv::Op Opc)
248 : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}
249
David Netoee2660d2018-06-28 16:31:29 -0400250 uint32_t getWordCount() const { return WordCount; }
David Neto22f144c2017-06-12 14:26:21 -0400251 uint16_t getOpcode() const { return Opcode; }
252 uint32_t getResultID() const { return ResultID; }
David Netoef5ba2b2019-12-20 08:35:54 -0500253 ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
James Price11010dc2019-12-19 13:53:09 -0500254 return Operands;
255 }
David Neto22f144c2017-06-12 14:26:21 -0400256
257private:
David Netoee2660d2018-06-28 16:31:29 -0400258 uint32_t WordCount; // Check the 16-bit bound at code generation time.
David Neto22f144c2017-06-12 14:26:21 -0400259 uint16_t Opcode;
260 uint32_t ResultID;
David Netoef5ba2b2019-12-20 08:35:54 -0500261 SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
David Neto22f144c2017-06-12 14:26:21 -0400262};
263
264struct SPIRVProducerPass final : public ModulePass {
David Neto22f144c2017-06-12 14:26:21 -0400265 typedef DenseMap<Type *, uint32_t> TypeMapType;
266 typedef UniqueVector<Type *> TypeList;
267 typedef DenseMap<Value *, uint32_t> ValueMapType;
David Netofb9a7972017-08-25 17:08:24 -0400268 typedef UniqueVector<Value *> ValueList;
David Neto22f144c2017-06-12 14:26:21 -0400269 typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
270 typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
David Neto87846742018-04-11 17:36:22 -0400271 // A vector of tuples, each of which is:
272 // - the LLVM instruction that we will later generate SPIR-V code for
273 // - where the SPIR-V instruction should be inserted
274 // - the result ID of the SPIR-V instruction
David Neto22f144c2017-06-12 14:26:21 -0400275 typedef std::vector<
276 std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
277 DeferredInstVecType;
278 typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
279 GlobalConstFuncMapType;
280
David Neto44795152017-07-13 15:45:28 -0400281 explicit SPIRVProducerPass(
alan-bakerf5e5f692018-11-27 08:33:24 -0500282 raw_pwrite_stream &out,
283 std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
alan-baker00e7a582019-06-07 12:54:21 -0400284 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
David Neto44795152017-07-13 15:45:28 -0400285 bool outputCInitList)
David Netoc2c368d2017-06-30 16:50:17 -0400286 : ModulePass(ID), samplerMap(samplerMap), out(out),
David Neto0676e6f2017-07-11 18:47:44 -0400287 binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
alan-baker00e7a582019-06-07 12:54:21 -0400288 descriptorMapEntries(descriptor_map_entries),
David Neto0676e6f2017-07-11 18:47:44 -0400289 outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
alan-baker5b86ed72019-02-15 08:26:50 -0500290 OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
291 HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
Kévin Petit89a525c2019-06-15 08:13:07 +0100292 WorkgroupSizeVarID(0), max_local_spec_id_(0) {}
David Neto22f144c2017-06-12 14:26:21 -0400293
James Price11010dc2019-12-19 13:53:09 -0500294 virtual ~SPIRVProducerPass() {
295 for (auto *Inst : SPIRVInsts) {
296 delete Inst;
297 }
298 }
299
David Neto22f144c2017-06-12 14:26:21 -0400300 void getAnalysisUsage(AnalysisUsage &AU) const override {
301 AU.addRequired<DominatorTreeWrapperPass>();
302 AU.addRequired<LoopInfoWrapperPass>();
303 }
304
305 virtual bool runOnModule(Module &module) override;
306
307 // output the SPIR-V header block
308 void outputHeader();
309
310 // patch the SPIR-V header block
311 void patchHeader();
312
  // Returns the SPIR-V type ID previously registered for |Ty| in TypeMap.
  // For a pointer outside the UniformConstant address space whose pointee
  // is an opaque struct, the lookup uses the pointee type instead (the
  // opaque type itself carries the mapping — presumably for opaque
  // resource types; confirm against GenerateSPIRVTypes).
  // Prints a diagnostic and aborts if no mapping exists.
  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      // Null check only matters here: the pointer query above would have
      // already crashed on a null |Ty|.
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
335 TypeMapType &getImageTypeMap() { return ImageTypeMap; }
alan-bakerabd82722019-12-03 17:14:51 -0500336 TypeList &getImageTypeList() { return ImageTypeList; }
David Neto22f144c2017-06-12 14:26:21 -0400337 TypeList &getTypeList() { return Types; };
338 ValueList &getConstantList() { return Constants; };
339 ValueMapType &getValueMap() { return ValueMap; }
340 ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
341 SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
David Neto22f144c2017-06-12 14:26:21 -0400342 EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
343 DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
344 ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
345 uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
346 std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
SJW2c317da2020-03-23 07:39:13 -0500347
alan-baker5b86ed72019-02-15 08:26:50 -0500348 bool hasVariablePointersStorageBuffer() {
349 return HasVariablePointersStorageBuffer;
350 }
351 void setVariablePointersStorageBuffer(bool Val) {
352 HasVariablePointersStorageBuffer = Val;
353 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400354 bool hasVariablePointers() { return HasVariablePointers; };
David Neto22f144c2017-06-12 14:26:21 -0400355 void setVariablePointers(bool Val) { HasVariablePointers = Val; };
alan-bakerb6b09dc2018-11-08 16:59:28 -0500356 ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
357 return samplerMap;
358 }
David Neto22f144c2017-06-12 14:26:21 -0400359 GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
360 return GlobalConstFuncTypeMap;
361 }
362 SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
363 return GlobalConstArgumentSet;
364 }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500365 TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }
David Neto22f144c2017-06-12 14:26:21 -0400366
David Netoc6f3ab22018-04-06 18:02:31 -0400367 void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
alan-bakerb6b09dc2018-11-08 16:59:28 -0500368 // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
369 // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
David Neto862b7d82018-06-14 18:48:37 -0400371 void FindGlobalConstVars(Module &M, const DataLayout &DL);
372 // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
373 // ModuleOrderedResourceVars.
374 void FindResourceVars(Module &M, const DataLayout &DL);
Alan Baker202c8c72018-08-13 13:47:44 -0400375 void FindWorkgroupVars(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400376 bool FindExtInst(Module &M);
377 void FindTypePerGlobalVar(GlobalVariable &GV);
378 void FindTypePerFunc(Function &F);
David Neto862b7d82018-06-14 18:48:37 -0400379 void FindTypesForSamplerMap(Module &M);
380 void FindTypesForResourceVars(Module &M);
alan-bakerb6b09dc2018-11-08 16:59:28 -0500381 // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
382 // that |Ty| and its subtypes will need a corresponding SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400383 void FindType(Type *Ty);
384 void FindConstantPerGlobalVar(GlobalVariable &GV);
385 void FindConstantPerFunc(Function &F);
386 void FindConstant(Value *V);
387 void GenerateExtInstImport();
David Neto19a1bad2017-08-25 15:01:41 -0400388 // Generates instructions for SPIR-V types corresponding to the LLVM types
389 // saved in the |Types| member. A type follows its subtypes. IDs are
390 // allocated sequentially starting with the current value of nextID, and
391 // with a type following its subtypes. Also updates nextID to just beyond
392 // the last generated ID.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500393 void GenerateSPIRVTypes(LLVMContext &context, Module &module);
David Neto22f144c2017-06-12 14:26:21 -0400394 void GenerateSPIRVConstants();
David Neto5c22a252018-03-15 16:07:41 -0400395 void GenerateModuleInfo(Module &M);
Kévin Petitbbbda972020-03-03 19:16:31 +0000396 void GeneratePushConstantDescriptormapEntries(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400397 void GenerateGlobalVar(GlobalVariable &GV);
David Netoc6f3ab22018-04-06 18:02:31 -0400398 void GenerateWorkgroupVars();
David Neto862b7d82018-06-14 18:48:37 -0400399 // Generate descriptor map entries for resource variables associated with
400 // arguments to F.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500401 void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
David Neto22f144c2017-06-12 14:26:21 -0400402 void GenerateSamplers(Module &M);
David Neto862b7d82018-06-14 18:48:37 -0400403 // Generate OpVariables for %clspv.resource.var.* calls.
404 void GenerateResourceVars(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400405 void GenerateFuncPrologue(Function &F);
406 void GenerateFuncBody(Function &F);
David Netob6e2e062018-04-25 10:32:06 -0400407 void GenerateEntryPointInitialStores();
David Neto22f144c2017-06-12 14:26:21 -0400408 spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
409 spv::Op GetSPIRVCastOpcode(Instruction &I);
410 spv::Op GetSPIRVBinaryOpcode(Instruction &I);
411 void GenerateInstruction(Instruction &I);
412 void GenerateFuncEpilogue();
413 void HandleDeferredInstruction();
alan-bakerb6b09dc2018-11-08 16:59:28 -0500414 void HandleDeferredDecorations(const DataLayout &DL);
David Neto22f144c2017-06-12 14:26:21 -0400415 bool is4xi8vec(Type *Ty) const;
416 spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
David Neto862b7d82018-06-14 18:48:37 -0400417 spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
David Neto22f144c2017-06-12 14:26:21 -0400418 spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
David Neto3fbb4072017-10-16 11:28:14 -0400419 // Returns the GLSL extended instruction enum that the given function
420 // call maps to. If none, then returns the 0 value, i.e. GLSLstd4580Bad.
David Neto22f144c2017-06-12 14:26:21 -0400421 glsl::ExtInst getExtInstEnum(StringRef Name);
David Neto3fbb4072017-10-16 11:28:14 -0400422 // Returns the GLSL extended instruction enum indirectly used by the given
423 // function. That is, to implement the given function, we use an extended
424 // instruction plus one more instruction. If none, then returns the 0 value,
425 // i.e. GLSLstd4580Bad.
426 glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
427 // Returns the single GLSL extended instruction used directly or
428 // indirectly by the given function call.
429 glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
David Neto22f144c2017-06-12 14:26:21 -0400430 void WriteOneWord(uint32_t Word);
431 void WriteResultID(SPIRVInstruction *Inst);
432 void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
David Netoef5ba2b2019-12-20 08:35:54 -0500433 void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
David Neto22f144c2017-06-12 14:26:21 -0400434 void WriteSPIRVBinary();
435
Alan Baker9bf93fb2018-08-28 16:59:26 -0400436 // Returns true if |type| is compatible with OpConstantNull.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500437 bool IsTypeNullable(const Type *type) const;
Alan Baker9bf93fb2018-08-28 16:59:26 -0400438
Alan Bakerfcda9482018-10-02 17:09:59 -0400439 // Populate UBO remapped type maps.
440 void PopulateUBOTypeMaps(Module &module);
441
alan-baker06cad652019-12-03 17:56:47 -0500442 // Populate the merge and continue block maps.
443 void PopulateStructuredCFGMaps(Module &module);
444
Alan Bakerfcda9482018-10-02 17:09:59 -0400445 // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
446 // uses the internal map, otherwise it falls back on the data layout.
447 uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
448 uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
449 uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
Kévin Petitbbbda972020-03-03 19:16:31 +0000450 uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
451 unsigned member,
452 const DataLayout &DL);
Alan Bakerfcda9482018-10-02 17:09:59 -0400453
alan-baker5b86ed72019-02-15 08:26:50 -0500454 // Returns the base pointer of |v|.
455 Value *GetBasePointer(Value *v);
456
457 // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| base on
458 // |address_space|.
459 void setVariablePointersCapabilities(unsigned address_space);
460
461 // Returns true if |lhs| and |rhs| represent the same resource or workgroup
462 // variable.
463 bool sameResource(Value *lhs, Value *rhs) const;
464
465 // Returns true if |inst| is phi or select that selects from the same
466 // structure (or null).
467 bool selectFromSameObject(Instruction *inst);
468
alan-bakere9308012019-03-15 10:25:13 -0400469 // Returns true if |Arg| is called with a coherent resource.
470 bool CalledWithCoherentResource(Argument &Arg);
471
David Neto22f144c2017-06-12 14:26:21 -0400472private:
473 static char ID;
David Neto44795152017-07-13 15:45:28 -0400474 ArrayRef<std::pair<unsigned, std::string>> samplerMap;
David Neto22f144c2017-06-12 14:26:21 -0400475 raw_pwrite_stream &out;
David Neto0676e6f2017-07-11 18:47:44 -0400476
477 // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
478 // convert to other formats on demand?
479
480 // When emitting a C initialization list, the WriteSPIRVBinary method
481 // will actually write its words to this vector via binaryTempOut.
482 SmallVector<char, 100> binaryTempUnderlyingVector;
483 raw_svector_ostream binaryTempOut;
484
485 // Binary output writes to this stream, which might be |out| or
486 // |binaryTempOut|. It's the latter when we really want to write a C
487 // initializer list.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400488 raw_pwrite_stream *binaryOut;
alan-bakerf5e5f692018-11-27 08:33:24 -0500489 std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
David Neto0676e6f2017-07-11 18:47:44 -0400490 const bool outputCInitList; // If true, output look like {0x7023, ... , 5}
David Neto22f144c2017-06-12 14:26:21 -0400491 uint64_t patchBoundOffset;
492 uint32_t nextID;
493
alan-bakerf67468c2019-11-25 15:51:49 -0500494 // ID for OpTypeInt 32 1.
495 uint32_t int32ID = 0;
496 // ID for OpTypeVector %int 4.
497 uint32_t v4int32ID = 0;
498
David Neto19a1bad2017-08-25 15:01:41 -0400499 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400500 TypeMapType TypeMap;
David Neto19a1bad2017-08-25 15:01:41 -0400501 // Maps an LLVM image type to its SPIR-V ID.
David Neto22f144c2017-06-12 14:26:21 -0400502 TypeMapType ImageTypeMap;
alan-bakerabd82722019-12-03 17:14:51 -0500503 // A unique-vector of LLVM image types. This list is used to provide
504 // deterministic traversal of image types.
505 TypeList ImageTypeList;
David Neto19a1bad2017-08-25 15:01:41 -0400506 // A unique-vector of LLVM types that map to a SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400507 TypeList Types;
508 ValueList Constants;
David Neto19a1bad2017-08-25 15:01:41 -0400509 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400510 ValueMapType ValueMap;
511 ValueMapType AllocatedValueMap;
512 SPIRVInstructionList SPIRVInsts;
David Neto862b7d82018-06-14 18:48:37 -0400513
David Neto22f144c2017-06-12 14:26:21 -0400514 EntryPointVecType EntryPointVec;
515 DeferredInstVecType DeferredInstVec;
516 ValueList EntryPointInterfacesVec;
517 uint32_t OpExtInstImportID;
518 std::vector<uint32_t> BuiltinDimensionVec;
alan-baker5b86ed72019-02-15 08:26:50 -0500519 bool HasVariablePointersStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -0400520 bool HasVariablePointers;
521 Type *SamplerTy;
alan-baker09cb9802019-12-10 13:16:27 -0500522 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Netoc77d9e22018-03-24 06:30:28 -0700523
524 // If a function F has a pointer-to-__constant parameter, then this variable
David Neto9ed8e2f2018-03-24 06:47:24 -0700525 // will map F's type to (G, index of the parameter), where in a first phase
526 // G is F's type. During FindTypePerFunc, G will be changed to F's type
527 // but replacing the pointer-to-constant parameter with
528 // pointer-to-ModuleScopePrivate.
David Netoc77d9e22018-03-24 06:30:28 -0700529 // TODO(dneto): This doesn't seem general enough? A function might have
530 // more than one such parameter.
David Neto22f144c2017-06-12 14:26:21 -0400531 GlobalConstFuncMapType GlobalConstFuncTypeMap;
532 SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
David Neto1a1a0582017-07-07 12:01:44 -0400533 // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
David Neto85082642018-03-24 06:55:20 -0700534 // or array types, and which point into transparent memory (StorageBuffer
535 // storage class). These will require an ArrayStride decoration.
David Neto1a1a0582017-07-07 12:01:44 -0400536 // See SPV_KHR_variable_pointers rev 13.
David Neto85082642018-03-24 06:55:20 -0700537 TypeList TypesNeedingArrayStride;
David Netoa60b00b2017-09-15 16:34:09 -0400538
539 // This is truly ugly, but works around what look like driver bugs.
540 // For get_local_size, an earlier part of the flow has created a module-scope
541 // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
543 // When this is present, save the IDs of the initializer value and variable
544 // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
546 // This mimics what Glslang does, and that's what drivers are used to.
David Neto66cfe642018-03-24 06:13:56 -0700547 // TODO(dneto): Remove this once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -0400548 uint32_t WorkgroupSizeValueID;
549 uint32_t WorkgroupSizeVarID;
David Neto26aaf622017-10-23 18:11:53 -0400550
  // Bookkeeping for mapping kernel arguments to resource variables.
  // One instance describes a single resource (identified by descriptor set
  // and binding) accessed through a @clspv.resource.var.* accessor function.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set; // Value for the DescriptorSet decoration.
    const unsigned binding;        // Value for the Binding decoration.
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind; // Kind of the originating kernel argument.
    const int coherent; // Coherent flag taken from the accessor call site.
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
569 // resource variable we will have to create. Resource var indices are
570 // indices into this vector.
571 SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
572 // This is a vector of pointers of all the resource vars, but ordered by
573 // kernel function, and then by argument.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500574 UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
David Neto862b7d82018-06-14 18:48:37 -0400575 // Map a function to the ordered list of resource variables it uses, one for
576 // each argument. If an argument does not use a resource variable, it
577 // will have a null pointer entry.
578 using FunctionToResourceVarsMapType =
579 DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
580 FunctionToResourceVarsMapType FunctionToResourceVarsMap;
581
582 // What LLVM types map to SPIR-V types needing layout? These are the
583 // arrays and structures supporting storage buffers and uniform buffers.
584 TypeList TypesNeedingLayout;
585 // What LLVM struct types map to a SPIR-V struct type with Block decoration?
586 UniqueVector<StructType *> StructTypesNeedingBlock;
587 // For a call that represents a load from an opaque type (samplers, images),
588 // map it to the variable id it should load from.
589 DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;
David Neto85082642018-03-24 06:55:20 -0700590
Alan Baker202c8c72018-08-13 13:47:44 -0400591 // One larger than the maximum used SpecId for pointer-to-local arguments.
592 int max_local_spec_id_;
David Netoc6f3ab22018-04-06 18:02:31 -0400593 // An ordered list of the kernel arguments of type pointer-to-local.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500594 using LocalArgList = SmallVector<Argument *, 8>;
David Netoc6f3ab22018-04-06 18:02:31 -0400595 LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array variable.
    Type *elem_type;
    // The ID of the constant holding the array size.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
Alan Baker202c8c72018-08-13 13:47:44 -0400611 // A mapping from Argument to its assigned SpecId.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500612 DenseMap<const Argument *, int> LocalArgSpecIds;
Alan Baker202c8c72018-08-13 13:47:44 -0400613 // A mapping from SpecId to its LocalArgInfo.
614 DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
Alan Bakerfcda9482018-10-02 17:09:59 -0400615 // A mapping from a remapped type to its real offsets.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500616 DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
Alan Bakerfcda9482018-10-02 17:09:59 -0400617 // A mapping from a remapped type to its real sizes.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500618 DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
619 RemappedUBOTypeSizes;
alan-baker06cad652019-12-03 17:56:47 -0500620
621 // Maps basic block to its merge block.
622 DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
623 // Maps basic block to its continue block.
624 DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
David Neto22f144c2017-06-12 14:26:21 -0400625};
626
627char SPIRVProducerPass::ID;
David Netoc6f3ab22018-04-06 18:02:31 -0400628
alan-bakerb6b09dc2018-11-08 16:59:28 -0500629} // namespace
David Neto22f144c2017-06-12 14:26:21 -0400630
631namespace clspv {
alan-bakerf5e5f692018-11-27 08:33:24 -0500632ModulePass *createSPIRVProducerPass(
633 raw_pwrite_stream &out,
634 std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
alan-baker00e7a582019-06-07 12:54:21 -0400635 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
alan-bakerf5e5f692018-11-27 08:33:24 -0500636 bool outputCInitList) {
637 return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
alan-baker00e7a582019-06-07 12:54:21 -0400638 outputCInitList);
David Neto22f144c2017-06-12 14:26:21 -0400639}
David Netoc2c368d2017-06-30 16:50:17 -0400640} // namespace clspv
David Neto22f144c2017-06-12 14:26:21 -0400641
// Pass entry point: lowers the LLVM module to a SPIR-V binary.
// The phases below are strictly ordered: IR info gathering, then types,
// constants, samplers, variables, then per-function code, then deferred
// fixups, module info, and finally the binary write + header patch.
// NOTE(review): returns false although it clears some GV initializers;
// as the final emission pass this appears intentional — confirm.
bool SPIRVProducerPass::runOnModule(Module &module) {
  if (ShowProducerIR) {
    llvm::outs() << module << "\n";
  }
  // When emitting a C initializer list we build the binary in a temporary
  // stream first; otherwise write straight to the real output.
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate descriptor map entries for all push constants
  GeneratePushConstantDescriptormapEntries(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  // Resolve instructions/decorations that could not be emitted in order.
  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    // Re-emit the buffered binary as a comma-separated C initializer list
    // of 32-bit words.
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    // Pack 4 consecutive bytes into one little-endian 32-bit word.
    // NOTE: assumes str.size() is a multiple of 4 (whole SPIR-V words).
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}
760
761void SPIRVProducerPass::outputHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400762 binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
763 sizeof(spv::MagicNumber));
alan-bakere0902602020-03-23 08:43:40 -0400764 const uint32_t spv_version = 0x10000; // SPIR-V 1.0
765 binaryOut->write(reinterpret_cast<const char *>(&spv_version),
766 sizeof(spv_version));
David Neto22f144c2017-06-12 14:26:21 -0400767
alan-baker0c18ab02019-06-12 10:23:21 -0400768 // use Google's vendor ID
769 const uint32_t vendor = 21 << 16;
alan-baker00e7a582019-06-07 12:54:21 -0400770 binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));
David Neto22f144c2017-06-12 14:26:21 -0400771
alan-baker00e7a582019-06-07 12:54:21 -0400772 // we record where we need to come back to and patch in the bound value
773 patchBoundOffset = binaryOut->tell();
David Neto22f144c2017-06-12 14:26:21 -0400774
alan-baker00e7a582019-06-07 12:54:21 -0400775 // output a bad bound for now
776 binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));
David Neto22f144c2017-06-12 14:26:21 -0400777
alan-baker00e7a582019-06-07 12:54:21 -0400778 // output the schema (reserved for use and must be 0)
779 const uint32_t schema = 0;
780 binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
David Neto22f144c2017-06-12 14:26:21 -0400781}
782
// Overwrites the placeholder ID-bound word in the already-emitted SPIR-V
// header with the final value of nextID. patchBoundOffset was recorded by
// outputHeader() when the placeholder was written.
void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}
788
// Walks the module ahead of SPIR-V generation and pre-registers the types
// and constants the later phases will need (via FindType/FindConstant),
// along with bookkeeping for resource variables, samplers and workgroup
// variables. Some of the registered types/constants are artificial: they
// do not appear verbatim in the LLVM IR but are required by the Vulkan
// SPIR-V lowering (e.g. 0/1 constants for i1 -> OpSelect expansion).
// This function is executed ahead of FindType and FindConstant.
void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  // Note whether any module-scope variable is the WorkgroupSize builtin;
  // if so, reqd_work_group_size constants must be registered below.
  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              // zext i1 -> select(cond, 1, 0)
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              // sext i1 -> select(cond, -1, 0)
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              // uitofp i1 -> select(cond, 1.0f, 0.0f)
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          } else if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          } else if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need a integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // Register the channel scalar type implied by any typed image types
    // present in the module, keyed off the mangled struct type names.
    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_array_wo_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}
957
// Collects and rewrites __constant (AddressSpace::Constant) module-scope
// variables. Dead ones are erased. Live ones are either validated against
// the storage-buffer size cap (when -module-constants-in-storage-buffer is
// on) or rewritten into ModuleScopePrivate globals, with any functions that
// take them as arguments recorded in the GlobalConstFuncTypeMap so their
// signatures can be rewritten later.
void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  // Partition __constant globals into live and dead sets.
  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      // Snapshot users first: replaceUsesOfWith below mutates use lists.
      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      // Records, for a call that receives |gv| as an argument, the callee's
      // function type and the argument index it was passed at.
      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
1052
// Scans the module for @clspv.resource.var.* accessor calls and builds
// ResourceVarInfoList, FunctionToResourceVarsMap (per-kernel, indexed by
// argument position) and ModuleOrderedResourceVars.
// Accessor call operands read here: 0 = descriptor set, 1 = binding,
// 2 = arg kind, 3 = arg index, 5 = coherent flag (operand 4 is unused here).
void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    // Debug dump of the module-ordered resource variables.
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}
1156
// Returns true if any call in the module maps to a GLSL.std.450 extended
// instruction, either directly (getExtInstEnum) or indirectly
// (getIndirectExtInstEnum). As a side effect, registers the extra
// constants needed to expand indirectly-mapped instructions (31 for the
// clz/FindUMsb expansion; 1/pi for acospi/asinpi/atanpi/atan2pi).
bool SPIRVProducerPass::FindExtInst(Module &M) {
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
          // Check whether this call is for extend instructions.
          auto callee_name = Callee->getName();
          const glsl::ExtInst EInst = getExtInstEnum(callee_name);
          const glsl::ExtInst IndirectEInst =
              getIndirectExtInstEnum(callee_name);

          HasExtInst |=
              (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);

          // NOTE(review): uses IndirectEInst as a boolean; this assumes
          // kGlslExtInstBad converts to false (i.e. is zero) — confirm.
          if (IndirectEInst) {
            // Register extra constants if needed.

            // Registers a type and constant for computing the result of the
            // given instruction. If the result of the instruction is a vector,
            // then make a splat vector constant with the same number of
            // elements.
            auto register_constant = [this, &I](Constant *constant) {
              FindType(constant->getType());
              FindConstant(constant);
              if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
                // Register the splat vector of the value with the same
                // width as the result of the instruction.
                auto *vec_constant = ConstantVector::getSplat(
                    {static_cast<unsigned>(vectorTy->getNumElements()), false},
                    constant);
                FindConstant(vec_constant);
                FindType(vec_constant->getType());
              }
            };
            switch (IndirectEInst) {
            case glsl::ExtInstFindUMsb:
              // clz needs OpExtInst and OpISub with constant 31, or splat
              // vector of 31. Add it to the constant list here.
              register_constant(
                  ConstantInt::get(Type::getInt32Ty(Context), 31));
              break;
            case glsl::ExtInstAcos:
            case glsl::ExtInstAsin:
            case glsl::ExtInstAtan:
            case glsl::ExtInstAtan2:
              // We need 1/pi for acospi, asinpi, atan2pi.
              register_constant(
                  ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
              break;
            default:
              assert(false && "internally inconsistent");
            }
          }
        }
      }
    }
  }

  return HasExtInst;
}
1221
// Registers the type of a module-scope variable (its pointer type) so a
// corresponding SPIR-V type will be emitted later.
void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}
1226
1227void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1228 // Investigate function's type.
1229 FunctionType *FTy = F.getFunctionType();
1230
1231 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1232 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001233 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001234 if (GlobalConstFuncTyMap.count(FTy)) {
1235 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1236 SmallVector<Type *, 4> NewFuncParamTys;
1237 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1238 Type *ParamTy = FTy->getParamType(i);
1239 if (i == GVCstArgIdx) {
1240 Type *EleTy = ParamTy->getPointerElementType();
1241 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1242 }
1243
1244 NewFuncParamTys.push_back(ParamTy);
1245 }
1246
1247 FunctionType *NewFTy =
1248 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1249 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1250 FTy = NewFTy;
1251 }
1252
1253 FindType(FTy);
1254 } else {
1255 // As kernel functions do not have parameters, create new function type and
1256 // add it to type map.
1257 SmallVector<Type *, 4> NewFuncParamTys;
1258 FunctionType *NewFTy =
1259 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1260 FindType(NewFTy);
1261 }
1262
1263 // Investigate instructions' type in function body.
1264 for (BasicBlock &BB : F) {
1265 for (Instruction &I : BB) {
1266 if (isa<ShuffleVectorInst>(I)) {
1267 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1268 // Ignore type for mask of shuffle vector instruction.
1269 if (i == 2) {
1270 continue;
1271 }
1272
1273 Value *Op = I.getOperand(i);
1274 if (!isa<MetadataAsValue>(Op)) {
1275 FindType(Op->getType());
1276 }
1277 }
1278
1279 FindType(I.getType());
1280 continue;
1281 }
1282
David Neto862b7d82018-06-14 18:48:37 -04001283 CallInst *Call = dyn_cast<CallInst>(&I);
1284
1285 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001286 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001287 // This is a fake call representing access to a resource variable.
1288 // We handle that elsewhere.
1289 continue;
1290 }
1291
Alan Baker202c8c72018-08-13 13:47:44 -04001292 if (Call && Call->getCalledFunction()->getName().startswith(
1293 clspv::WorkgroupAccessorFunction())) {
1294 // This is a fake call representing access to a workgroup variable.
1295 // We handle that elsewhere.
1296 continue;
1297 }
1298
alan-bakerf083bed2020-01-29 08:15:42 -05001299 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1300 // OpCompositeExtract which takes literal values for indices. As a result
1301 // don't map the type of indices.
1302 if (I.getOpcode() == Instruction::ExtractValue) {
1303 FindType(I.getOperand(0)->getType());
1304 continue;
1305 }
1306 if (I.getOpcode() == Instruction::InsertValue) {
1307 FindType(I.getOperand(0)->getType());
1308 FindType(I.getOperand(1)->getType());
1309 continue;
1310 }
1311
1312 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1313 // the index is a constant. In such a case don't map the index type.
1314 if (I.getOpcode() == Instruction::ExtractElement) {
1315 FindType(I.getOperand(0)->getType());
1316 Value *op1 = I.getOperand(1);
1317 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1318 FindType(op1->getType());
1319 }
1320 continue;
1321 }
1322 if (I.getOpcode() == Instruction::InsertElement) {
1323 FindType(I.getOperand(0)->getType());
1324 FindType(I.getOperand(1)->getType());
1325 Value *op2 = I.getOperand(2);
1326 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1327 FindType(op2->getType());
1328 }
1329 continue;
1330 }
1331
David Neto22f144c2017-06-12 14:26:21 -04001332 // Work through the operands of the instruction.
1333 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1334 Value *const Op = I.getOperand(i);
1335 // If any of the operands is a constant, find the type!
1336 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1337 FindType(Op->getType());
1338 }
1339 }
1340
1341 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001342 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001343 // Avoid to check call instruction's type.
1344 break;
1345 }
Alan Baker202c8c72018-08-13 13:47:44 -04001346 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1347 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1348 clspv::WorkgroupAccessorFunction())) {
1349 // This is a fake call representing access to a workgroup variable.
1350 // We handle that elsewhere.
1351 continue;
1352 }
1353 }
David Neto22f144c2017-06-12 14:26:21 -04001354 if (!isa<MetadataAsValue>(&Op)) {
1355 FindType(Op->getType());
1356 continue;
1357 }
1358 }
1359
David Neto22f144c2017-06-12 14:26:21 -04001360 // We don't want to track the type of this call as we are going to replace
1361 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001362 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001363 Call->getCalledFunction()->getName())) {
1364 continue;
1365 }
1366
1367 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1368 // If gep's base operand has ModuleScopePrivate address space, make gep
1369 // return ModuleScopePrivate address space.
1370 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1371 // Add pointer type with private address space for global constant to
1372 // type list.
1373 Type *EleTy = I.getType()->getPointerElementType();
1374 Type *NewPTy =
1375 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1376
1377 FindType(NewPTy);
1378 continue;
1379 }
1380 }
1381
1382 FindType(I.getType());
1383 }
1384 }
1385}
1386
David Neto862b7d82018-06-14 18:48:37 -04001387void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1388 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001389 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001390 0 < getSamplerMap().size()) {
1391 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1392 if (!SamplerStructTy) {
1393 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1394 }
1395
1396 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1397
1398 FindType(SamplerTy);
1399 }
1400}
1401
1402void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1403 // Record types so they are generated.
1404 TypesNeedingLayout.reset();
1405 StructTypesNeedingBlock.reset();
1406
1407 // To match older clspv codegen, generate the float type first if required
1408 // for images.
1409 for (const auto *info : ModuleOrderedResourceVars) {
1410 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1411 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001412 if (IsIntImageType(info->var_fn->getReturnType())) {
1413 // Nothing for now...
1414 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1415 FindType(Type::getInt32Ty(M.getContext()));
1416 }
1417
1418 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001419 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001420 }
1421 }
1422
1423 for (const auto *info : ModuleOrderedResourceVars) {
1424 Type *type = info->var_fn->getReturnType();
1425
1426 switch (info->arg_kind) {
1427 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001428 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001429 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1430 StructTypesNeedingBlock.insert(sty);
1431 } else {
1432 errs() << *type << "\n";
1433 llvm_unreachable("Buffer arguments must map to structures!");
1434 }
1435 break;
1436 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001437 case clspv::ArgKind::PodUBO:
1438 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001439 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1440 StructTypesNeedingBlock.insert(sty);
1441 } else {
1442 errs() << *type << "\n";
1443 llvm_unreachable("POD arguments must map to structures!");
1444 }
1445 break;
1446 case clspv::ArgKind::ReadOnlyImage:
1447 case clspv::ArgKind::WriteOnlyImage:
1448 case clspv::ArgKind::Sampler:
1449 // Sampler and image types map to the pointee type but
1450 // in the uniform constant address space.
1451 type = PointerType::get(type->getPointerElementType(),
1452 clspv::AddressSpace::UniformConstant);
1453 break;
1454 default:
1455 break;
1456 }
1457
1458 // The converted type is the type of the OpVariable we will generate.
1459 // If the pointee type is an array of size zero, FindType will convert it
1460 // to a runtime array.
1461 FindType(type);
1462 }
1463
alan-bakerdcd97412019-09-16 15:32:30 -04001464 // If module constants are clustered in a storage buffer then that struct
1465 // needs layout decorations.
1466 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1467 for (GlobalVariable &GV : M.globals()) {
1468 PointerType *PTy = cast<PointerType>(GV.getType());
1469 const auto AS = PTy->getAddressSpace();
1470 const bool module_scope_constant_external_init =
1471 (AS == AddressSpace::Constant) && GV.hasInitializer();
1472 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1473 if (module_scope_constant_external_init &&
1474 spv::BuiltInMax == BuiltinType) {
1475 StructTypesNeedingBlock.insert(
1476 cast<StructType>(PTy->getPointerElementType()));
1477 }
1478 }
1479 }
1480
Kévin Petitbbbda972020-03-03 19:16:31 +00001481 for (const GlobalVariable &GV : M.globals()) {
1482 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1483 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1484 assert(Ty->isStructTy() && "Push constants have to be structures.");
1485 auto STy = cast<StructType>(Ty);
1486 StructTypesNeedingBlock.insert(STy);
1487 }
1488 }
1489
David Neto862b7d82018-06-14 18:48:37 -04001490 // Traverse the arrays and structures underneath each Block, and
1491 // mark them as needing layout.
1492 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1493 StructTypesNeedingBlock.end());
1494 while (!work_list.empty()) {
1495 Type *type = work_list.back();
1496 work_list.pop_back();
1497 TypesNeedingLayout.insert(type);
1498 switch (type->getTypeID()) {
1499 case Type::ArrayTyID:
1500 work_list.push_back(type->getArrayElementType());
1501 if (!Hack_generate_runtime_array_stride_early) {
1502 // Remember this array type for deferred decoration.
1503 TypesNeedingArrayStride.insert(type);
1504 }
1505 break;
1506 case Type::StructTyID:
1507 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1508 work_list.push_back(elem_ty);
1509 }
1510 default:
1511 // This type and its contained types don't get layout.
1512 break;
1513 }
1514 }
1515}
1516
// Translate the clspv local-spec-id module metadata into per-argument
// bookkeeping for pointer-to-local kernel arguments.
//
// Each metadata tuple has the shape (function, argument index, spec id).
// For every distinct spec id, four consecutive SPIR-V result ids
// (nextID .. nextID+3) are reserved and recorded in a LocalArgInfo entry,
// and the types/constants that entry will need are registered.
void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
  // The SpecId assignment for pointer-to-local arguments is recorded in
  // module-level metadata. Translate that information into local argument
  // information.
  NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
  if (!nmd)
    return;
  for (auto operand : nmd->operands()) {
    // Unpack (function, arg index, spec id) from the tuple.
    MDTuple *tuple = cast<MDTuple>(operand);
    ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
    Function *func = cast<Function>(fn_md->getValue());
    ConstantAsMetadata *arg_index_md =
        cast<ConstantAsMetadata>(tuple->getOperand(1));
    int arg_index = static_cast<int>(
        cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
    Argument *arg = &*(func->arg_begin() + arg_index);

    ConstantAsMetadata *spec_id_md =
        cast<ConstantAsMetadata>(tuple->getOperand(2));
    int spec_id = static_cast<int>(
        cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());

    // Track the highest spec id seen so far (exclusive upper bound).
    max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
    LocalArgSpecIds[arg] = spec_id;
    // Multiple arguments may share a spec id; only build the info once.
    if (LocalSpecIdInfoMap.count(spec_id))
      continue;

    // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
    LocalArgInfo info{nextID,     arg->getType()->getPointerElementType(),
                      nextID + 1, nextID + 2,
                      nextID + 3, spec_id};
    LocalSpecIdInfoMap[spec_id] = info;
    // Reserve the four ids consumed by the LocalArgInfo above.
    nextID += 4;

    // Ensure the types necessary for this argument get generated.
    Type *IdxTy = Type::getInt32Ty(M.getContext());
    FindConstant(ConstantInt::get(IdxTy, 0));
    FindType(IdxTy);
    FindType(arg->getType());
  }
}
1558
David Neto22f144c2017-06-12 14:26:21 -04001559void SPIRVProducerPass::FindType(Type *Ty) {
1560 TypeList &TyList = getTypeList();
1561
1562 if (0 != TyList.idFor(Ty)) {
1563 return;
1564 }
1565
1566 if (Ty->isPointerTy()) {
1567 auto AddrSpace = Ty->getPointerAddressSpace();
1568 if ((AddressSpace::Constant == AddrSpace) ||
1569 (AddressSpace::Global == AddrSpace)) {
1570 auto PointeeTy = Ty->getPointerElementType();
1571
1572 if (PointeeTy->isStructTy() &&
1573 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1574 FindType(PointeeTy);
1575 auto ActualPointerTy =
1576 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1577 FindType(ActualPointerTy);
1578 return;
1579 }
1580 }
1581 }
1582
David Neto862b7d82018-06-14 18:48:37 -04001583 // By convention, LLVM array type with 0 elements will map to
1584 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1585 // has a constant number of elements. We need to support type of the
1586 // constant.
1587 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1588 if (arrayTy->getNumElements() > 0) {
1589 LLVMContext &Context = Ty->getContext();
1590 FindType(Type::getInt32Ty(Context));
1591 }
David Neto22f144c2017-06-12 14:26:21 -04001592 }
1593
1594 for (Type *SubTy : Ty->subtypes()) {
1595 FindType(SubTy);
1596 }
1597
1598 TyList.insert(Ty);
1599}
1600
1601void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1602 // If the global variable has a (non undef) initializer.
1603 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001604 // Generate the constant if it's not the initializer to a module scope
1605 // constant that we will expect in a storage buffer.
1606 const bool module_scope_constant_external_init =
1607 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1608 clspv::Option::ModuleConstantsInStorageBuffer();
1609 if (!module_scope_constant_external_init) {
1610 FindConstant(GV.getInitializer());
1611 }
David Neto22f144c2017-06-12 14:26:21 -04001612 }
1613}
1614
1615void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1616 // Investigate constants in function body.
1617 for (BasicBlock &BB : F) {
1618 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001619 if (auto *call = dyn_cast<CallInst>(&I)) {
1620 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001621 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001622 // We've handled these constants elsewhere, so skip it.
1623 continue;
1624 }
Alan Baker202c8c72018-08-13 13:47:44 -04001625 if (name.startswith(clspv::ResourceAccessorFunction())) {
1626 continue;
1627 }
1628 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001629 continue;
1630 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001631 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1632 // Skip the first operand that has the SPIR-V Opcode
1633 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1634 if (isa<Constant>(I.getOperand(i)) &&
1635 !isa<GlobalValue>(I.getOperand(i))) {
1636 FindConstant(I.getOperand(i));
1637 }
1638 }
1639 continue;
1640 }
David Neto22f144c2017-06-12 14:26:21 -04001641 }
1642
1643 if (isa<AllocaInst>(I)) {
1644 // Alloca instruction has constant for the number of element. Ignore it.
1645 continue;
1646 } else if (isa<ShuffleVectorInst>(I)) {
1647 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1648 // Ignore constant for mask of shuffle vector instruction.
1649 if (i == 2) {
1650 continue;
1651 }
1652
1653 if (isa<Constant>(I.getOperand(i)) &&
1654 !isa<GlobalValue>(I.getOperand(i))) {
1655 FindConstant(I.getOperand(i));
1656 }
1657 }
1658
1659 continue;
1660 } else if (isa<InsertElementInst>(I)) {
1661 // Handle InsertElement with <4 x i8> specially.
1662 Type *CompositeTy = I.getOperand(0)->getType();
1663 if (is4xi8vec(CompositeTy)) {
1664 LLVMContext &Context = CompositeTy->getContext();
1665 if (isa<Constant>(I.getOperand(0))) {
1666 FindConstant(I.getOperand(0));
1667 }
1668
1669 if (isa<Constant>(I.getOperand(1))) {
1670 FindConstant(I.getOperand(1));
1671 }
1672
1673 // Add mask constant 0xFF.
1674 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1675 FindConstant(CstFF);
1676
1677 // Add shift amount constant.
1678 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1679 uint64_t Idx = CI->getZExtValue();
1680 Constant *CstShiftAmount =
1681 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1682 FindConstant(CstShiftAmount);
1683 }
1684
1685 continue;
1686 }
1687
1688 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1689 // Ignore constant for index of InsertElement instruction.
1690 if (i == 2) {
1691 continue;
1692 }
1693
1694 if (isa<Constant>(I.getOperand(i)) &&
1695 !isa<GlobalValue>(I.getOperand(i))) {
1696 FindConstant(I.getOperand(i));
1697 }
1698 }
1699
1700 continue;
1701 } else if (isa<ExtractElementInst>(I)) {
1702 // Handle ExtractElement with <4 x i8> specially.
1703 Type *CompositeTy = I.getOperand(0)->getType();
1704 if (is4xi8vec(CompositeTy)) {
1705 LLVMContext &Context = CompositeTy->getContext();
1706 if (isa<Constant>(I.getOperand(0))) {
1707 FindConstant(I.getOperand(0));
1708 }
1709
1710 // Add mask constant 0xFF.
1711 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1712 FindConstant(CstFF);
1713
1714 // Add shift amount constant.
1715 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1716 uint64_t Idx = CI->getZExtValue();
1717 Constant *CstShiftAmount =
1718 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1719 FindConstant(CstShiftAmount);
1720 } else {
1721 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1722 FindConstant(Cst8);
1723 }
1724
1725 continue;
1726 }
1727
1728 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1729 // Ignore constant for index of ExtractElement instruction.
1730 if (i == 1) {
1731 continue;
1732 }
1733
1734 if (isa<Constant>(I.getOperand(i)) &&
1735 !isa<GlobalValue>(I.getOperand(i))) {
1736 FindConstant(I.getOperand(i));
1737 }
1738 }
1739
1740 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001741 } else if ((Instruction::Xor == I.getOpcode()) &&
1742 I.getType()->isIntegerTy(1)) {
1743 // We special case for Xor where the type is i1 and one of the arguments
1744 // is a constant 1 (true), this is an OpLogicalNot in SPIR-V, and we
1745 // don't need the constant
David Neto22f144c2017-06-12 14:26:21 -04001746 bool foundConstantTrue = false;
1747 for (Use &Op : I.operands()) {
1748 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1749 auto CI = cast<ConstantInt>(Op);
1750
1751 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001752 // If we already found the true constant, we might (probably only
1753 // on -O0) have an OpLogicalNot which is taking a constant
1754 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001755 FindConstant(Op);
1756 } else {
1757 foundConstantTrue = true;
1758 }
1759 }
1760 }
1761
1762 continue;
David Netod2de94a2017-08-28 17:27:47 -04001763 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001764 // Special case if i8 is not generally handled.
1765 if (!clspv::Option::Int8Support()) {
1766 // For truncation to i8 we mask against 255.
1767 Type *ToTy = I.getType();
1768 if (8u == ToTy->getPrimitiveSizeInBits()) {
1769 LLVMContext &Context = ToTy->getContext();
1770 Constant *Cst255 =
1771 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1772 FindConstant(Cst255);
1773 }
David Netod2de94a2017-08-28 17:27:47 -04001774 }
Neil Henning39672102017-09-29 14:33:13 +01001775 } else if (isa<AtomicRMWInst>(I)) {
1776 LLVMContext &Context = I.getContext();
1777
1778 FindConstant(
1779 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1780 FindConstant(ConstantInt::get(
1781 Type::getInt32Ty(Context),
1782 spv::MemorySemanticsUniformMemoryMask |
1783 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001784 }
1785
1786 for (Use &Op : I.operands()) {
1787 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1788 FindConstant(Op);
1789 }
1790 }
1791 }
1792 }
1793}
1794
1795void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001796 ValueList &CstList = getConstantList();
1797
David Netofb9a7972017-08-25 17:08:24 -04001798 // If V is already tracked, ignore it.
1799 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001800 return;
1801 }
1802
David Neto862b7d82018-06-14 18:48:37 -04001803 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1804 return;
1805 }
1806
David Neto22f144c2017-06-12 14:26:21 -04001807 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001808 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001809
1810 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001811 if (is4xi8vec(CstTy)) {
1812 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001813 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001814 }
1815 }
1816
1817 if (Cst->getNumOperands()) {
1818 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1819 ++I) {
1820 FindConstant(*I);
1821 }
1822
David Netofb9a7972017-08-25 17:08:24 -04001823 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001824 return;
1825 } else if (const ConstantDataSequential *CDS =
1826 dyn_cast<ConstantDataSequential>(Cst)) {
1827 // Add constants for each element to constant list.
1828 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1829 Constant *EleCst = CDS->getElementAsConstant(i);
1830 FindConstant(EleCst);
1831 }
1832 }
1833
1834 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001835 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001836 }
1837}
1838
1839spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1840 switch (AddrSpace) {
1841 default:
1842 llvm_unreachable("Unsupported OpenCL address space");
1843 case AddressSpace::Private:
1844 return spv::StorageClassFunction;
1845 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001846 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001847 case AddressSpace::Constant:
1848 return clspv::Option::ConstantArgsInUniformBuffer()
1849 ? spv::StorageClassUniform
1850 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001851 case AddressSpace::Input:
1852 return spv::StorageClassInput;
1853 case AddressSpace::Local:
1854 return spv::StorageClassWorkgroup;
1855 case AddressSpace::UniformConstant:
1856 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001857 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001858 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001859 case AddressSpace::ModuleScopePrivate:
1860 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001861 case AddressSpace::PushConstant:
1862 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001863 }
1864}
1865
David Neto862b7d82018-06-14 18:48:37 -04001866spv::StorageClass
1867SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1868 switch (arg_kind) {
1869 case clspv::ArgKind::Buffer:
1870 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001871 case clspv::ArgKind::BufferUBO:
1872 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001873 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001874 return spv::StorageClassStorageBuffer;
1875 case clspv::ArgKind::PodUBO:
1876 return spv::StorageClassUniform;
1877 case clspv::ArgKind::PodPushConstant:
1878 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001879 case clspv::ArgKind::Local:
1880 return spv::StorageClassWorkgroup;
1881 case clspv::ArgKind::ReadOnlyImage:
1882 case clspv::ArgKind::WriteOnlyImage:
1883 case clspv::ArgKind::Sampler:
1884 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001885 default:
1886 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001887 }
1888}
1889
David Neto22f144c2017-06-12 14:26:21 -04001890spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1891 return StringSwitch<spv::BuiltIn>(Name)
1892 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1893 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1894 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1895 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1896 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1897 .Default(spv::BuiltInMax);
1898}
1899
1900void SPIRVProducerPass::GenerateExtInstImport() {
1901 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1902 uint32_t &ExtInstImportID = getOpExtInstImportID();
1903
1904 //
1905 // Generate OpExtInstImport.
1906 //
1907 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001908 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001909 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1910 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001911}
1912
alan-bakerb6b09dc2018-11-08 16:59:28 -05001913void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1914 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001915 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1916 ValueMapType &VMap = getValueMap();
1917 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001918 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001919
1920 // Map for OpTypeRuntimeArray. If argument has pointer type, 2 spirv type
1921 // instructions are generated. They are OpTypePointer and OpTypeRuntimeArray.
1922 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1923
1924 for (Type *Ty : getTypeList()) {
1925 // Update TypeMap with nextID for reference later.
1926 TypeMap[Ty] = nextID;
1927
1928 switch (Ty->getTypeID()) {
1929 default: {
1930 Ty->print(errs());
1931 llvm_unreachable("Unsupported type???");
1932 break;
1933 }
1934 case Type::MetadataTyID:
1935 case Type::LabelTyID: {
1936 // Ignore these types.
1937 break;
1938 }
1939 case Type::PointerTyID: {
1940 PointerType *PTy = cast<PointerType>(Ty);
1941 unsigned AddrSpace = PTy->getAddressSpace();
1942
1943 // For the purposes of our Vulkan SPIR-V type system, constant and global
1944 // are conflated.
1945 bool UseExistingOpTypePointer = false;
1946 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001947 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1948 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001949 // Check to see if we already created this type (for instance, if we
1950 // had a constant <type>* and a global <type>*, the type would be
1951 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001952 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1953 if (0 < TypeMap.count(GlobalTy)) {
1954 TypeMap[PTy] = TypeMap[GlobalTy];
1955 UseExistingOpTypePointer = true;
1956 break;
1957 }
David Neto22f144c2017-06-12 14:26:21 -04001958 }
1959 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001960 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1961 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001962
alan-bakerb6b09dc2018-11-08 16:59:28 -05001963 // Check to see if we already created this type (for instance, if we
1964 // had a constant <type>* and a global <type>*, the type would be
1965 // created by one of these types, and shared by both).
1966 auto ConstantTy =
1967 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001968 if (0 < TypeMap.count(ConstantTy)) {
1969 TypeMap[PTy] = TypeMap[ConstantTy];
1970 UseExistingOpTypePointer = true;
1971 }
David Neto22f144c2017-06-12 14:26:21 -04001972 }
1973 }
1974
David Neto862b7d82018-06-14 18:48:37 -04001975 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001976
David Neto862b7d82018-06-14 18:48:37 -04001977 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001978 //
1979 // Generate OpTypePointer.
1980 //
1981
1982 // OpTypePointer
1983 // Ops[0] = Storage Class
1984 // Ops[1] = Element Type ID
1985 SPIRVOperandList Ops;
1986
David Neto257c3892018-04-11 13:19:45 -04001987 Ops << MkNum(GetStorageClass(AddrSpace))
1988 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001989
David Neto87846742018-04-11 17:36:22 -04001990 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001991 SPIRVInstList.push_back(Inst);
1992 }
David Neto22f144c2017-06-12 14:26:21 -04001993 break;
1994 }
1995 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001996 StructType *STy = cast<StructType>(Ty);
1997
1998 // Handle sampler type.
1999 if (STy->isOpaque()) {
2000 if (STy->getName().equals("opencl.sampler_t")) {
2001 //
2002 // Generate OpTypeSampler
2003 //
2004 // Empty Ops.
2005 SPIRVOperandList Ops;
2006
David Neto87846742018-04-11 17:36:22 -04002007 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002008 SPIRVInstList.push_back(Inst);
2009 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05002010 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2011 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002012 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2013 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002014 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002015 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002016 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2017 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002018 STy->getName().startswith("opencl.image3d_ro_t") ||
2019 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002020 //
2021 // Generate OpTypeImage
2022 //
2023 // Ops[0] = Sampled Type ID
2024 // Ops[1] = Dim ID
2025 // Ops[2] = Depth (Literal Number)
2026 // Ops[3] = Arrayed (Literal Number)
2027 // Ops[4] = MS (Literal Number)
2028 // Ops[5] = Sampled (Literal Number)
2029 // Ops[6] = Image Format ID
2030 //
2031 SPIRVOperandList Ops;
2032
alan-bakerf67468c2019-11-25 15:51:49 -05002033 uint32_t ImageTyID = nextID++;
2034 uint32_t SampledTyID = 0;
2035 if (STy->getName().contains(".float")) {
2036 SampledTyID = lookupType(Type::getFloatTy(Context));
2037 } else if (STy->getName().contains(".uint")) {
2038 SampledTyID = lookupType(Type::getInt32Ty(Context));
2039 } else if (STy->getName().contains(".int")) {
2040 // Generate a signed 32-bit integer if necessary.
2041 if (int32ID == 0) {
2042 int32ID = nextID++;
2043 SPIRVOperandList intOps;
2044 intOps << MkNum(32);
2045 intOps << MkNum(1);
2046 auto signed_int =
2047 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2048 SPIRVInstList.push_back(signed_int);
2049 }
2050 SampledTyID = int32ID;
2051
2052 // Generate a vec4 of the signed int if necessary.
2053 if (v4int32ID == 0) {
2054 v4int32ID = nextID++;
2055 SPIRVOperandList vecOps;
2056 vecOps << MkId(int32ID);
2057 vecOps << MkNum(4);
2058 auto int_vec =
2059 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2060 SPIRVInstList.push_back(int_vec);
2061 }
2062 } else {
2063 // This was likely an UndefValue.
2064 SampledTyID = lookupType(Type::getFloatTy(Context));
2065 }
David Neto257c3892018-04-11 13:19:45 -04002066 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002067
2068 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002069 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002070 STy->getName().startswith("opencl.image1d_wo_t") ||
2071 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2072 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002073 DimID = spv::Dim1D;
2074 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2075 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002076 DimID = spv::Dim3D;
2077 }
David Neto257c3892018-04-11 13:19:45 -04002078 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002079
2080 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002081 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002082
alan-baker7150a1d2020-02-25 08:31:06 -05002083 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2084 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002085
2086 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002087 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002088
alan-baker7150a1d2020-02-25 08:31:06 -05002089 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002090 //
2091 // From Spec
2092 //
2093 // 0 indicates this is only known at run time, not at compile time
2094 // 1 indicates will be used with sampler
2095 // 2 indicates will be used without a sampler (a storage image)
2096 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002097 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002098 Sampled = 2;
2099 }
David Neto257c3892018-04-11 13:19:45 -04002100 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002101
2102 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002103 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002104
alan-bakerf67468c2019-11-25 15:51:49 -05002105 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002106 SPIRVInstList.push_back(Inst);
2107 break;
2108 }
2109 }
2110
2111 //
2112 // Generate OpTypeStruct
2113 //
2114 // Ops[0] ... Ops[n] = Member IDs
2115 SPIRVOperandList Ops;
2116
2117 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002118 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002119 }
2120
David Neto22f144c2017-06-12 14:26:21 -04002121 uint32_t STyID = nextID;
2122
alan-bakerb6b09dc2018-11-08 16:59:28 -05002123 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002124 SPIRVInstList.push_back(Inst);
2125
2126 // Generate OpMemberDecorate.
2127 auto DecoInsertPoint =
2128 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2129 [](SPIRVInstruction *Inst) -> bool {
2130 return Inst->getOpcode() != spv::OpDecorate &&
2131 Inst->getOpcode() != spv::OpMemberDecorate &&
2132 Inst->getOpcode() != spv::OpExtInstImport;
2133 });
2134
Kévin Petitbbbda972020-03-03 19:16:31 +00002135 if (TypesNeedingLayout.idFor(STy)) {
2136 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2137 MemberIdx++) {
2138 // Ops[0] = Structure Type ID
2139 // Ops[1] = Member Index(Literal Number)
2140 // Ops[2] = Decoration (Offset)
2141 // Ops[3] = Byte Offset (Literal Number)
2142 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002143
Kévin Petitbbbda972020-03-03 19:16:31 +00002144 Ops << MkId(STyID) << MkNum(MemberIdx)
2145 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002146
Kévin Petitbbbda972020-03-03 19:16:31 +00002147 const auto ByteOffset =
2148 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002149
Kévin Petitbbbda972020-03-03 19:16:31 +00002150 Ops << MkNum(ByteOffset);
2151
2152 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
2153 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002154 }
David Neto22f144c2017-06-12 14:26:21 -04002155 }
2156
2157 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002158 if (StructTypesNeedingBlock.idFor(STy)) {
2159 Ops.clear();
2160 // Use Block decorations with StorageBuffer storage class.
2161 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002162
David Neto862b7d82018-06-14 18:48:37 -04002163 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2164 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002165 }
2166 break;
2167 }
2168 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002169 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002170
2171 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002172 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002173 SPIRVInstList.push_back(Inst);
2174 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002175 if (!clspv::Option::Int8Support()) {
2176 // i8 is added to TypeMap as i32.
2177 // No matter what LLVM type is requested first, always alias the
2178 // second one's SPIR-V type to be the same as the one we generated
2179 // first.
2180 unsigned aliasToWidth = 0;
2181 if (BitWidth == 8) {
2182 aliasToWidth = 32;
2183 BitWidth = 32;
2184 } else if (BitWidth == 32) {
2185 aliasToWidth = 8;
2186 }
2187 if (aliasToWidth) {
2188 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2189 auto where = TypeMap.find(otherType);
2190 if (where == TypeMap.end()) {
2191 // Go ahead and make it, but also map the other type to it.
2192 TypeMap[otherType] = nextID;
2193 } else {
2194 // Alias this SPIR-V type the existing type.
2195 TypeMap[Ty] = where->second;
2196 break;
2197 }
David Neto391aeb12017-08-26 15:51:58 -04002198 }
David Neto22f144c2017-06-12 14:26:21 -04002199 }
2200
David Neto257c3892018-04-11 13:19:45 -04002201 SPIRVOperandList Ops;
2202 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002203
2204 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002205 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002206 }
2207 break;
2208 }
2209 case Type::HalfTyID:
2210 case Type::FloatTyID:
2211 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002212 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002213 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002214
2215 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002216 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002217 break;
2218 }
2219 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002220 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002221 const uint64_t Length = ArrTy->getArrayNumElements();
2222 if (Length == 0) {
2223 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002224
David Neto862b7d82018-06-14 18:48:37 -04002225 // Only generate the type once.
2226 // TODO(dneto): Can it ever be generated more than once?
2227 // Doesn't LLVM type uniqueness guarantee we'll only see this
2228 // once?
2229 Type *EleTy = ArrTy->getArrayElementType();
2230 if (OpRuntimeTyMap.count(EleTy) == 0) {
2231 uint32_t OpTypeRuntimeArrayID = nextID;
2232 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002233
David Neto862b7d82018-06-14 18:48:37 -04002234 //
2235 // Generate OpTypeRuntimeArray.
2236 //
David Neto22f144c2017-06-12 14:26:21 -04002237
David Neto862b7d82018-06-14 18:48:37 -04002238 // OpTypeRuntimeArray
2239 // Ops[0] = Element Type ID
2240 SPIRVOperandList Ops;
2241 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002242
David Neto862b7d82018-06-14 18:48:37 -04002243 SPIRVInstList.push_back(
2244 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002245
David Neto862b7d82018-06-14 18:48:37 -04002246 if (Hack_generate_runtime_array_stride_early) {
2247 // Generate OpDecorate.
2248 auto DecoInsertPoint = std::find_if(
2249 SPIRVInstList.begin(), SPIRVInstList.end(),
2250 [](SPIRVInstruction *Inst) -> bool {
2251 return Inst->getOpcode() != spv::OpDecorate &&
2252 Inst->getOpcode() != spv::OpMemberDecorate &&
2253 Inst->getOpcode() != spv::OpExtInstImport;
2254 });
David Neto22f144c2017-06-12 14:26:21 -04002255
David Neto862b7d82018-06-14 18:48:37 -04002256 // Ops[0] = Target ID
2257 // Ops[1] = Decoration (ArrayStride)
2258 // Ops[2] = Stride Number(Literal Number)
2259 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002260
David Neto862b7d82018-06-14 18:48:37 -04002261 Ops << MkId(OpTypeRuntimeArrayID)
2262 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002263 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002264
David Neto862b7d82018-06-14 18:48:37 -04002265 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2266 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2267 }
2268 }
David Neto22f144c2017-06-12 14:26:21 -04002269
David Neto862b7d82018-06-14 18:48:37 -04002270 } else {
David Neto22f144c2017-06-12 14:26:21 -04002271
David Neto862b7d82018-06-14 18:48:37 -04002272 //
2273 // Generate OpConstant and OpTypeArray.
2274 //
2275
2276 //
2277 // Generate OpConstant for array length.
2278 //
2279 // Ops[0] = Result Type ID
2280 // Ops[1] .. Ops[n] = Values LiteralNumber
2281 SPIRVOperandList Ops;
2282
2283 Type *LengthTy = Type::getInt32Ty(Context);
2284 uint32_t ResTyID = lookupType(LengthTy);
2285 Ops << MkId(ResTyID);
2286
2287 assert(Length < UINT32_MAX);
2288 Ops << MkNum(static_cast<uint32_t>(Length));
2289
2290 // Add constant for length to constant list.
2291 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2292 AllocatedVMap[CstLength] = nextID;
2293 VMap[CstLength] = nextID;
2294 uint32_t LengthID = nextID;
2295
2296 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2297 SPIRVInstList.push_back(CstInst);
2298
2299 // Remember to generate ArrayStride later
2300 getTypesNeedingArrayStride().insert(Ty);
2301
2302 //
2303 // Generate OpTypeArray.
2304 //
2305 // Ops[0] = Element Type ID
2306 // Ops[1] = Array Length Constant ID
2307 Ops.clear();
2308
2309 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2310 Ops << MkId(EleTyID) << MkId(LengthID);
2311
2312 // Update TypeMap with nextID.
2313 TypeMap[Ty] = nextID;
2314
2315 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2316 SPIRVInstList.push_back(ArrayInst);
2317 }
David Neto22f144c2017-06-12 14:26:21 -04002318 break;
2319 }
2320 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002321 // <4 x i8> is changed to i32 if i8 is not generally supported.
2322 if (!clspv::Option::Int8Support() &&
2323 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002324 if (Ty->getVectorNumElements() == 4) {
2325 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2326 break;
2327 } else {
2328 Ty->print(errs());
2329 llvm_unreachable("Support above i8 vector type");
2330 }
2331 }
2332
2333 // Ops[0] = Component Type ID
2334 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002335 SPIRVOperandList Ops;
2336 Ops << MkId(lookupType(Ty->getVectorElementType()))
2337 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002338
alan-bakerb6b09dc2018-11-08 16:59:28 -05002339 SPIRVInstruction *inst =
2340 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002341 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002342 break;
2343 }
2344 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002345 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002346 SPIRVInstList.push_back(Inst);
2347 break;
2348 }
2349 case Type::FunctionTyID: {
2350 // Generate SPIRV instruction for function type.
2351 FunctionType *FTy = cast<FunctionType>(Ty);
2352
2353 // Ops[0] = Return Type ID
2354 // Ops[1] ... Ops[n] = Parameter Type IDs
2355 SPIRVOperandList Ops;
2356
2357 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002358 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002359
2360 // Find SPIRV instructions for parameter types
2361 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2362 // Find SPIRV instruction for parameter type.
2363 auto ParamTy = FTy->getParamType(k);
2364 if (ParamTy->isPointerTy()) {
2365 auto PointeeTy = ParamTy->getPointerElementType();
2366 if (PointeeTy->isStructTy() &&
2367 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2368 ParamTy = PointeeTy;
2369 }
2370 }
2371
David Netoc6f3ab22018-04-06 18:02:31 -04002372 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002373 }
2374
David Neto87846742018-04-11 17:36:22 -04002375 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002376 SPIRVInstList.push_back(Inst);
2377 break;
2378 }
2379 }
2380 }
2381
2382 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002383 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002384 //
2385 // Generate OpTypeSampledImage.
2386 //
2387 // Ops[0] = Image Type ID
2388 //
2389 SPIRVOperandList Ops;
2390
David Netoc6f3ab22018-04-06 18:02:31 -04002391 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002392
alan-bakerabd82722019-12-03 17:14:51 -05002393 // Update the image type map.
2394 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002395
David Neto87846742018-04-11 17:36:22 -04002396 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002397 SPIRVInstList.push_back(Inst);
2398 }
David Netoc6f3ab22018-04-06 18:02:31 -04002399
2400 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002401 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2402 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002403 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002404
2405 // Generate the spec constant.
2406 SPIRVOperandList Ops;
2407 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002408 SPIRVInstList.push_back(
2409 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002410
2411 // Generate the array type.
2412 Ops.clear();
2413 // The element type must have been created.
2414 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2415 assert(elem_ty_id);
2416 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2417
2418 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002419 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002420
2421 Ops.clear();
2422 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002423 SPIRVInstList.push_back(new SPIRVInstruction(
2424 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002425 }
David Neto22f144c2017-06-12 14:26:21 -04002426}
2427
// Emits one SPIR-V constant instruction (OpConstant, OpConstantTrue/False,
// OpConstantComposite, OpConstantNull, or OpUndef) for every LLVM constant
// collected in the module's constant list, registering the assigned result ID
// in the value map so later instruction generation can reference it.
//
// Ordering matters: IDs are allocated from `nextID` in list order, and
// aggregate constants assume their element constants were emitted (and
// mapped) earlier in the list.
void SPIRVProducerPass::GenerateSPIRVConstants() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  ValueMapType &AllocatedVMap = getAllocatedValueMap();
  ValueList &CstList = getConstantList();
  const bool hack_undef = clspv::Option::HackUndef();

  for (uint32_t i = 0; i < CstList.size(); i++) {
    // UniqueVector ids are 1-based.
    Constant *Cst = cast<Constant>(CstList[i + 1]);

    // OpTypeArray's constant was already generated.
    // (Array-length constants get an ID during type generation; skip them
    // here to avoid emitting a duplicate OpConstant.)
    if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
      continue;
    }

    // Set ValueMap with nextID for reference later.
    VMap[Cst] = nextID;

    //
    // Generate OpConstant.
    //

    // Ops[0] = Result Type ID
    // Ops[1] .. Ops[n] = Values LiteralNumber
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(Cst->getType()));

    std::vector<uint32_t> LiteralNum;
    spv::Op Opcode = spv::OpNop;

    if (isa<UndefValue>(Cst)) {
      // Ops[0] = Result Type ID
      Opcode = spv::OpUndef;
      // Under -hack-undef, nullable types use OpConstantNull instead of
      // OpUndef to give a deterministic zero value.
      if (hack_undef && IsTypeNullable(Cst->getType())) {
        Opcode = spv::OpConstantNull;
      }
    } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
      unsigned BitWidth = CI->getBitWidth();
      if (BitWidth == 1) {
        // If the bitwidth of constant is 1, generate OpConstantTrue or
        // OpConstantFalse (i1 maps to OpTypeBool, which has no literal form).
        if (CI->getZExtValue()) {
          // Ops[0] = Result Type ID
          Opcode = spv::OpConstantTrue;
        } else {
          // Ops[0] = Result Type ID
          Opcode = spv::OpConstantFalse;
        }
      } else {
        // Wider integers are encoded as one or two 32-bit literal words,
        // low word first.
        auto V = CI->getZExtValue();
        LiteralNum.push_back(V & 0xFFFFFFFF);

        if (BitWidth > 32) {
          LiteralNum.push_back(V >> 32);
        }

        Opcode = spv::OpConstant;

        Ops << MkInteger(LiteralNum);
      }
    } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
      // Floating-point constants are emitted as their raw bit patterns.
      uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
      Type *CFPTy = CFP->getType();
      if (CFPTy->isFloatTy()) {
        LiteralNum.push_back(FPVal & 0xFFFFFFFF);
      } else if (CFPTy->isDoubleTy()) {
        // Doubles take two words, low word first.
        LiteralNum.push_back(FPVal & 0xFFFFFFFF);
        LiteralNum.push_back(FPVal >> 32);
      } else if (CFPTy->isHalfTy()) {
        LiteralNum.push_back(FPVal & 0xFFFF);
      } else {
        CFPTy->print(errs());
        llvm_unreachable("Implement this ConstantFP Type");
      }

      Opcode = spv::OpConstant;

      Ops << MkFloat(LiteralNum);
    } else if (isa<ConstantDataSequential>(Cst) &&
               cast<ConstantDataSequential>(Cst)->isString()) {
      // String constants are not supported.
      Cst->print(errs());
      llvm_unreachable("Implement this Constant");

    } else if (const ConstantDataSequential *CDS =
                   dyn_cast<ConstantDataSequential>(Cst)) {
      // Let's convert <4 x i8> constant to int constant specially.
      // This case occurs when all the values are specified as constant
      // ints.
      Type *CstTy = Cst->getType();
      if (is4xi8vec(CstTy)) {
        LLVMContext &Context = CstTy->getContext();

        //
        // Generate OpConstant with OpTypeInt 32 0.
        //
        // Pack the four bytes into a single 32-bit value, first element in
        // the high byte.
        uint32_t IntValue = 0;
        for (unsigned k = 0; k < 4; k++) {
          const uint64_t Val = CDS->getElementAsInteger(k);
          IntValue = (IntValue << 8) | (Val & 0xffu);
        }

        Type *i32 = Type::getInt32Ty(Context);
        Constant *CstInt = ConstantInt::get(i32, IntValue);
        // If this constant is already registered on VMap, use it.
        // (Alias the vector constant to the existing i32 constant's ID.)
        if (VMap.count(CstInt)) {
          uint32_t CstID = VMap[CstInt];
          VMap[Cst] = CstID;
          continue;
        }

        Ops << MkNum(IntValue);

        auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
        SPIRVInstList.push_back(CstInst);

        continue;
      }

      // A normal constant-data-sequential case: compose from the
      // previously-emitted element constants.
      for (unsigned k = 0; k < CDS->getNumElements(); k++) {
        Constant *EleCst = CDS->getElementAsConstant(k);
        uint32_t EleCstID = VMap[EleCst];
        Ops << MkId(EleCstID);
      }

      Opcode = spv::OpConstantComposite;
    } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
      // Let's convert <4 x i8> constant to int constant specially.
      // This case occurs when at least one of the values is an undef.
      Type *CstTy = Cst->getType();
      if (is4xi8vec(CstTy)) {
        LLVMContext &Context = CstTy->getContext();

        //
        // Generate OpConstant with OpTypeInt 32 0.
        //
        // Pack the elements into one 32-bit value; undef elements
        // contribute a zero byte.
        uint32_t IntValue = 0;
        for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
             I != E; ++I) {
          uint64_t Val = 0;
          const Value *CV = *I;
          if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
            Val = CI2->getZExtValue();
          }
          IntValue = (IntValue << 8) | (Val & 0xffu);
        }

        Type *i32 = Type::getInt32Ty(Context);
        Constant *CstInt = ConstantInt::get(i32, IntValue);
        // If this constant is already registered on VMap, use it.
        if (VMap.count(CstInt)) {
          uint32_t CstID = VMap[CstInt];
          VMap[Cst] = CstID;
          continue;
        }

        Ops << MkNum(IntValue);

        auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
        SPIRVInstList.push_back(CstInst);

        continue;
      }

      // We use a constant composite in SPIR-V for our constant aggregate in
      // LLVM.
      Opcode = spv::OpConstantComposite;

      for (unsigned k = 0; k < CA->getNumOperands(); k++) {
        // Look up the ID of the element of this aggregate (which we will
        // previously have created a constant for).
        uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];

        // And add an operand to the composite we are constructing
        Ops << MkId(ElementConstantID);
      }
    } else if (Cst->isNullValue()) {
      Opcode = spv::OpConstantNull;
    } else {
      Cst->print(errs());
      llvm_unreachable("Unsupported Constant???");
    }

    if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
      // Null pointer requires variable pointers.
      setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
    }

    auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(CstInst);
  }
}
2622
// Emits one OpVariable (UniformConstant storage) per distinct literal
// sampler referenced via the clspv literal-sampler builtin function, plus
// DescriptorSet/Binding decorations for each, and records the variable IDs
// in SamplerLiteralToIDMap for use when generating code for the calls.
//
// Two passes over the builtin's users: the first collects the descriptor
// set/binding assignments per sampler value; the second allocates variables.
// Samplers present in the sampler map but never used in the translation
// unit still get a variable and a fresh, unused binding in set 0.
void SPIRVProducerPass::GenerateSamplers(Module &M) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();

  auto &sampler_map = getSamplerMap();
  SamplerLiteralToIDMap.clear();
  DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
  DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;

  // We might have samplers in the sampler map that are not used
  // in the translation unit. We need to allocate variables
  // for them and bindings too.
  DenseSet<unsigned> used_bindings;

  auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
  // Return if there are no literal samplers.
  if (!var_fn)
    return;

  for (auto user : var_fn->users()) {
    // Populate SamplerLiteralToDescriptorSetMap and
    // SamplerLiteralToBindingMap.
    //
    // Look for calls like
    //   call %opencl.sampler_t addrspace(2)*
    //       @clspv.sampler.var.literal(
    //          i32 descriptor,
    //          i32 binding,
    //          i32 (index-into-sampler-map|sampler_mask))
    if (auto *call = dyn_cast<CallInst>(user)) {
      const auto third_param = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
      // Without the sampler map the third argument is the sampler mask
      // itself; with it, it is an index to be translated to the mask.
      auto sampler_value = third_param;
      if (clspv::Option::UseSamplerMap()) {
        if (third_param >= sampler_map.size()) {
          errs() << "Out of bounds index to sampler map: " << third_param;
          llvm_unreachable("bad sampler init: out of bounds");
        }
        sampler_value = sampler_map[third_param].first;
      }

      const auto descriptor_set = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
      const auto binding = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());

      SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
      SamplerLiteralToBindingMap[sampler_value] = binding;
      used_bindings.insert(binding);
    }
  }

  // Second pass: allocate one variable per distinct third argument.
  DenseSet<size_t> seen;
  for (auto user : var_fn->users()) {
    if (!isa<CallInst>(user))
      continue;

    auto call = cast<CallInst>(user);
    const unsigned third_param = static_cast<unsigned>(
        dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());

    // Already allocated a variable for this value.
    if (!seen.insert(third_param).second)
      continue;

    auto sampler_value = third_param;
    if (clspv::Option::UseSamplerMap()) {
      sampler_value = sampler_map[third_param].first;
    }

    // Generate OpVariable.
    //
    // GIDOps[0] : Result Type ID
    // GIDOps[1] : Storage Class
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(SamplerTy))
        << MkNum(spv::StorageClassUniformConstant);

    auto sampler_var_id = nextID++;
    auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
    SPIRVInstList.push_back(Inst);

    SamplerLiteralToIDMap[sampler_value] = sampler_var_id;

    // Find Insert Point for OpDecorate: decorations must precede the first
    // non-decoration instruction (after any OpExtInstImport).
    auto DecoInsertPoint =
        std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                     [](SPIRVInstruction *Inst) -> bool {
                       return Inst->getOpcode() != spv::OpDecorate &&
                              Inst->getOpcode() != spv::OpMemberDecorate &&
                              Inst->getOpcode() != spv::OpExtInstImport;
                     });

    // Ops[0] = Target ID
    // Ops[1] = Decoration (DescriptorSet)
    // Ops[2] = LiteralNumber according to Decoration
    Ops.clear();

    unsigned descriptor_set;
    unsigned binding;
    if (SamplerLiteralToBindingMap.find(sampler_value) ==
        SamplerLiteralToBindingMap.end()) {
      // This sampler is not actually used. Find the next one.
      // (Lowest binding number not already taken.)
      for (binding = 0; used_bindings.count(binding); binding++)
        ;
      descriptor_set = 0; // Literal samplers always use descriptor set 0.
      used_bindings.insert(binding);
    } else {
      descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
      binding = SamplerLiteralToBindingMap[sampler_value];

      // Only used samplers are reported in the descriptor map.
      version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
      descriptorMapEntries->emplace_back(std::move(sampler_data),
                                         descriptor_set, binding);
    }

    Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
        << MkNum(descriptor_set);

    auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
    SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);

    // Ops[0] = Target ID
    // Ops[1] = Decoration (Binding)
    // Ops[2] = LiteralNumber according to Decoration
    Ops.clear();
    Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
        << MkNum(binding);

    auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
    SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
  }
}
David Neto22f144c2017-06-12 14:26:21 -04002756
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002757void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002758 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2759 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002760
David Neto862b7d82018-06-14 18:48:37 -04002761 // Generate variables. Make one for each of resource var info object.
2762 for (auto *info : ModuleOrderedResourceVars) {
2763 Type *type = info->var_fn->getReturnType();
2764 // Remap the address space for opaque types.
2765 switch (info->arg_kind) {
2766 case clspv::ArgKind::Sampler:
2767 case clspv::ArgKind::ReadOnlyImage:
2768 case clspv::ArgKind::WriteOnlyImage:
2769 type = PointerType::get(type->getPointerElementType(),
2770 clspv::AddressSpace::UniformConstant);
2771 break;
2772 default:
2773 break;
2774 }
David Neto22f144c2017-06-12 14:26:21 -04002775
David Neto862b7d82018-06-14 18:48:37 -04002776 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002777
David Neto862b7d82018-06-14 18:48:37 -04002778 const auto type_id = lookupType(type);
2779 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2780 SPIRVOperandList Ops;
2781 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002782
David Neto862b7d82018-06-14 18:48:37 -04002783 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2784 SPIRVInstList.push_back(Inst);
2785
2786 // Map calls to the variable-builtin-function.
2787 for (auto &U : info->var_fn->uses()) {
2788 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2789 const auto set = unsigned(
2790 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2791 const auto binding = unsigned(
2792 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2793 if (set == info->descriptor_set && binding == info->binding) {
2794 switch (info->arg_kind) {
2795 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002796 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002797 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002798 case clspv::ArgKind::PodUBO:
2799 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002800 // The call maps to the variable directly.
2801 VMap[call] = info->var_id;
2802 break;
2803 case clspv::ArgKind::Sampler:
2804 case clspv::ArgKind::ReadOnlyImage:
2805 case clspv::ArgKind::WriteOnlyImage:
2806 // The call maps to a load we generate later.
2807 ResourceVarDeferredLoadCalls[call] = info->var_id;
2808 break;
2809 default:
2810 llvm_unreachable("Unhandled arg kind");
2811 }
2812 }
David Neto22f144c2017-06-12 14:26:21 -04002813 }
David Neto862b7d82018-06-14 18:48:37 -04002814 }
2815 }
David Neto22f144c2017-06-12 14:26:21 -04002816
David Neto862b7d82018-06-14 18:48:37 -04002817 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002818
David Neto862b7d82018-06-14 18:48:37 -04002819 // Find Insert Point for OpDecorate.
2820 auto DecoInsertPoint =
2821 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2822 [](SPIRVInstruction *Inst) -> bool {
2823 return Inst->getOpcode() != spv::OpDecorate &&
2824 Inst->getOpcode() != spv::OpMemberDecorate &&
2825 Inst->getOpcode() != spv::OpExtInstImport;
2826 });
2827
2828 SPIRVOperandList Ops;
2829 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002830 // Push constants don't need descriptor set or binding decorations.
2831 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2832 continue;
2833
David Neto862b7d82018-06-14 18:48:37 -04002834 // Decorate with DescriptorSet and Binding.
2835 Ops.clear();
2836 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2837 << MkNum(info->descriptor_set);
2838 SPIRVInstList.insert(DecoInsertPoint,
2839 new SPIRVInstruction(spv::OpDecorate, Ops));
2840
2841 Ops.clear();
2842 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2843 << MkNum(info->binding);
2844 SPIRVInstList.insert(DecoInsertPoint,
2845 new SPIRVInstruction(spv::OpDecorate, Ops));
2846
alan-bakere9308012019-03-15 10:25:13 -04002847 if (info->coherent) {
2848 // Decorate with Coherent if required for the variable.
2849 Ops.clear();
2850 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2851 SPIRVInstList.insert(DecoInsertPoint,
2852 new SPIRVInstruction(spv::OpDecorate, Ops));
2853 }
2854
David Neto862b7d82018-06-14 18:48:37 -04002855 // Generate NonWritable and NonReadable
2856 switch (info->arg_kind) {
2857 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002858 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002859 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2860 clspv::AddressSpace::Constant) {
2861 Ops.clear();
2862 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2863 SPIRVInstList.insert(DecoInsertPoint,
2864 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002865 }
David Neto862b7d82018-06-14 18:48:37 -04002866 break;
David Neto862b7d82018-06-14 18:48:37 -04002867 case clspv::ArgKind::WriteOnlyImage:
2868 Ops.clear();
2869 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2870 SPIRVInstList.insert(DecoInsertPoint,
2871 new SPIRVInstruction(spv::OpDecorate, Ops));
2872 break;
2873 default:
2874 break;
David Neto22f144c2017-06-12 14:26:21 -04002875 }
2876 }
2877}
2878
Kévin Petitbbbda972020-03-03 19:16:31 +00002879namespace {
2880
2881bool isScalarType(Type *type) {
2882 return type->isIntegerTy() || type->isFloatTy();
2883}
2884
2885uint64_t structAlignment(StructType *type,
2886 std::function<uint64_t(Type *)> alignFn) {
2887 uint64_t maxAlign = 1;
2888 for (unsigned i = 0; i < type->getStructNumElements(); i++) {
2889 uint64_t align = alignFn(type->getStructElementType(i));
2890 maxAlign = std::max(align, maxAlign);
2891 }
2892 return maxAlign;
2893}
2894
2895uint64_t scalarAlignment(Type *type) {
2896 // A scalar of size N has a scalar alignment of N.
2897 if (isScalarType(type)) {
2898 return type->getScalarSizeInBits() / 8;
2899 }
2900
2901 // A vector or matrix type has a scalar alignment equal to that of its
2902 // component type.
2903 if (type->isVectorTy()) {
2904 return scalarAlignment(type->getVectorElementType());
2905 }
2906
2907 // An array type has a scalar alignment equal to that of its element type.
2908 if (type->isArrayTy()) {
2909 return scalarAlignment(type->getArrayElementType());
2910 }
2911
2912 // A structure has a scalar alignment equal to the largest scalar alignment of
2913 // any of its members.
2914 if (type->isStructTy()) {
2915 return structAlignment(cast<StructType>(type), scalarAlignment);
2916 }
2917
2918 llvm_unreachable("Unsupported type");
2919}
2920
2921uint64_t baseAlignment(Type *type) {
2922 // A scalar has a base alignment equal to its scalar alignment.
2923 if (isScalarType(type)) {
2924 return scalarAlignment(type);
2925 }
2926
2927 if (type->isVectorTy()) {
2928 unsigned numElems = type->getVectorNumElements();
2929
2930 // A two-component vector has a base alignment equal to twice its scalar
2931 // alignment.
2932 if (numElems == 2) {
2933 return 2 * scalarAlignment(type);
2934 }
2935 // A three- or four-component vector has a base alignment equal to four
2936 // times its scalar alignment.
2937 if ((numElems == 3) || (numElems == 4)) {
2938 return 4 * scalarAlignment(type);
2939 }
2940 }
2941
2942 // An array has a base alignment equal to the base alignment of its element
2943 // type.
2944 if (type->isArrayTy()) {
2945 return baseAlignment(type->getArrayElementType());
2946 }
2947
2948 // A structure has a base alignment equal to the largest base alignment of any
2949 // of its members.
2950 if (type->isStructTy()) {
2951 return structAlignment(cast<StructType>(type), baseAlignment);
2952 }
2953
2954 // TODO A row-major matrix of C columns has a base alignment equal to the base
2955 // alignment of a vector of C matrix components.
2956 // TODO A column-major matrix has a base alignment equal to the base alignment
2957 // of the matrix column type.
2958
2959 llvm_unreachable("Unsupported type");
2960}
2961
2962uint64_t extendedAlignment(Type *type) {
2963 // A scalar, vector or matrix type has an extended alignment equal to its base
2964 // alignment.
2965 // TODO matrix type
2966 if (isScalarType(type) || type->isVectorTy()) {
2967 return baseAlignment(type);
2968 }
2969
2970 // An array or structure type has an extended alignment equal to the largest
2971 // extended alignment of any of its members, rounded up to a multiple of 16
2972 if (type->isStructTy()) {
2973 auto salign = structAlignment(cast<StructType>(type), extendedAlignment);
2974 return alignTo(salign, 16);
2975 }
2976
2977 if (type->isArrayTy()) {
2978 auto salign = extendedAlignment(type->getArrayElementType());
2979 return alignTo(salign, 16);
2980 }
2981
2982 llvm_unreachable("Unsupported type");
2983}
2984
2985uint64_t standardAlignment(Type *type, spv::StorageClass sclass) {
2986 // If the scalarBlockLayout feature is enabled on the device then every member
2987 // must be aligned according to its scalar alignment
2988 if (clspv::Option::ScalarBlockLayout()) {
2989 return scalarAlignment(type);
2990 }
2991
2992 // All vectors must be aligned according to their scalar alignment
2993 if (type->isVectorTy()) {
2994 return scalarAlignment(type);
2995 }
2996
2997 // If the uniformBufferStandardLayout feature is not enabled on the device,
2998 // then any member of an OpTypeStruct with a storage class of Uniform and a
2999 // decoration of Block must be aligned according to its extended alignment.
3000 if (!clspv::Option::Std430UniformBufferLayout() &&
3001 sclass == spv::StorageClassUniform) {
3002 return extendedAlignment(type);
3003 }
3004
3005 // Every other member must be aligned according to its base alignment
3006 return baseAlignment(type);
3007}
3008
3009bool improperlyStraddles(const DataLayout &DL, Type *type, unsigned offset) {
3010 assert(type->isVectorTy());
3011
3012 auto size = DL.getTypeStoreSize(type);
3013
3014 // It is a vector with total size less than or equal to 16 bytes, and has
3015 // Offset decorations placing its first byte at F and its last byte at L,
3016 // where floor(F / 16) != floor(L / 16).
3017 if ((size <= 16) && (offset % 16 + size > 16)) {
3018 return true;
3019 }
3020
3021 // It is a vector with total size greater than 16 bytes and has its Offset
3022 // decorations placing its first byte at a non-integer multiple of 16
3023 if ((size > 16) && (offset % 16 != 0)) {
3024 return true;
3025 }
3026
3027 return false;
3028}
3029
3030// See 14.5 Shader Resource Interface in Vulkan spec
3031bool isValidExplicitLayout(Module &M, StructType *STy, unsigned Member,
3032 spv::StorageClass SClass, unsigned Offset,
3033 unsigned PreviousMemberOffset) {
3034
3035 auto MemberType = STy->getElementType(Member);
3036 auto Align = standardAlignment(MemberType, SClass);
3037 auto &DL = M.getDataLayout();
3038
3039 // The Offset decoration of any member must be a multiple of its alignment
3040 if (Offset % Align != 0) {
3041 return false;
3042 }
3043
3044 // TODO Any ArrayStride or MatrixStride decoration must be a multiple of the
3045 // alignment of the array or matrix as defined above
3046
3047 if (!clspv::Option::ScalarBlockLayout()) {
3048 // Vectors must not improperly straddle, as defined above
3049 if (MemberType->isVectorTy() &&
3050 improperlyStraddles(DL, MemberType, Offset)) {
3051 return true;
3052 }
3053
3054 // The Offset decoration of a member must not place it between the end
3055 // of a structure or an array and the next multiple of the alignment of that
3056 // structure or array
3057 if (Member > 0) {
3058 auto PType = STy->getElementType(Member - 1);
3059 if (PType->isStructTy() || PType->isArrayTy()) {
3060 auto PAlign = standardAlignment(PType, SClass);
3061 if (Offset - PreviousMemberOffset < PAlign) {
3062 return false;
3063 }
3064 }
3065 }
3066 }
3067
3068 return true;
3069}
3070
3071} // namespace
3072
// Emits one PushConstantData descriptor-map entry per member of the
// clspv-generated push-constant block, recording the push-constant kind
// (taken from module metadata), its byte offset, and its byte size.
// Does nothing when the module has no push-constant global variable.
void SPIRVProducerPass::GeneratePushConstantDescriptormapEntries(Module &M) {

  if (auto GV = M.getGlobalVariable(clspv::PushConstantsVariableName())) {
    auto const &DL = M.getDataLayout();
    // Metadata operand i identifies which clspv::PushConstant member i is.
    auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
    auto STy = cast<StructType>(GV->getValueType());

    for (unsigned i = 0; i < STy->getNumElements(); i++) {
      auto pc = static_cast<clspv::PushConstant>(
          mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
      auto memberType = STy->getElementType(i);
      auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
      // Offset of the previous member, needed by the layout validity check.
      unsigned previousOffset = 0;
      if (i > 0) {
        previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
      }
      auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
      // Sanity-check (debug builds only) that the member placement obeys the
      // Vulkan push-constant layout rules.
      assert(isValidExplicitLayout(M, STy, i, spv::StorageClassPushConstant,
                                   offset, previousOffset));
      version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
      descriptorMapEntries->emplace_back(std::move(data));
    }
  }
}
3097
David Neto22f144c2017-06-12 14:26:21 -04003098void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003099 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04003100 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3101 ValueMapType &VMap = getValueMap();
3102 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07003103 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04003104
3105 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
3106 Type *Ty = GV.getType();
3107 PointerType *PTy = cast<PointerType>(Ty);
3108
3109 uint32_t InitializerID = 0;
3110
3111 // Workgroup size is handled differently (it goes into a constant)
3112 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3113 std::vector<bool> HasMDVec;
3114 uint32_t PrevXDimCst = 0xFFFFFFFF;
3115 uint32_t PrevYDimCst = 0xFFFFFFFF;
3116 uint32_t PrevZDimCst = 0xFFFFFFFF;
3117 for (Function &Func : *GV.getParent()) {
3118 if (Func.isDeclaration()) {
3119 continue;
3120 }
3121
3122 // We only need to check kernels.
3123 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
3124 continue;
3125 }
3126
3127 if (const MDNode *MD =
3128 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
3129 uint32_t CurXDimCst = static_cast<uint32_t>(
3130 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3131 uint32_t CurYDimCst = static_cast<uint32_t>(
3132 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3133 uint32_t CurZDimCst = static_cast<uint32_t>(
3134 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3135
3136 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
3137 PrevZDimCst == 0xFFFFFFFF) {
3138 PrevXDimCst = CurXDimCst;
3139 PrevYDimCst = CurYDimCst;
3140 PrevZDimCst = CurZDimCst;
3141 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
3142 CurZDimCst != PrevZDimCst) {
3143 llvm_unreachable(
3144 "reqd_work_group_size must be the same across all kernels");
3145 } else {
3146 continue;
3147 }
3148
3149 //
3150 // Generate OpConstantComposite.
3151 //
3152 // Ops[0] : Result Type ID
3153 // Ops[1] : Constant size for x dimension.
3154 // Ops[2] : Constant size for y dimension.
3155 // Ops[3] : Constant size for z dimension.
3156 SPIRVOperandList Ops;
3157
3158 uint32_t XDimCstID =
3159 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
3160 uint32_t YDimCstID =
3161 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
3162 uint32_t ZDimCstID =
3163 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
3164
3165 InitializerID = nextID;
3166
David Neto257c3892018-04-11 13:19:45 -04003167 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3168 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003169
David Neto87846742018-04-11 17:36:22 -04003170 auto *Inst =
3171 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003172 SPIRVInstList.push_back(Inst);
3173
3174 HasMDVec.push_back(true);
3175 } else {
3176 HasMDVec.push_back(false);
3177 }
3178 }
3179
3180 // Check all kernels have same definitions for work_group_size.
3181 bool HasMD = false;
3182 if (!HasMDVec.empty()) {
3183 HasMD = HasMDVec[0];
3184 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3185 if (HasMD != HasMDVec[i]) {
3186 llvm_unreachable(
3187 "Kernels should have consistent work group size definition");
3188 }
3189 }
3190 }
3191
3192 // If all kernels do not have metadata for reqd_work_group_size, generate
3193 // OpSpecConstants for x/y/z dimension.
3194 if (!HasMD) {
3195 //
3196 // Generate OpSpecConstants for x/y/z dimension.
3197 //
3198 // Ops[0] : Result Type ID
3199 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3200 uint32_t XDimCstID = 0;
3201 uint32_t YDimCstID = 0;
3202 uint32_t ZDimCstID = 0;
3203
David Neto22f144c2017-06-12 14:26:21 -04003204 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04003205 uint32_t result_type_id =
alan-baker8eb435a2020-04-08 00:42:06 -04003206 lookupType(Ty->getPointerElementType()->getVectorElementType());
David Neto22f144c2017-06-12 14:26:21 -04003207
David Neto257c3892018-04-11 13:19:45 -04003208 // X Dimension
3209 Ops << MkId(result_type_id) << MkNum(1);
3210 XDimCstID = nextID++;
3211 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003212 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003213
3214 // Y Dimension
3215 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003216 Ops << MkId(result_type_id) << MkNum(1);
3217 YDimCstID = nextID++;
3218 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003219 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003220
3221 // Z Dimension
3222 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003223 Ops << MkId(result_type_id) << MkNum(1);
3224 ZDimCstID = nextID++;
3225 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003226 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003227
David Neto257c3892018-04-11 13:19:45 -04003228 BuiltinDimVec.push_back(XDimCstID);
3229 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003230 BuiltinDimVec.push_back(ZDimCstID);
3231
David Neto22f144c2017-06-12 14:26:21 -04003232 //
3233 // Generate OpSpecConstantComposite.
3234 //
3235 // Ops[0] : Result Type ID
3236 // Ops[1] : Constant size for x dimension.
3237 // Ops[2] : Constant size for y dimension.
3238 // Ops[3] : Constant size for z dimension.
3239 InitializerID = nextID;
3240
3241 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003242 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3243 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003244
David Neto87846742018-04-11 17:36:22 -04003245 auto *Inst =
3246 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003247 SPIRVInstList.push_back(Inst);
3248 }
3249 }
3250
David Neto22f144c2017-06-12 14:26:21 -04003251 VMap[&GV] = nextID;
3252
3253 //
3254 // Generate OpVariable.
3255 //
3256 // GIDOps[0] : Result Type ID
3257 // GIDOps[1] : Storage Class
3258 SPIRVOperandList Ops;
3259
David Neto85082642018-03-24 06:55:20 -07003260 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003261 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003262
David Neto85082642018-03-24 06:55:20 -07003263 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003264 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003265 clspv::Option::ModuleConstantsInStorageBuffer();
3266
Kévin Petit23d5f182019-08-13 16:21:29 +01003267 if (GV.hasInitializer()) {
3268 auto GVInit = GV.getInitializer();
3269 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3270 assert(VMap.count(GVInit) == 1);
3271 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003272 }
3273 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003274
3275 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003276 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003277 Ops << MkId(InitializerID);
3278 }
David Neto85082642018-03-24 06:55:20 -07003279 const uint32_t var_id = nextID++;
3280
David Neto87846742018-04-11 17:36:22 -04003281 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003282 SPIRVInstList.push_back(Inst);
3283
3284 // If we have a builtin.
3285 if (spv::BuiltInMax != BuiltinType) {
3286 // Find Insert Point for OpDecorate.
3287 auto DecoInsertPoint =
3288 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3289 [](SPIRVInstruction *Inst) -> bool {
3290 return Inst->getOpcode() != spv::OpDecorate &&
3291 Inst->getOpcode() != spv::OpMemberDecorate &&
3292 Inst->getOpcode() != spv::OpExtInstImport;
3293 });
3294 //
3295 // Generate OpDecorate.
3296 //
3297 // DOps[0] = Target ID
3298 // DOps[1] = Decoration (Builtin)
3299 // DOps[2] = BuiltIn ID
3300 uint32_t ResultID;
3301
3302 // WorkgroupSize is different, we decorate the constant composite that has
3303 // its value, rather than the variable that we use to access the value.
3304 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3305 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003306 // Save both the value and variable IDs for later.
3307 WorkgroupSizeValueID = InitializerID;
3308 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003309 } else {
3310 ResultID = VMap[&GV];
3311 }
3312
3313 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003314 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3315 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003316
David Neto87846742018-04-11 17:36:22 -04003317 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003318 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003319 } else if (module_scope_constant_external_init) {
3320 // This module scope constant is initialized from a storage buffer with data
3321 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003322 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003323
David Neto862b7d82018-06-14 18:48:37 -04003324 // Emit the intializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003325 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3326 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003327 std::string hexbytes;
3328 llvm::raw_string_ostream str(hexbytes);
3329 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003330 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3331 str.str()};
3332 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3333 0);
David Neto85082642018-03-24 06:55:20 -07003334
3335 // Find Insert Point for OpDecorate.
3336 auto DecoInsertPoint =
3337 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3338 [](SPIRVInstruction *Inst) -> bool {
3339 return Inst->getOpcode() != spv::OpDecorate &&
3340 Inst->getOpcode() != spv::OpMemberDecorate &&
3341 Inst->getOpcode() != spv::OpExtInstImport;
3342 });
3343
David Neto257c3892018-04-11 13:19:45 -04003344 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003345 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003346 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3347 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003348 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003349
3350 // OpDecorate %var DescriptorSet <descriptor_set>
3351 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003352 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3353 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003354 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003355 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003356 }
3357}
3358
David Netoc6f3ab22018-04-06 18:02:31 -04003359void SPIRVProducerPass::GenerateWorkgroupVars() {
3360 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003361 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3362 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003363 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003364
3365 // Generate OpVariable.
3366 //
3367 // GIDOps[0] : Result Type ID
3368 // GIDOps[1] : Storage Class
3369 SPIRVOperandList Ops;
3370 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3371
3372 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003373 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003374 }
3375}
3376
David Neto862b7d82018-06-14 18:48:37 -04003377void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3378 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003379 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3380 return;
3381 }
Kévin Petit717f8572020-04-06 17:31:53 +01003382 // Add entries for each kernel
3383 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
3384 F.getName().str()};
3385 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
3386
David Neto862b7d82018-06-14 18:48:37 -04003387 // Gather the list of resources that are used by this function's arguments.
3388 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3389
alan-bakerf5e5f692018-11-27 08:33:24 -05003390 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3391 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003392 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003393 std::string kind =
3394 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3395 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003396 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003397 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003398 };
3399
3400 auto *fty = F.getType()->getPointerElementType();
3401 auto *func_ty = dyn_cast<FunctionType>(fty);
3402
alan-baker038e9242019-04-19 22:14:41 -04003403 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003404 // If an argument maps to a resource variable, then get descriptor set and
3405 // binding from the resoure variable. Other info comes from the metadata.
3406 const auto *arg_map = F.getMetadata("kernel_arg_map");
3407 if (arg_map) {
3408 for (const auto &arg : arg_map->operands()) {
3409 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003410 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003411 const auto name =
3412 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3413 const auto old_index =
3414 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3415 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003416 const size_t new_index = static_cast<size_t>(
3417 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003418 const auto offset =
3419 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003420 const auto arg_size =
3421 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003422 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003423 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003424 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003425 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003426
3427 uint32_t descriptor_set = 0;
3428 uint32_t binding = 0;
3429 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003430 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3431 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003432 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003433 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003434 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003435 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3436 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3437 DL));
David Neto862b7d82018-06-14 18:48:37 -04003438 } else {
3439 auto *info = resource_var_at_index[new_index];
3440 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003441 descriptor_set = info->descriptor_set;
3442 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003443 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003444 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3445 binding);
David Neto862b7d82018-06-14 18:48:37 -04003446 }
3447 } else {
3448 // There is no argument map.
3449 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003450 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003451
3452 SmallVector<Argument *, 4> arguments;
3453 for (auto &arg : F.args()) {
3454 arguments.push_back(&arg);
3455 }
3456
3457 unsigned arg_index = 0;
3458 for (auto *info : resource_var_at_index) {
3459 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003460 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003461 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04003462 if (info->arg_kind == clspv::ArgKind::Pod ||
3463 info->arg_kind == clspv::ArgKind::PodUBO ||
3464 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003465 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003466 }
3467
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003468 // Local pointer arguments are unused in this case. Offset is always
3469 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003470 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003471 F.getName().str(),
3472 arg->getName().str(),
3473 arg_index,
3474 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3475 0,
3476 0,
3477 0,
3478 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003479 descriptorMapEntries->emplace_back(std::move(kernel_data),
3480 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003481 }
3482 arg_index++;
3483 }
3484 // Generate mappings for pointer-to-local arguments.
3485 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3486 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003487 auto where = LocalArgSpecIds.find(arg);
3488 if (where != LocalArgSpecIds.end()) {
3489 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003490 // Pod arguments members are unused in this case.
3491 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003492 F.getName().str(),
3493 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003494 arg_index,
3495 ArgKind::Local,
3496 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003497 static_cast<uint32_t>(
3498 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003499 0,
3500 0};
3501 // Pointer-to-local arguments do not utilize descriptor set and binding.
3502 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003503 }
3504 }
3505 }
3506}
3507
// Emits the OpFunction instruction for F and registers F's SPIR-V id in the
// value map. For kernel entry points, records the (function, id) pair in the
// entry-point list and uses a zero-parameter function type (kernel arguments
// are presumably materialized elsewhere as resource variables — confirm
// against the type-generation pass). For non-kernel functions, emits one
// OpFunctionParameter per argument, decorating with Coherent any parameter
// that is ever passed a coherent resource.
void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  EntryPointVecType &EntryPoints = getEntryPointVec();
  auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
  auto &GlobalConstArgSet = getGlobalConstArgSet();

  FunctionType *FTy = F.getFunctionType();

  //
  // Generate OPFunction.
  //

  // FOps[0] : Result Type ID
  // FOps[1] : Function Control
  // FOps[2] : Function Type ID
  SPIRVOperandList FOps;

  // Find SPIRV instruction for return type.
  FOps << MkId(lookupType(FTy->getReturnType()));

  // Check function attributes for SPIRV Function Control.
  uint32_t FuncControl = spv::FunctionControlMaskNone;
  if (F.hasFnAttribute(Attribute::AlwaysInline)) {
    FuncControl |= spv::FunctionControlInlineMask;
  }
  if (F.hasFnAttribute(Attribute::NoInline)) {
    FuncControl |= spv::FunctionControlDontInlineMask;
  }
  // TODO: Check llvm attribute for Function Control Pure.
  if (F.hasFnAttribute(Attribute::ReadOnly)) {
    FuncControl |= spv::FunctionControlPureMask;
  }
  // TODO: Check llvm attribute for Function Control Const.
  if (F.hasFnAttribute(Attribute::ReadNone)) {
    FuncControl |= spv::FunctionControlConstMask;
  }

  FOps << MkNum(FuncControl);

  uint32_t FTyID;
  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    // Kernels are emitted with an empty parameter list; look up the id of
    // the corresponding zero-argument function type.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FTyID = lookupType(NewFTy);
  } else {
    // Handle regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
    } else {
      FTyID = lookupType(FTy);
    }
  }

  FOps << MkId(FTyID);

  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    // Record the entry point now; nextID is about to become the function's
    // result id.
    EntryPoints.push_back(std::make_pair(&F, nextID));
  }

  VMap[&F] = nextID;

  if (clspv::Option::ShowIDs()) {
    errs() << "Function " << F.getName() << " is " << nextID << "\n";
  }
  // Generate SPIRV instruction for function.
  auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
  SPIRVInstList.push_back(FuncInst);

  //
  // Generate OpFunctionParameter for Normal function.
  //

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {

    // Find Insert Point for OpDecorate: the first instruction that is not a
    // decoration or an extended-instruction import, so new decorations are
    // appended to the existing decoration section.
    auto DecoInsertPoint =
        std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                     [](SPIRVInstruction *Inst) -> bool {
                       return Inst->getOpcode() != spv::OpDecorate &&
                              Inst->getOpcode() != spv::OpMemberDecorate &&
                              Inst->getOpcode() != spv::OpExtInstImport;
                     });

    // Iterate Argument for name instead of param type from function type.
    unsigned ArgIdx = 0;
    for (Argument &Arg : F.args()) {
      uint32_t param_id = nextID++;
      VMap[&Arg] = param_id;

      if (CalledWithCoherentResource(Arg)) {
        // If the arg is passed a coherent resource ever, then decorate this
        // parameter with Coherent too.
        SPIRVOperandList decoration_ops;
        decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
        SPIRVInstList.insert(
            DecoInsertPoint,
            new SPIRVInstruction(spv::OpDecorate, decoration_ops));
      }

      // ParamOps[0] : Result Type ID
      SPIRVOperandList ParamOps;

      // Find SPIRV instruction for parameter type.
      uint32_t ParamTyID = lookupType(Arg.getType());
      if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
        if (GlobalConstFuncTyMap.count(FTy)) {
          if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
            // This argument position carries a global-constant pointer:
            // rewrite its type to the ModuleScopePrivate address space and
            // remember the argument for later special handling.
            Type *EleTy = PTy->getPointerElementType();
            Type *ArgTy =
                PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
            ParamTyID = lookupType(ArgTy);
            GlobalConstArgSet.insert(&Arg);
          }
        }
      }
      ParamOps << MkId(ParamTyID);

      // Generate SPIRV instruction for parameter.
      auto *ParamInst =
          new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
      SPIRVInstList.push_back(ParamInst);

      ArgIdx++;
    }
  }
}
3636
// Emits the module-level preamble — OpCapability, OpExtension,
// OpMemoryModel, OpEntryPoint, OpExecutionMode, OpSource, and the
// workgroup-size spec-id OpDecorates — by inserting the instructions at the
// front of the already-generated instruction list, in the order shown here.
void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  EntryPointVecType &EntryPoints = getEntryPointVec();
  ValueMapType &VMap = getValueMap();
  ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
  uint32_t &ExtInstImportID = getOpExtInstImportID();
  std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();

  // Set up insert point.
  auto InsertPoint = SPIRVInstList.begin();

  //
  // Generate OpCapability
  //
  // TODO: Which llvm information is mapped to SPIRV Capability?

  // Ops[0] = Capability
  SPIRVOperandList Ops;

  // The Shader capability is always required.
  auto *CapInst =
      new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
  SPIRVInstList.insert(InsertPoint, CapInst);

  // Scan every type used by the module and emit the capabilities that the
  // scalar widths and opaque image types require.
  bool write_without_format = false;
  bool sampled_1d = false;
  bool image_1d = false;
  for (Type *Ty : getTypeList()) {
    if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
      // Generate OpCapability for i8 type.
      SPIRVInstList.insert(
          InsertPoint,
          new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
    } else if (Ty->isIntegerTy(16)) {
      // Generate OpCapability for i16 type.
      SPIRVInstList.insert(
          InsertPoint,
          new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
    } else if (Ty->isIntegerTy(64)) {
      // Generate OpCapability for i64 type.
      SPIRVInstList.insert(
          InsertPoint,
          new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
    } else if (Ty->isHalfTy()) {
      // Generate OpCapability for half type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                MkNum(spv::CapabilityFloat16)));
    } else if (Ty->isDoubleTy()) {
      // Generate OpCapability for double type.
      SPIRVInstList.insert(InsertPoint,
                           new SPIRVInstruction(spv::OpCapability,
                                                MkNum(spv::CapabilityFloat64)));
    } else if (auto *STy = dyn_cast<StructType>(Ty)) {
      if (STy->isOpaque()) {
        // Write-only image types require the write-without-format
        // capability; 1D image types require Image1D or Sampled1D.
        if (STy->getName().startswith("opencl.image1d_wo_t") ||
            STy->getName().startswith("opencl.image1d_array_wo_t") ||
            STy->getName().startswith("opencl.image2d_wo_t") ||
            STy->getName().startswith("opencl.image2d_array_wo_t") ||
            STy->getName().startswith("opencl.image3d_wo_t")) {
          write_without_format = true;
        }
        if (STy->getName().startswith("opencl.image1d_ro_t") ||
            STy->getName().startswith("opencl.image1d_wo_t") ||
            STy->getName().startswith("opencl.image1d_array_ro_t") ||
            STy->getName().startswith("opencl.image1d_array_wo_t")) {
          if (STy->getName().contains(".sampled"))
            sampled_1d = true;
          else
            image_1d = true;
        }
      }
    }
  }

  if (write_without_format) {
    // Generate OpCapability for write only image type.
    SPIRVInstList.insert(
        InsertPoint,
        new SPIRVInstruction(
            spv::OpCapability,
            {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
  }
  if (image_1d) {
    // Generate OpCapability for unsampled 1D image type.
    // NOTE(review): Image1D alone is emitted here even when sampled 1D
    // images are also present — presumably Image1D implies Sampled1D;
    // confirm against the SPIR-V capability dependency table.
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpCapability,
                                              {MkNum(spv::CapabilityImage1D)}));
  } else if (sampled_1d) {
    // Generate OpCapability for sampled 1D image type.
    SPIRVInstList.insert(
        InsertPoint, new SPIRVInstruction(spv::OpCapability,
                                          {MkNum(spv::CapabilitySampled1D)}));
  }

  { // OpCapability ImageQuery
    // Needed if any function in the module is an image-query builtin.
    bool hasImageQuery = false;
    for (const auto &SymVal : module.getValueSymbolTable()) {
      if (auto F = dyn_cast<Function>(SymVal.getValue())) {
        if (IsImageQuery(F)) {
          hasImageQuery = true;
          break;
        }
      }
    }

    if (hasImageQuery) {
      auto *ImageQueryCapInst = new SPIRVInstruction(
          spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
      SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
    }
  }

  // Variable-pointer capabilities: the full capability subsumes the
  // storage-buffer-only one, so at most one of the two is emitted.
  if (hasVariablePointers()) {
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointers);

    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpCapability, Ops));
  } else if (hasVariablePointersStorageBuffer()) {
    //
    // Generate OpCapability.
    //
    // Ops[0] = Capability
    //
    Ops.clear();
    Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);

    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpCapability, Ops));
  }

  // Always add the storage buffer extension
  {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
    SPIRVInstList.insert(InsertPoint, ExtensionInst);
  }

  if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    auto *ExtensionInst = new SPIRVInstruction(
        spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
    SPIRVInstList.insert(InsertPoint, ExtensionInst);
  }

  if (ExtInstImportID) {
    // An OpExtInstImport was generated earlier; step the insert point past
    // one instruction so the following instructions (OpMemoryModel onward)
    // land after it. NOTE(review): this presumes that instruction sits at
    // the current insert point — confirm against where OpExtInstImport is
    // emitted. SPIR-V module layout requires OpExtInstImport to precede
    // OpMemoryModel.
    ++InsertPoint;
  }

  //
  // Generate OpMemoryModel
  //
  // Memory model for Vulkan will always be GLSL450.

  // Ops[0] = Addressing Model
  // Ops[1] = Memory Model
  Ops.clear();
  Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);

  auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
  SPIRVInstList.insert(InsertPoint, MemModelInst);

  //
  // Generate OpEntryPoint
  //
  for (auto EntryPoint : EntryPoints) {
    // Ops[0] = Execution Model
    // Ops[1] = EntryPoint ID
    // Ops[2] = Name (Literal String)
    // ...
    //
    // TODO: Do we need to consider Interface ID for forward references???
    Ops.clear();
    const StringRef &name = EntryPoint.first->getName();
    Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
        << MkString(name);

    // Every entry point lists the same interface variables.
    for (Value *Interface : EntryPointInterfaces) {
      Ops << MkId(VMap[Interface]);
    }

    auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
    SPIRVInstList.insert(InsertPoint, EntryPointInst);
  }

  for (auto EntryPoint : EntryPoints) {
    if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
                               ->getMetadata("reqd_work_group_size")) {

      // A non-empty BuiltinDimVec means another kernel already declared a
      // (spec-constant) work group size; mixing that with
      // reqd_work_group_size is not supported.
      if (!BuiltinDimVec.empty()) {
        llvm_unreachable(
            "Kernels should have consistent work group size definition");
      }

      //
      // Generate OpExecutionMode
      //

      // Ops[0] = Entry Point ID
      // Ops[1] = Execution Mode
      // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
      Ops.clear();
      Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);

      // reqd_work_group_size carries the three dimensions as metadata
      // operands 0..2.
      uint32_t XDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
      uint32_t YDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
      uint32_t ZDim = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

      Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);

      auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
      SPIRVInstList.insert(InsertPoint, ExecModeInst);
    }
  }

  //
  // Generate OpSource.
  //
  // Ops[0] = SourceLanguage ID
  // Ops[1] = Version (LiteralNum)
  //
  Ops.clear();
  switch (clspv::Option::Language()) {
  case clspv::Option::SourceLanguage::OpenCL_C_10:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_11:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_12:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_20:
    Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
    break;
  case clspv::Option::SourceLanguage::OpenCL_CPP:
    Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
    break;
  default:
    Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
    break;
  }

  auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
  SPIRVInstList.insert(InsertPoint, OpenSourceInst);

  if (!BuiltinDimVec.empty()) {
    //
    // Generate OpDecorates for x/y/z dimension.
    //
    // Ops[0] = Target ID
    // Ops[1] = Decoration (SpecId)
    // Ops[2] = Specialization Constant ID (Literal Number)

    // X Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));

    // Y Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));

    // Z Dimension
    Ops.clear();
    Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
    SPIRVInstList.insert(InsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));
  }
}
3927
David Netob6e2e062018-04-25 10:32:06 -04003928void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3929 // Work around a driver bug. Initializers on Private variables might not
3930 // work. So the start of the kernel should store the initializer value to the
3931 // variables. Yes, *every* entry point pays this cost if *any* entry point
3932 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3933 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003934 // TODO(dneto): Remove this at some point once fixed drivers are widely
3935 // available.
David Netob6e2e062018-04-25 10:32:06 -04003936 if (WorkgroupSizeVarID) {
3937 assert(WorkgroupSizeValueID);
3938
3939 SPIRVOperandList Ops;
3940 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3941
3942 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3943 getSPIRVInstList().push_back(Inst);
3944 }
3945}
3946
David Neto22f144c2017-06-12 14:26:21 -04003947void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3948 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3949 ValueMapType &VMap = getValueMap();
3950
David Netob6e2e062018-04-25 10:32:06 -04003951 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003952
3953 for (BasicBlock &BB : F) {
3954 // Register BasicBlock to ValueMap.
3955 VMap[&BB] = nextID;
3956
3957 //
3958 // Generate OpLabel for Basic Block.
3959 //
3960 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003961 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003962 SPIRVInstList.push_back(Inst);
3963
David Neto6dcd4712017-06-23 11:06:47 -04003964 // OpVariable instructions must come first.
3965 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003966 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3967 // Allocating a pointer requires variable pointers.
3968 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003969 setVariablePointersCapabilities(
3970 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003971 }
David Neto6dcd4712017-06-23 11:06:47 -04003972 GenerateInstruction(I);
3973 }
3974 }
3975
David Neto22f144c2017-06-12 14:26:21 -04003976 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003977 if (clspv::Option::HackInitializers()) {
3978 GenerateEntryPointInitialStores();
3979 }
David Neto22f144c2017-06-12 14:26:21 -04003980 }
3981
3982 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003983 if (!isa<AllocaInst>(I)) {
3984 GenerateInstruction(I);
3985 }
David Neto22f144c2017-06-12 14:26:21 -04003986 }
3987 }
3988}
3989
3990spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3991 const std::map<CmpInst::Predicate, spv::Op> Map = {
3992 {CmpInst::ICMP_EQ, spv::OpIEqual},
3993 {CmpInst::ICMP_NE, spv::OpINotEqual},
3994 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3995 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3996 {CmpInst::ICMP_ULT, spv::OpULessThan},
3997 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3998 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3999 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
4000 {CmpInst::ICMP_SLT, spv::OpSLessThan},
4001 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
4002 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
4003 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
4004 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
4005 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
4006 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
4007 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
4008 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
4009 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
4010 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
4011 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
4012 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
4013 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
4014
4015 assert(0 != Map.count(I->getPredicate()));
4016
4017 return Map.at(I->getPredicate());
4018}
4019
4020spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
4021 const std::map<unsigned, spv::Op> Map{
4022 {Instruction::Trunc, spv::OpUConvert},
4023 {Instruction::ZExt, spv::OpUConvert},
4024 {Instruction::SExt, spv::OpSConvert},
4025 {Instruction::FPToUI, spv::OpConvertFToU},
4026 {Instruction::FPToSI, spv::OpConvertFToS},
4027 {Instruction::UIToFP, spv::OpConvertUToF},
4028 {Instruction::SIToFP, spv::OpConvertSToF},
4029 {Instruction::FPTrunc, spv::OpFConvert},
4030 {Instruction::FPExt, spv::OpFConvert},
4031 {Instruction::BitCast, spv::OpBitcast}};
4032
4033 assert(0 != Map.count(I.getOpcode()));
4034
4035 return Map.at(I.getOpcode());
4036}
4037
4038spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00004039 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004040 switch (I.getOpcode()) {
4041 default:
4042 break;
4043 case Instruction::Or:
4044 return spv::OpLogicalOr;
4045 case Instruction::And:
4046 return spv::OpLogicalAnd;
4047 case Instruction::Xor:
4048 return spv::OpLogicalNotEqual;
4049 }
4050 }
4051
alan-bakerb6b09dc2018-11-08 16:59:28 -05004052 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04004053 {Instruction::Add, spv::OpIAdd},
4054 {Instruction::FAdd, spv::OpFAdd},
4055 {Instruction::Sub, spv::OpISub},
4056 {Instruction::FSub, spv::OpFSub},
4057 {Instruction::Mul, spv::OpIMul},
4058 {Instruction::FMul, spv::OpFMul},
4059 {Instruction::UDiv, spv::OpUDiv},
4060 {Instruction::SDiv, spv::OpSDiv},
4061 {Instruction::FDiv, spv::OpFDiv},
4062 {Instruction::URem, spv::OpUMod},
4063 {Instruction::SRem, spv::OpSRem},
4064 {Instruction::FRem, spv::OpFRem},
4065 {Instruction::Or, spv::OpBitwiseOr},
4066 {Instruction::Xor, spv::OpBitwiseXor},
4067 {Instruction::And, spv::OpBitwiseAnd},
4068 {Instruction::Shl, spv::OpShiftLeftLogical},
4069 {Instruction::LShr, spv::OpShiftRightLogical},
4070 {Instruction::AShr, spv::OpShiftRightArithmetic}};
4071
4072 assert(0 != Map.count(I.getOpcode()));
4073
4074 return Map.at(I.getOpcode());
4075}
4076
4077void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
4078 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4079 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04004080 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4081 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
4082
4083 // Register Instruction to ValueMap.
4084 if (0 == VMap[&I]) {
4085 VMap[&I] = nextID;
4086 }
4087
4088 switch (I.getOpcode()) {
4089 default: {
4090 if (Instruction::isCast(I.getOpcode())) {
4091 //
4092 // Generate SPIRV instructions for cast operators.
4093 //
4094
David Netod2de94a2017-08-28 17:27:47 -04004095 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004096 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04004097 auto toI8 = Ty == Type::getInt8Ty(Context);
4098 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04004099 // Handle zext, sext and uitofp with i1 type specially.
4100 if ((I.getOpcode() == Instruction::ZExt ||
4101 I.getOpcode() == Instruction::SExt ||
4102 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05004103 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004104 //
4105 // Generate OpSelect.
4106 //
4107
4108 // Ops[0] = Result Type ID
4109 // Ops[1] = Condition ID
4110 // Ops[2] = True Constant ID
4111 // Ops[3] = False Constant ID
4112 SPIRVOperandList Ops;
4113
David Neto257c3892018-04-11 13:19:45 -04004114 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004115
David Neto22f144c2017-06-12 14:26:21 -04004116 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004117 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04004118
4119 uint32_t TrueID = 0;
4120 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004121 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04004122 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004123 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04004124 } else {
4125 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
4126 }
David Neto257c3892018-04-11 13:19:45 -04004127 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04004128
4129 uint32_t FalseID = 0;
4130 if (I.getOpcode() == Instruction::ZExt) {
4131 FalseID = VMap[Constant::getNullValue(I.getType())];
4132 } else if (I.getOpcode() == Instruction::SExt) {
4133 FalseID = VMap[Constant::getNullValue(I.getType())];
4134 } else {
4135 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
4136 }
David Neto257c3892018-04-11 13:19:45 -04004137 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04004138
David Neto87846742018-04-11 17:36:22 -04004139 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004140 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05004141 } else if (!clspv::Option::Int8Support() &&
4142 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004143 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4144 // 8 bits.
4145 // Before:
4146 // %result = trunc i32 %a to i8
4147 // After
4148 // %result = OpBitwiseAnd %uint %a %uint_255
4149
4150 SPIRVOperandList Ops;
4151
David Neto257c3892018-04-11 13:19:45 -04004152 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04004153
4154 Type *UintTy = Type::getInt32Ty(Context);
4155 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04004156 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04004157
David Neto87846742018-04-11 17:36:22 -04004158 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04004159 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004160 } else {
4161 // Ops[0] = Result Type ID
4162 // Ops[1] = Source Value ID
4163 SPIRVOperandList Ops;
4164
David Neto257c3892018-04-11 13:19:45 -04004165 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004166
David Neto87846742018-04-11 17:36:22 -04004167 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004168 SPIRVInstList.push_back(Inst);
4169 }
4170 } else if (isa<BinaryOperator>(I)) {
4171 //
4172 // Generate SPIRV instructions for binary operators.
4173 //
4174
4175 // Handle xor with i1 type specially.
4176 if (I.getOpcode() == Instruction::Xor &&
4177 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004178 ((isa<ConstantInt>(I.getOperand(0)) &&
4179 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4180 (isa<ConstantInt>(I.getOperand(1)) &&
4181 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004182 //
4183 // Generate OpLogicalNot.
4184 //
4185 // Ops[0] = Result Type ID
4186 // Ops[1] = Operand
4187 SPIRVOperandList Ops;
4188
David Neto257c3892018-04-11 13:19:45 -04004189 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004190
4191 Value *CondV = I.getOperand(0);
4192 if (isa<Constant>(I.getOperand(0))) {
4193 CondV = I.getOperand(1);
4194 }
David Neto257c3892018-04-11 13:19:45 -04004195 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04004196
David Neto87846742018-04-11 17:36:22 -04004197 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004198 SPIRVInstList.push_back(Inst);
4199 } else {
4200 // Ops[0] = Result Type ID
4201 // Ops[1] = Operand 0
4202 // Ops[2] = Operand 1
4203 SPIRVOperandList Ops;
4204
David Neto257c3892018-04-11 13:19:45 -04004205 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4206 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004207
David Neto87846742018-04-11 17:36:22 -04004208 auto *Inst =
4209 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004210 SPIRVInstList.push_back(Inst);
4211 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004212 } else if (I.getOpcode() == Instruction::FNeg) {
4213 // The only unary operator.
4214 //
4215 // Ops[0] = Result Type ID
4216 // Ops[1] = Operand 0
4217 SPIRVOperandList ops;
4218
4219 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
4220 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
4221 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004222 } else {
4223 I.print(errs());
4224 llvm_unreachable("Unsupported instruction???");
4225 }
4226 break;
4227 }
4228 case Instruction::GetElementPtr: {
4229 auto &GlobalConstArgSet = getGlobalConstArgSet();
4230
4231 //
4232 // Generate OpAccessChain.
4233 //
4234 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4235
4236 //
4237 // Generate OpAccessChain.
4238 //
4239
4240 // Ops[0] = Result Type ID
4241 // Ops[1] = Base ID
4242 // Ops[2] ... Ops[n] = Indexes ID
4243 SPIRVOperandList Ops;
4244
alan-bakerb6b09dc2018-11-08 16:59:28 -05004245 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004246 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4247 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4248 // Use pointer type with private address space for global constant.
4249 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004250 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004251 }
David Neto257c3892018-04-11 13:19:45 -04004252
4253 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004254
David Neto862b7d82018-06-14 18:48:37 -04004255 // Generate the base pointer.
4256 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004257
David Neto862b7d82018-06-14 18:48:37 -04004258 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004259
4260 //
4261 // Follows below rules for gep.
4262 //
David Neto862b7d82018-06-14 18:48:37 -04004263 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
4264 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004265 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4266 // first index.
4267 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4268 // use gep's first index.
4269 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
4270 // gep's first index.
4271 //
4272 spv::Op Opcode = spv::OpAccessChain;
4273 unsigned offset = 0;
4274 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004275 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004276 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004277 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004278 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004279 }
David Neto862b7d82018-06-14 18:48:37 -04004280 } else {
David Neto22f144c2017-06-12 14:26:21 -04004281 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004282 }
4283
4284 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004285 // Do we need to generate ArrayStride? Check against the GEP result type
4286 // rather than the pointer type of the base because when indexing into
4287 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4288 // for something else in the SPIR-V.
4289 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004290 auto address_space = ResultType->getAddressSpace();
4291 setVariablePointersCapabilities(address_space);
4292 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004293 case spv::StorageClassStorageBuffer:
4294 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004295 // Save the need to generate an ArrayStride decoration. But defer
4296 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004297 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004298 break;
4299 default:
4300 break;
David Neto1a1a0582017-07-07 12:01:44 -04004301 }
David Neto22f144c2017-06-12 14:26:21 -04004302 }
4303
4304 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004305 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004306 }
4307
David Neto87846742018-04-11 17:36:22 -04004308 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004309 SPIRVInstList.push_back(Inst);
4310 break;
4311 }
4312 case Instruction::ExtractValue: {
4313 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4314 // Ops[0] = Result Type ID
4315 // Ops[1] = Composite ID
4316 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4317 SPIRVOperandList Ops;
4318
David Neto257c3892018-04-11 13:19:45 -04004319 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004320
4321 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004322 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004323
4324 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004325 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004326 }
4327
David Neto87846742018-04-11 17:36:22 -04004328 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004329 SPIRVInstList.push_back(Inst);
4330 break;
4331 }
4332 case Instruction::InsertValue: {
4333 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4334 // Ops[0] = Result Type ID
4335 // Ops[1] = Object ID
4336 // Ops[2] = Composite ID
4337 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4338 SPIRVOperandList Ops;
4339
4340 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004341 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004342
4343 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004344 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004345
4346 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004347 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004348
4349 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004350 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004351 }
4352
David Neto87846742018-04-11 17:36:22 -04004353 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004354 SPIRVInstList.push_back(Inst);
4355 break;
4356 }
  case Instruction::Select: {
    //
    // Generate OpSelect.
    //

    // Ops[0] = Result Type ID
    // Ops[1] = Condition ID
    // Ops[2] = True Constant ID
    // Ops[3] = False Constant ID
    SPIRVOperandList Ops;

    // Find SPIRV instruction for parameter type.
    auto Ty = I.getType();
    if (Ty->isPointerTy()) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        // For a pointer to an opaque struct, use the pointee type as the
        // result type instead of the pointer type.
        // NOTE(review): presumably opaque structs are translated to
        // non-pointer SPIR-V types by lookupType — confirm.
        Ty = PointeeTy;
      } else {
        // Selecting between pointers requires variable pointers.
        setVariablePointersCapabilities(Ty->getPointerAddressSpace());
        // Only force the general capability on when the select is not
        // provably picking between pointers into the same object.
        if (!hasVariablePointers() && !selectFromSameObject(&I)) {
          setVariablePointers(true);
        }
      }
    }

    // Condition, then the true value, then the false value.
    Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
        << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);

    auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::ExtractElement: {
    // Handle <4 x i8> type manually.
    // A <4 x i8> vector is represented as a single 32-bit integer, so an
    // element extraction becomes: shift right by (index * 8), then mask with
    // 0xFF. The result ID of this instruction is the ID of the final mask op.
    Type *CompositeTy = I.getOperand(0)->getType();
    if (is4xi8vec(CompositeTy)) {
      //
      // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
      // <4 x i8>.
      //

      //
      // Generate OpShiftRightLogical
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Operand 0
      // Ops[2] = Operand 1
      //
      SPIRVOperandList Ops;

      Ops << MkId(lookupType(CompositeTy));

      uint32_t Op0ID = VMap[I.getOperand(0)];
      Ops << MkId(Op0ID);

      uint32_t Op1ID = 0;
      if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
        // Handle constant index.
        // The shift amount (index * 8) is a constant that is expected to
        // already have an ID in VMap.
        uint64_t Idx = CI->getZExtValue();
        Value *ShiftAmount =
            ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
        Op1ID = VMap[ShiftAmount];
      } else {
        // Handle variable index.
        // Compute the shift amount at runtime: index * 8 via OpIMul.
        SPIRVOperandList TmpOps;

        TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
               << MkId(VMap[I.getOperand(1)]);

        ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
        TmpOps << MkId(VMap[Cst8]);

        // Reserve the multiply's result ID before emitting it.
        Op1ID = nextID;

        auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
        SPIRVInstList.push_back(TmpInst);
      }
      Ops << MkId(Op1ID);

      // Remember the shift's result ID so the mask below can consume it.
      uint32_t ShiftID = nextID;

      auto *Inst =
          new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      //
      // Generate OpBitwiseAnd
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Operand 0
      // Ops[2] = Operand 1
      //
      Ops.clear();

      Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);

      Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
      Ops << MkId(VMap[CstFF]);

      // Reset mapping for this value to the result of the bitwise and.
      VMap[&I] = nextID;

      Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
      SPIRVInstList.push_back(Inst);
      break;
    }

    // General case: a real vector extraction.
    // Ops[0] = Result Type ID
    // Ops[1] = Composite ID
    // Ops[2] ... Ops[n] = Indexes (Literal Number)
    SPIRVOperandList Ops;

    Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);

    // A constant index uses OpCompositeExtract with a literal index; a
    // dynamic index uses OpVectorExtractDynamic with an index ID.
    spv::Op Opcode = spv::OpCompositeExtract;
    if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
      Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
    } else {
      Ops << MkId(VMap[I.getOperand(1)]);
      Opcode = spv::OpVectorExtractDynamic;
    }

    auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
  case Instruction::InsertElement: {
    // Handle <4 x i8> type manually.
    // A <4 x i8> vector is represented as a single 32-bit integer, so an
    // element insertion is emulated with shift/mask arithmetic:
    //   mask      = 0xFF << (index * 8)
    //   original  = value & ~mask
    //   inserted  = newByte << (index * 8)
    //   result    = original | inserted
    Type *CompositeTy = I.getOperand(0)->getType();
    if (is4xi8vec(CompositeTy)) {
      Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
      uint32_t CstFFID = VMap[CstFF];

      uint32_t ShiftAmountID = 0;
      if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
        // Handle constant index.
        // The shift amount (index * 8) is a constant expected to already
        // have an ID in VMap.
        uint64_t Idx = CI->getZExtValue();
        Value *ShiftAmount =
            ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
        ShiftAmountID = VMap[ShiftAmount];
      } else {
        // Handle variable index.
        // Compute the shift amount at runtime: index * 8 via OpIMul.
        SPIRVOperandList TmpOps;

        TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
               << MkId(VMap[I.getOperand(2)]);

        ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
        TmpOps << MkId(VMap[Cst8]);

        // Reserve the multiply's result ID before emitting it.
        ShiftAmountID = nextID;

        auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
        SPIRVInstList.push_back(TmpInst);
      }

      //
      // Generate mask operations.
      //

      // ShiftLeft mask according to index of insertelement.
      SPIRVOperandList Ops;

      const uint32_t ResTyID = lookupType(CompositeTy);
      Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);

      uint32_t MaskID = nextID;

      auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Inverse mask.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(MaskID);

      uint32_t InvMaskID = nextID;

      Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Apply mask.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);

      uint32_t OrgValID = nextID;

      Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Create correct value according to index of insertelement.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
          << MkId(ShiftAmountID);

      uint32_t InsertValID = nextID;

      Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      // Insert value to original value.
      Ops.clear();
      Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);

      // This instruction's result is the final bitwise-or.
      VMap[&I] = nextID;

      Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
      SPIRVInstList.push_back(Inst);

      break;
    }

    // General case: a real vector insertion.
    SPIRVOperandList Ops;

    // Ops[0] = Result Type ID
    Ops << MkId(lookupType(I.getType()));

    // A constant index uses OpCompositeInsert with a literal index; a
    // dynamic index uses OpVectorInsertDynamic with an index ID. Note the
    // two opcodes take the object and composite in opposite orders.
    spv::Op Opcode = spv::OpCompositeInsert;
    if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
      const auto value = CI->getZExtValue();
      assert(value <= UINT32_MAX);
      // Ops[1] = Object ID
      // Ops[2] = Composite ID
      // Ops[3] ... Ops[n] = Indexes (Literal Number)
      Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
          << MkNum(static_cast<uint32_t>(value));
    } else {
      // Ops[1] = Composite ID
      // Ops[2] = Object ID
      // Ops[3] ... Ops[n] = Indexes (Literal Number)
      Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
          << MkId(VMap[I.getOperand(2)]);
      Opcode = spv::OpVectorInsertDynamic;
    }

    auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
4597 case Instruction::ShuffleVector: {
4598 // Ops[0] = Result Type ID
4599 // Ops[1] = Vector 1 ID
4600 // Ops[2] = Vector 2 ID
4601 // Ops[3] ... Ops[n] = Components (Literal Number)
4602 SPIRVOperandList Ops;
4603
David Neto257c3892018-04-11 13:19:45 -04004604 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4605 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004606
alan-bakerc9666712020-04-01 16:31:21 -04004607 auto shuffle = cast<ShuffleVectorInst>(&I);
4608 SmallVector<int, 4> mask;
4609 shuffle->getShuffleMask(mask);
4610 for (auto i : mask) {
4611 if (i == UndefMaskElem) {
4612 if (clspv::Option::HackUndef())
4613 // Use 0 instead of undef.
David Neto257c3892018-04-11 13:19:45 -04004614 Ops << MkNum(0);
alan-bakerc9666712020-04-01 16:31:21 -04004615 else
4616 // Undef for shuffle in SPIR-V.
4617 Ops << MkNum(0xffffffff);
David Neto22f144c2017-06-12 14:26:21 -04004618 } else {
alan-bakerc9666712020-04-01 16:31:21 -04004619 Ops << MkNum(i);
David Neto22f144c2017-06-12 14:26:21 -04004620 }
4621 }
4622
David Neto87846742018-04-11 17:36:22 -04004623 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004624 SPIRVInstList.push_back(Inst);
4625 break;
4626 }
4627 case Instruction::ICmp:
4628 case Instruction::FCmp: {
4629 CmpInst *CmpI = cast<CmpInst>(&I);
4630
David Netod4ca2e62017-07-06 18:47:35 -04004631 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004632 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004633 if (isa<PointerType>(ArgTy)) {
4634 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004635 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004636 errs()
4637 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4638 << "in function " << name << "\n";
4639 llvm_unreachable("Pointer equality check is invalid");
4640 break;
4641 }
4642
David Neto257c3892018-04-11 13:19:45 -04004643 // Ops[0] = Result Type ID
4644 // Ops[1] = Operand 1 ID
4645 // Ops[2] = Operand 2 ID
4646 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004647
David Neto257c3892018-04-11 13:19:45 -04004648 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4649 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004650
4651 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004652 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004653 SPIRVInstList.push_back(Inst);
4654 break;
4655 }
  case Instruction::Br: {
    // Branch generation is deferred because it needs the target labels' IDs,
    // which are not assigned yet. Record the current slot in the instruction
    // list so the branch can be inserted there later. A branch produces no
    // result value, hence id 0.
    DeferredInsts.push_back(
        std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
    break;
  }
  case Instruction::Switch: {
    // Not supported: print the offending instruction and abort.
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
  case Instruction::IndirectBr: {
    // Not supported: print the offending instruction and abort.
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
  case Instruction::PHI: {
    // PHI generation is deferred because the incoming blocks' label IDs are
    // not assigned yet. Reserve this PHI's result ID now (nextID++) so later
    // instructions can refer to its value, and record the slot where the
    // OpPhi will eventually be inserted.
    DeferredInsts.push_back(
        std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
    break;
  }
4680 case Instruction::Alloca: {
4681 //
4682 // Generate OpVariable.
4683 //
4684 // Ops[0] : Result Type ID
4685 // Ops[1] : Storage Class
4686 SPIRVOperandList Ops;
4687
David Neto257c3892018-04-11 13:19:45 -04004688 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004689
David Neto87846742018-04-11 17:36:22 -04004690 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004691 SPIRVInstList.push_back(Inst);
4692 break;
4693 }
  case Instruction::Load: {
    LoadInst *LD = cast<LoadInst>(&I);
    //
    // Generate OpLoad.
    //

    if (LD->getType()->isPointerTy()) {
      // Loading a pointer requires variable pointers.
      setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
    }

    uint32_t ResTyID = lookupType(LD->getType());
    uint32_t PointerID = VMap[LD->getPointerOperand()];

    // This is a hack to work around what looks like a driver bug.
    // When we're loading from the special variable holding the WorkgroupSize
    // builtin value, use an OpBitWiseAnd of the value's ID rather than
    // generating a load.
    // TODO(dneto): Remove this awful hack once drivers are fixed.
    if (PointerID == WorkgroupSizeVarID) {
      // Generate a bitwise-and of the original value with itself.
      // We should have been able to get away with just an OpCopyObject,
      // but we need something more complex to get past certain driver bugs.
      // This is ridiculous, but necessary.
      // TODO(dneto): Revisit this once drivers fix their bugs.

      SPIRVOperandList Ops;
      Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
          << MkId(WorkgroupSizeValueID);

      auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
      SPIRVInstList.push_back(Inst);
      break;
    }

    // This is the normal path. Generate a load.

    // Ops[0] = Result Type ID
    // Ops[1] = Pointer ID
    // Ops[2] ... Ops[n] = Optional Memory Access
    //
    // TODO: Do we need to implement Optional Memory Access???

    SPIRVOperandList Ops;
    Ops << MkId(ResTyID) << MkId(PointerID);

    auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
    SPIRVInstList.push_back(Inst);
    break;
  }
4744 case Instruction::Store: {
4745 StoreInst *ST = cast<StoreInst>(&I);
4746 //
4747 // Generate OpStore.
4748 //
4749
alan-baker5b86ed72019-02-15 08:26:50 -05004750 if (ST->getValueOperand()->getType()->isPointerTy()) {
4751 // Storing a pointer requires variable pointers.
4752 setVariablePointersCapabilities(
4753 ST->getValueOperand()->getType()->getPointerAddressSpace());
4754 }
4755
David Neto22f144c2017-06-12 14:26:21 -04004756 // Ops[0] = Pointer ID
4757 // Ops[1] = Object ID
4758 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4759 //
4760 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004761 SPIRVOperandList Ops;
4762 Ops << MkId(VMap[ST->getPointerOperand()])
4763 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004764
David Neto87846742018-04-11 17:36:22 -04004765 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004766 SPIRVInstList.push_back(Inst);
4767 break;
4768 }
  case Instruction::AtomicCmpXchg: {
    // Not supported: print the offending instruction and abort.
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
4774 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004775 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4776
4777 spv::Op opcode;
4778
4779 switch (AtomicRMW->getOperation()) {
4780 default:
4781 I.print(errs());
4782 llvm_unreachable("Unsupported instruction???");
4783 case llvm::AtomicRMWInst::Add:
4784 opcode = spv::OpAtomicIAdd;
4785 break;
4786 case llvm::AtomicRMWInst::Sub:
4787 opcode = spv::OpAtomicISub;
4788 break;
4789 case llvm::AtomicRMWInst::Xchg:
4790 opcode = spv::OpAtomicExchange;
4791 break;
4792 case llvm::AtomicRMWInst::Min:
4793 opcode = spv::OpAtomicSMin;
4794 break;
4795 case llvm::AtomicRMWInst::Max:
4796 opcode = spv::OpAtomicSMax;
4797 break;
4798 case llvm::AtomicRMWInst::UMin:
4799 opcode = spv::OpAtomicUMin;
4800 break;
4801 case llvm::AtomicRMWInst::UMax:
4802 opcode = spv::OpAtomicUMax;
4803 break;
4804 case llvm::AtomicRMWInst::And:
4805 opcode = spv::OpAtomicAnd;
4806 break;
4807 case llvm::AtomicRMWInst::Or:
4808 opcode = spv::OpAtomicOr;
4809 break;
4810 case llvm::AtomicRMWInst::Xor:
4811 opcode = spv::OpAtomicXor;
4812 break;
4813 }
4814
4815 //
4816 // Generate OpAtomic*.
4817 //
4818 SPIRVOperandList Ops;
4819
David Neto257c3892018-04-11 13:19:45 -04004820 Ops << MkId(lookupType(I.getType()))
4821 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004822
4823 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004824 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004825 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004826
4827 const auto ConstantMemorySemantics = ConstantInt::get(
4828 IntTy, spv::MemorySemanticsUniformMemoryMask |
4829 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004830 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004831
David Neto257c3892018-04-11 13:19:45 -04004832 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004833
4834 VMap[&I] = nextID;
4835
David Neto87846742018-04-11 17:36:22 -04004836 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004837 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004838 break;
4839 }
  case Instruction::Fence: {
    // Not supported: print the offending instruction and abort.
    I.print(errs());
    llvm_unreachable("Unsupported instruction???");
    break;
  }
4845 case Instruction::Call: {
4846 CallInst *Call = dyn_cast<CallInst>(&I);
4847 Function *Callee = Call->getCalledFunction();
4848
Alan Baker202c8c72018-08-13 13:47:44 -04004849 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004850 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4851 // Generate an OpLoad
4852 SPIRVOperandList Ops;
4853 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004854
David Neto862b7d82018-06-14 18:48:37 -04004855 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4856 << MkId(ResourceVarDeferredLoadCalls[Call]);
4857
4858 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4859 SPIRVInstList.push_back(Inst);
4860 VMap[Call] = load_id;
4861 break;
4862
4863 } else {
4864 // This maps to an OpVariable we've already generated.
4865 // No code is generated for the call.
4866 }
4867 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004868 } else if (Callee->getName().startswith(
4869 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004870 // Don't codegen an instruction here, but instead map this call directly
4871 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004872 int spec_id = static_cast<int>(
4873 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004874 const auto &info = LocalSpecIdInfoMap[spec_id];
4875 VMap[Call] = info.variable_id;
4876 break;
David Neto862b7d82018-06-14 18:48:37 -04004877 }
4878
4879 // Sampler initializers become a load of the corresponding sampler.
4880
Kévin Petitdf71de32019-04-09 14:09:50 +01004881 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004882 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004883 const auto third_param = static_cast<unsigned>(
4884 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4885 auto sampler_value = third_param;
4886 if (clspv::Option::UseSamplerMap()) {
4887 sampler_value = getSamplerMap()[third_param].first;
4888 }
David Neto862b7d82018-06-14 18:48:37 -04004889
4890 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004891 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004892 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004893
David Neto257c3892018-04-11 13:19:45 -04004894 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004895 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004896
David Neto862b7d82018-06-14 18:48:37 -04004897 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004898 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004899 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004900 break;
4901 }
4902
Kévin Petit349c9502019-03-28 17:24:14 +00004903 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004904 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4905 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4906 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004907
Kévin Petit617a76d2019-04-04 13:54:16 +01004908 // If the switch above didn't have an entry maybe the intrinsic
4909 // is using the name mangling logic.
4910 bool usesMangler = false;
4911 if (opcode == spv::OpNop) {
4912 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4913 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4914 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4915 usesMangler = true;
4916 }
4917 }
4918
Kévin Petit349c9502019-03-28 17:24:14 +00004919 if (opcode != spv::OpNop) {
4920
David Neto22f144c2017-06-12 14:26:21 -04004921 SPIRVOperandList Ops;
4922
Kévin Petit349c9502019-03-28 17:24:14 +00004923 if (!I.getType()->isVoidTy()) {
4924 Ops << MkId(lookupType(I.getType()));
4925 }
David Neto22f144c2017-06-12 14:26:21 -04004926
Kévin Petit617a76d2019-04-04 13:54:16 +01004927 unsigned firstOperand = usesMangler ? 1 : 0;
4928 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004929 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004930 }
4931
Kévin Petit349c9502019-03-28 17:24:14 +00004932 if (!I.getType()->isVoidTy()) {
4933 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004934 }
4935
Kévin Petit349c9502019-03-28 17:24:14 +00004936 SPIRVInstruction *Inst;
4937 if (!I.getType()->isVoidTy()) {
4938 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4939 } else {
4940 Inst = new SPIRVInstruction(opcode, Ops);
4941 }
Kévin Petit8a560882019-03-21 15:24:34 +00004942 SPIRVInstList.push_back(Inst);
4943 break;
4944 }
4945
David Neto22f144c2017-06-12 14:26:21 -04004946 // spirv.copy_memory.* intrinsics become OpMemoryMemory's.
4947 if (Callee->getName().startswith("spirv.copy_memory")) {
4948 //
4949 // Generate OpCopyMemory.
4950 //
4951
4952 // Ops[0] = Dst ID
4953 // Ops[1] = Src ID
4954 // Ops[2] = Memory Access
4955 // Ops[3] = Alignment
4956
4957 auto IsVolatile =
4958 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4959
4960 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4961 : spv::MemoryAccessMaskNone;
4962
4963 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4964
4965 auto Alignment =
4966 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4967
David Neto257c3892018-04-11 13:19:45 -04004968 SPIRVOperandList Ops;
4969 Ops << MkId(VMap[Call->getArgOperand(0)])
4970 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4971 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004972
David Neto87846742018-04-11 17:36:22 -04004973 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004974
4975 SPIRVInstList.push_back(Inst);
4976
4977 break;
4978 }
4979
SJW2c317da2020-03-23 07:39:13 -05004980 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4981 // Additionally, OpTypeSampledImage is generated.
SJW173c7e92020-03-16 08:44:47 -05004982 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004983 //
4984 // Generate OpSampledImage.
4985 //
4986 // Ops[0] = Result Type ID
4987 // Ops[1] = Image ID
4988 // Ops[2] = Sampler ID
4989 //
4990 SPIRVOperandList Ops;
4991
4992 Value *Image = Call->getArgOperand(0);
4993 Value *Sampler = Call->getArgOperand(1);
4994 Value *Coordinate = Call->getArgOperand(2);
4995
4996 TypeMapType &OpImageTypeMap = getImageTypeMap();
4997 Type *ImageTy = Image->getType()->getPointerElementType();
4998 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004999 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005000 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04005001
5002 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04005003
5004 uint32_t SampledImageID = nextID;
5005
David Neto87846742018-04-11 17:36:22 -04005006 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005007 SPIRVInstList.push_back(Inst);
5008
5009 //
5010 // Generate OpImageSampleExplicitLod.
5011 //
5012 // Ops[0] = Result Type ID
5013 // Ops[1] = Sampled Image ID
5014 // Ops[2] = Coordinate ID
5015 // Ops[3] = Image Operands Type ID
5016 // Ops[4] ... Ops[n] = Operands ID
5017 //
5018 Ops.clear();
5019
alan-bakerf67468c2019-11-25 15:51:49 -05005020 const bool is_int_image = IsIntImageType(Image->getType());
5021 uint32_t result_type = 0;
5022 if (is_int_image) {
5023 result_type = v4int32ID;
5024 } else {
5025 result_type = lookupType(Call->getType());
5026 }
5027
5028 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
5029 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04005030
5031 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04005032 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04005033
alan-bakerf67468c2019-11-25 15:51:49 -05005034 uint32_t final_id = nextID++;
5035 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04005036
alan-bakerf67468c2019-11-25 15:51:49 -05005037 uint32_t image_id = final_id;
5038 if (is_int_image) {
5039 // Int image requires a bitcast from v4int to v4uint.
5040 image_id = nextID++;
5041 }
5042
5043 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005044 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05005045
5046 if (is_int_image) {
5047 // Generate the bitcast.
5048 Ops.clear();
5049 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5050 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5051 SPIRVInstList.push_back(Inst);
5052 }
David Neto22f144c2017-06-12 14:26:21 -04005053 break;
5054 }
5055
alan-baker75090e42020-02-20 11:21:04 -05005056 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05005057 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05005058 Value *Image = Call->getArgOperand(0);
5059 Value *Coordinate = Call->getArgOperand(1);
5060
5061 //
5062 // Generate OpImageFetch
5063 //
5064 // Ops[0] = Result Type ID
5065 // Ops[1] = Image ID
5066 // Ops[2] = Coordinate ID
5067 // Ops[3] = Lod
5068 // Ops[4] = 0
5069 //
5070 SPIRVOperandList Ops;
5071
5072 const bool is_int_image = IsIntImageType(Image->getType());
5073 uint32_t result_type = 0;
5074 if (is_int_image) {
5075 result_type = v4int32ID;
5076 } else {
5077 result_type = lookupType(Call->getType());
5078 }
5079
5080 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
5081 << MkNum(spv::ImageOperandsLodMask);
5082
5083 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5084 Ops << MkId(VMap[CstInt0]);
5085
5086 uint32_t final_id = nextID++;
5087 VMap[&I] = final_id;
5088
5089 uint32_t image_id = final_id;
5090 if (is_int_image) {
5091 // Int image requires a bitcast from v4int to v4uint.
5092 image_id = nextID++;
5093 }
5094
5095 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5096 SPIRVInstList.push_back(Inst);
5097
5098 if (is_int_image) {
5099 // Generate the bitcast.
5100 Ops.clear();
5101 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5102 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5103 SPIRVInstList.push_back(Inst);
5104 }
5105 break;
5106 }
5107
alan-bakerf67468c2019-11-25 15:51:49 -05005108 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05005109 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005110 //
5111 // Generate OpImageWrite.
5112 //
5113 // Ops[0] = Image ID
5114 // Ops[1] = Coordinate ID
5115 // Ops[2] = Texel ID
5116 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5117 // Ops[4] ... Ops[n] = (Optional) Operands ID
5118 //
5119 SPIRVOperandList Ops;
5120
5121 Value *Image = Call->getArgOperand(0);
5122 Value *Coordinate = Call->getArgOperand(1);
5123 Value *Texel = Call->getArgOperand(2);
5124
5125 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005126 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005127 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005128
5129 const bool is_int_image = IsIntImageType(Image->getType());
5130 if (is_int_image) {
5131 // Generate a bitcast to v4int and use it as the texel value.
5132 uint32_t castID = nextID++;
5133 Ops << MkId(v4int32ID) << MkId(TexelID);
5134 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5135 SPIRVInstList.push_back(cast);
5136 Ops.clear();
5137 TexelID = castID;
5138 }
David Neto257c3892018-04-11 13:19:45 -04005139 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005140
David Neto87846742018-04-11 17:36:22 -04005141 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005142 SPIRVInstList.push_back(Inst);
5143 break;
5144 }
5145
alan-bakerce179f12019-12-06 19:02:22 -05005146 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05005147 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005148 //
alan-bakerce179f12019-12-06 19:02:22 -05005149 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005150 //
5151 // Ops[0] = Image ID
5152 //
alan-bakerce179f12019-12-06 19:02:22 -05005153 // Result type has components equal to the dimensionality of the image,
5154 // plus 1 if the image is arrayed.
5155 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005156 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005157 SPIRVOperandList Ops;
5158
5159 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005160 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5161 uint32_t SizesTypeID = 0;
5162
David Neto5c22a252018-03-15 16:07:41 -04005163 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005164 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005165 const uint32_t components =
5166 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005167 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005168 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5169 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005170 SizesTypeID =
5171 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005172 }
David Neto5c22a252018-03-15 16:07:41 -04005173 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005174 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005175 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05005176 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05005177 query_opcode = spv::OpImageQuerySizeLod;
5178 // Need explicit 0 for Lod operand.
5179 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5180 Ops << MkId(VMap[CstInt0]);
5181 }
David Neto5c22a252018-03-15 16:07:41 -04005182
5183 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005184 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005185 SPIRVInstList.push_back(QueryInst);
5186
alan-bakerce179f12019-12-06 19:02:22 -05005187 // May require an extra instruction to create the appropriate result of
5188 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05005189 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05005190 if (dim == 3) {
5191 // get_image_dim returns an int4 for 3D images.
5192 //
5193 // Reset value map entry since we generated an intermediate
5194 // instruction.
5195 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005196
alan-bakerce179f12019-12-06 19:02:22 -05005197 // Implement:
5198 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5199 Ops.clear();
5200 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5201 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005202
alan-bakerce179f12019-12-06 19:02:22 -05005203 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5204 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005205
alan-bakerce179f12019-12-06 19:02:22 -05005206 auto *Inst =
5207 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5208 SPIRVInstList.push_back(Inst);
5209 } else if (dim != components) {
5210 // get_image_dim return an int2 regardless of the arrayedness of the
5211 // image. If the image is arrayed an element must be dropped from the
5212 // query result.
5213 //
5214 // Reset value map entry since we generated an intermediate
5215 // instruction.
5216 VMap[&I] = nextID;
5217
5218 // Implement:
5219 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5220 Ops.clear();
5221 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5222 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5223
5224 auto *Inst =
5225 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5226 SPIRVInstList.push_back(Inst);
5227 }
5228 } else if (components > 1) {
5229 // Reset value map entry since we generated an intermediate instruction.
5230 VMap[&I] = nextID;
5231
5232 // Implement:
5233 // %result = OpCompositeExtract %uint %sizes <component number>
5234 Ops.clear();
5235 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5236
5237 uint32_t component = 0;
5238 if (IsGetImageHeight(Callee))
5239 component = 1;
5240 else if (IsGetImageDepth(Callee))
5241 component = 2;
5242 Ops << MkNum(component);
5243
5244 auto *Inst =
5245 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5246 SPIRVInstList.push_back(Inst);
5247 }
David Neto5c22a252018-03-15 16:07:41 -04005248 break;
5249 }
5250
    // Call instruction is deferred because it needs the function's ID. Record
5252 // slot's location on SPIRVInstructionList.
5253 DeferredInsts.push_back(
5254 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5255
David Neto3fbb4072017-10-16 11:28:14 -04005256 // Check whether the implementation of this call uses an extended
5257 // instruction plus one more value-producing instruction. If so, then
5258 // reserve the id for the extra value-producing slot.
5259 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5260 if (EInst != kGlslExtInstBad) {
5261 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005262 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005263 VMap[&I] = nextID;
5264 nextID++;
5265 }
5266 break;
5267 }
5268 case Instruction::Ret: {
5269 unsigned NumOps = I.getNumOperands();
5270 if (NumOps == 0) {
5271 //
5272 // Generate OpReturn.
5273 //
David Netoef5ba2b2019-12-20 08:35:54 -05005274 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005275 } else {
5276 //
5277 // Generate OpReturnValue.
5278 //
5279
5280 // Ops[0] = Return Value ID
5281 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005282
5283 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005284
David Neto87846742018-04-11 17:36:22 -04005285 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005286 SPIRVInstList.push_back(Inst);
5287 break;
5288 }
5289 break;
5290 }
5291 }
5292}
5293
5294void SPIRVProducerPass::GenerateFuncEpilogue() {
5295 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5296
5297 //
5298 // Generate OpFunctionEnd
5299 //
5300
David Netoef5ba2b2019-12-20 08:35:54 -05005301 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005302 SPIRVInstList.push_back(Inst);
5303}
5304
5305bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005306 // Don't specialize <4 x i8> if i8 is generally supported.
5307 if (clspv::Option::Int8Support())
5308 return false;
5309
David Neto22f144c2017-06-12 14:26:21 -04005310 LLVMContext &Context = Ty->getContext();
5311 if (Ty->isVectorTy()) {
5312 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
5313 Ty->getVectorNumElements() == 4) {
5314 return true;
5315 }
5316 }
5317
5318 return false;
5319}
5320
// Emits SPIR-V for instructions whose translation was deferred during the
// main code-generation walk because they need IDs that were not yet known
// at that point (merge/continue block IDs for branches, incoming block IDs
// for phis, and function IDs for calls). Each deferred record carries the
// original LLVM instruction, an insertion point into the SPIR-V instruction
// list, and the result ID that was reserved for it.
void SPIRVProducerPass::HandleDeferredInstruction() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
  ValueMapType &VMap = getValueMap();
  DeferredInstVecType &DeferredInsts = getDeferredInstVec();

  // Process in reverse so that inserting at a recorded iterator does not
  // disturb the insertion points of records processed later.
  for (auto DeferredInst = DeferredInsts.rbegin();
       DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
    Value *Inst = std::get<0>(*DeferredInst);
    SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
    if (InsertPoint != SPIRVInstList.end()) {
      // OpPhi instructions must be the first instructions in a block, so
      // skip past any phis before inserting.
      while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
        ++InsertPoint;
      }
    }

    if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
      // Check whether this branch needs to be preceded by a merge instruction.
      BasicBlock *BrBB = Br->getParent();
      if (ContinueBlocks.count(BrBB)) {
        //
        // Generate OpLoopMerge.
        //
        // Ops[0] = Merge Block ID
        // Ops[1] = Continue Target ID
        // Ops[2] = Selection Control
        SPIRVOperandList Ops;

        auto MergeBB = MergeBlocks[BrBB];
        auto ContinueBB = ContinueBlocks[BrBB];
        uint32_t MergeBBID = VMap[MergeBB];
        uint32_t ContinueBBID = VMap[ContinueBB];
        Ops << MkId(MergeBBID) << MkId(ContinueBBID)
            << MkNum(spv::LoopControlMaskNone);

        auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
        SPIRVInstList.insert(InsertPoint, MergeInst);
      } else if (MergeBlocks.count(BrBB)) {
        //
        // Generate OpSelectionMerge.
        //
        // Ops[0] = Merge Block ID
        // Ops[1] = Selection Control
        SPIRVOperandList Ops;

        auto MergeBB = MergeBlocks[BrBB];
        uint32_t MergeBBID = VMap[MergeBB];
        Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);

        auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
        SPIRVInstList.insert(InsertPoint, MergeInst);
      }

      if (Br->isConditional()) {
        //
        // Generate OpBranchConditional.
        //
        // Ops[0] = Condition ID
        // Ops[1] = True Label ID
        // Ops[2] = False Label ID
        // Ops[3] ... Ops[n] = Branch weights (Literal Number)
        SPIRVOperandList Ops;

        uint32_t CondID = VMap[Br->getCondition()];
        uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
        uint32_t FalseBBID = VMap[Br->getSuccessor(1)];

        Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);

        auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
        SPIRVInstList.insert(InsertPoint, BrInst);
      } else {
        //
        // Generate OpBranch.
        //
        // Ops[0] = Target Label ID
        SPIRVOperandList Ops;

        uint32_t TargetID = VMap[Br->getSuccessor(0)];
        Ops << MkId(TargetID);

        SPIRVInstList.insert(InsertPoint,
                             new SPIRVInstruction(spv::OpBranch, Ops));
      }
    } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
      if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
          !IsImageType(PHI->getType())) {
        // OpPhi on pointers requires variable pointers.
        setVariablePointersCapabilities(
            PHI->getType()->getPointerAddressSpace());
        if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
          setVariablePointers(true);
        }
      }

      //
      // Generate OpPhi.
      //
      // Ops[0] = Result Type ID
      // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
      SPIRVOperandList Ops;

      Ops << MkId(lookupType(PHI->getType()));

      for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
        uint32_t VarID = VMap[PHI->getIncomingValue(i)];
        uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
        Ops << MkId(VarID) << MkId(ParentID);
      }

      // The phi's result ID was reserved when the instruction was deferred.
      SPIRVInstList.insert(
          InsertPoint,
          new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
    } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
      Function *Callee = Call->getCalledFunction();
      LLVMContext &Context = Callee->getContext();
      auto IntTy = Type::getInt32Ty(Context);
      auto callee_code = Builtins::Lookup(Callee);
      auto callee_name = Callee->getName();
      glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);

      if (EInst) {
        uint32_t &ExtInstImportID = getOpExtInstImportID();

        //
        // Generate OpExtInst.
        //

        // Ops[0] = Result Type ID
        // Ops[1] = Set ID (OpExtInstImport ID)
        // Ops[2] = Instruction Number (Literal Number)
        // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
        SPIRVOperandList Ops;

        Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
            << MkNum(EInst);

        FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
        for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
          Ops << MkId(VMap[Call->getOperand(i)]);
        }

        auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
                                             std::get<2>(*DeferredInst), Ops);
        SPIRVInstList.insert(InsertPoint, ExtInst);

        const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
        if (IndirectExtInst != kGlslExtInstBad) {
          // Generate one more instruction that uses the result of the extended
          // instruction.  Its result id is one more than the id of the
          // extended instruction.
          auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
                                      &VMap, &SPIRVInstList, &InsertPoint](
                                         spv::Op opcode, Constant *constant) {
            //
            // Generate instruction like:
            //   result = opcode constant <extinst-result>
            //
            // Ops[0] = Result Type ID
            // Ops[1] = Operand 0 ;; the constant, suitably splatted
            // Ops[2] = Operand 1 ;; the result of the extended instruction
            SPIRVOperandList Ops;

            Type *resultTy = Call->getType();
            Ops << MkId(lookupType(resultTy));

            if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
              // Vector results need the constant splatted across all lanes.
              constant = ConstantVector::getSplat(
                  {static_cast<unsigned>(vectorTy->getNumElements()), false},
                  constant);
            }
            Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));

            SPIRVInstList.insert(
                InsertPoint, new SPIRVInstruction(
                                 opcode, std::get<2>(*DeferredInst) + 1, Ops));
          };

          switch (IndirectExtInst) {
          case glsl::ExtInstFindUMsb: // Implementing clz
            generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
            break;
          case glsl::ExtInstAcos:  // Implementing acospi
          case glsl::ExtInstAsin:  // Implementing asinpi
          case glsl::ExtInstAtan:  // Implementing atanpi
          case glsl::ExtInstAtan2: // Implementing atan2pi
            generate_extra_inst(
                spv::OpFMul,
                ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
            break;

          default:
            assert(false && "internally inconsistent");
          }
        }

      } else if (callee_code == Builtins::kPopcount) {
        //
        // Generate OpBitCount
        //
        // Ops[0] = Result Type ID
        // Ops[1] = Base ID
        SPIRVOperandList Ops;
        Ops << MkId(lookupType(Call->getType()))
            << MkId(VMap[Call->getOperand(0)]);

        SPIRVInstList.insert(
            InsertPoint, new SPIRVInstruction(spv::OpBitCount,
                                              std::get<2>(*DeferredInst), Ops));

      } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {

        // Generate an OpCompositeConstruct
        SPIRVOperandList Ops;

        // The result type.
        Ops << MkId(lookupType(Call->getType()));

        for (Use &use : Call->arg_operands()) {
          Ops << MkId(VMap[use.get()]);
        }

        SPIRVInstList.insert(
            InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
                                              std::get<2>(*DeferredInst), Ops));

      } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {

        // We have already mapped the call's result value to an ID.
        // Don't generate any code now.

      } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {

        // We have already mapped the call's result value to an ID.
        // Don't generate any code now.

      } else {
        if (Call->getType()->isPointerTy()) {
          // Functions returning pointers require variable pointers.
          setVariablePointersCapabilities(
              Call->getType()->getPointerAddressSpace());
        }

        //
        // Generate OpFunctionCall.
        //

        // Ops[0] = Result Type ID
        // Ops[1] = Callee Function ID
        // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
        SPIRVOperandList Ops;

        Ops << MkId(lookupType(Call->getType()));

        uint32_t CalleeID = VMap[Callee];
        if (CalleeID == 0) {
          errs() << "Can't translate function call. Missing builtin? "
                 << callee_name << " in: " << *Call << "\n";
          // TODO(dneto): Can we error out? Enabling this llvm_unreachable
          // causes an infinite loop. Instead, go ahead and generate
          // the bad function call. A validator will catch the 0-Id.
          // llvm_unreachable("Can't translate function call");
        }

        Ops << MkId(CalleeID);

        FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
        for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
          auto *operand = Call->getOperand(i);
          auto *operand_type = operand->getType();
          // Images and samplers can be passed as function parameters without
          // variable pointers.
          if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
              !IsSamplerType(operand_type)) {
            auto sc =
                GetStorageClass(operand->getType()->getPointerAddressSpace());
            if (sc == spv::StorageClassStorageBuffer) {
              // Passing SSBO by reference requires variable pointers storage
              // buffer.
              setVariablePointersStorageBuffer(true);
            } else if (sc == spv::StorageClassWorkgroup) {
              // Workgroup references require variable pointers if they are not
              // memory object declarations.
              if (auto *operand_call = dyn_cast<CallInst>(operand)) {
                // Workgroup accessor represents a variable reference.
                if (!operand_call->getCalledFunction()->getName().startswith(
                        clspv::WorkgroupAccessorFunction()))
                  setVariablePointers(true);
              } else {
                // Arguments are function parameters.
                if (!isa<Argument>(operand))
                  setVariablePointers(true);
              }
            }
          }
          Ops << MkId(VMap[operand]);
        }

        auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
                                              std::get<2>(*DeferredInst), Ops);
        SPIRVInstList.insert(InsertPoint, CallInst);
      }
    }
  }
}
5625
// Emits decorations that had to be deferred until after the main body was
// generated:
//  - ArrayStride decorations for the types collected in
//    getTypesNeedingArrayStride() (needed by OpPtrAccessChain instructions
//    generated earlier), with the stride taken as the alloc size of the
//    element type; and
//  - SpecId decorations targeting the array-size spec constants of
//    pointer-to-local kernel arguments.
// New decorations are inserted just past the last existing decoration so the
// module keeps the section ordering SPIR-V requires.
void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
  // Fast exit when there is nothing to decorate.
  if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
    return;
  }

  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();

  // Find an iterator pointing just past the last decoration.
  // The lambda returns true only for the first non-decoration instruction
  // that follows at least one decoration.
  bool seen_decorations = false;
  auto DecoInsertPoint =
      std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
                   [&seen_decorations](SPIRVInstruction *Inst) -> bool {
                     const bool is_decoration =
                         Inst->getOpcode() == spv::OpDecorate ||
                         Inst->getOpcode() == spv::OpMemberDecorate;
                     if (is_decoration) {
                       seen_decorations = true;
                       return false;
                     } else {
                       return seen_decorations;
                     }
                   });

  // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
  // instructions we generated earlier.
  for (auto *type : getTypesNeedingArrayStride()) {
    // The stride is measured on the pointed-to / element type.
    Type *elemTy = nullptr;
    if (auto *ptrTy = dyn_cast<PointerType>(type)) {
      elemTy = ptrTy->getElementType();
    } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
      elemTy = arrayTy->getElementType();
    } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
      elemTy = vecTy->getElementType();
    } else {
      errs() << "Unhandled strided type " << *type << "\n";
      llvm_unreachable("Unhandled strided type");
    }

    // Ops[0] = Target ID
    // Ops[1] = Decoration (ArrayStride)
    // Ops[2] = Stride number (Literal Number)
    SPIRVOperandList Ops;

    // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
    const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));

    Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
        << MkNum(stride);

    auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
    SPIRVInstList.insert(DecoInsertPoint, DecoInst);
  }

  // Emit SpecId decorations targeting the array size value.
  for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
       ++spec_id) {
    LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
    SPIRVOperandList Ops;
    Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
        << MkNum(arg_info.spec_id);
    SPIRVInstList.insert(DecoInsertPoint,
                         new SPIRVInstruction(spv::OpDecorate, Ops));
  }
}
5690
David Neto22f144c2017-06-12 14:26:21 -04005691glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005692
5693 const auto &fi = Builtins::Lookup(Name);
5694 switch (fi) {
5695 case Builtins::kClamp: {
5696 auto param_type = fi.getParameter(0);
5697 if (param_type.type_id == Type::FloatTyID) {
5698 return glsl::ExtInst::ExtInstFClamp;
5699 }
5700 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5701 : glsl::ExtInst::ExtInstUClamp;
5702 }
5703 case Builtins::kMax: {
5704 auto param_type = fi.getParameter(0);
5705 if (param_type.type_id == Type::FloatTyID) {
5706 return glsl::ExtInst::ExtInstFMax;
5707 }
5708 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5709 : glsl::ExtInst::ExtInstUMax;
5710 }
5711 case Builtins::kMin: {
5712 auto param_type = fi.getParameter(0);
5713 if (param_type.type_id == Type::FloatTyID) {
5714 return glsl::ExtInst::ExtInstFMin;
5715 }
5716 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5717 : glsl::ExtInst::ExtInstUMin;
5718 }
5719 case Builtins::kAbs:
5720 return glsl::ExtInst::ExtInstSAbs;
5721 case Builtins::kFmax:
5722 return glsl::ExtInst::ExtInstFMax;
5723 case Builtins::kFmin:
5724 return glsl::ExtInst::ExtInstFMin;
5725 case Builtins::kDegrees:
5726 return glsl::ExtInst::ExtInstDegrees;
5727 case Builtins::kRadians:
5728 return glsl::ExtInst::ExtInstRadians;
5729 case Builtins::kMix:
5730 return glsl::ExtInst::ExtInstFMix;
5731 case Builtins::kAcos:
5732 case Builtins::kAcospi:
5733 return glsl::ExtInst::ExtInstAcos;
5734 case Builtins::kAcosh:
5735 return glsl::ExtInst::ExtInstAcosh;
5736 case Builtins::kAsin:
5737 case Builtins::kAsinpi:
5738 return glsl::ExtInst::ExtInstAsin;
5739 case Builtins::kAsinh:
5740 return glsl::ExtInst::ExtInstAsinh;
5741 case Builtins::kAtan:
5742 case Builtins::kAtanpi:
5743 return glsl::ExtInst::ExtInstAtan;
5744 case Builtins::kAtanh:
5745 return glsl::ExtInst::ExtInstAtanh;
5746 case Builtins::kAtan2:
5747 case Builtins::kAtan2pi:
5748 return glsl::ExtInst::ExtInstAtan2;
5749 case Builtins::kCeil:
5750 return glsl::ExtInst::ExtInstCeil;
5751 case Builtins::kSin:
5752 case Builtins::kHalfSin:
5753 case Builtins::kNativeSin:
5754 return glsl::ExtInst::ExtInstSin;
5755 case Builtins::kSinh:
5756 return glsl::ExtInst::ExtInstSinh;
5757 case Builtins::kCos:
5758 case Builtins::kHalfCos:
5759 case Builtins::kNativeCos:
5760 return glsl::ExtInst::ExtInstCos;
5761 case Builtins::kCosh:
5762 return glsl::ExtInst::ExtInstCosh;
5763 case Builtins::kTan:
5764 case Builtins::kHalfTan:
5765 case Builtins::kNativeTan:
5766 return glsl::ExtInst::ExtInstTan;
5767 case Builtins::kTanh:
5768 return glsl::ExtInst::ExtInstTanh;
5769 case Builtins::kExp:
5770 case Builtins::kHalfExp:
5771 case Builtins::kNativeExp:
5772 return glsl::ExtInst::ExtInstExp;
5773 case Builtins::kExp2:
5774 case Builtins::kHalfExp2:
5775 case Builtins::kNativeExp2:
5776 return glsl::ExtInst::ExtInstExp2;
5777 case Builtins::kLog:
5778 case Builtins::kHalfLog:
5779 case Builtins::kNativeLog:
5780 return glsl::ExtInst::ExtInstLog;
5781 case Builtins::kLog2:
5782 case Builtins::kHalfLog2:
5783 case Builtins::kNativeLog2:
5784 return glsl::ExtInst::ExtInstLog2;
5785 case Builtins::kFabs:
5786 return glsl::ExtInst::ExtInstFAbs;
5787 case Builtins::kFma:
5788 return glsl::ExtInst::ExtInstFma;
5789 case Builtins::kFloor:
5790 return glsl::ExtInst::ExtInstFloor;
5791 case Builtins::kLdexp:
5792 return glsl::ExtInst::ExtInstLdexp;
5793 case Builtins::kPow:
5794 case Builtins::kPowr:
5795 case Builtins::kHalfPowr:
5796 case Builtins::kNativePowr:
5797 return glsl::ExtInst::ExtInstPow;
5798 case Builtins::kRound:
5799 return glsl::ExtInst::ExtInstRound;
5800 case Builtins::kSqrt:
5801 case Builtins::kHalfSqrt:
5802 case Builtins::kNativeSqrt:
5803 return glsl::ExtInst::ExtInstSqrt;
5804 case Builtins::kRsqrt:
5805 case Builtins::kHalfRsqrt:
5806 case Builtins::kNativeRsqrt:
5807 return glsl::ExtInst::ExtInstInverseSqrt;
5808 case Builtins::kTrunc:
5809 return glsl::ExtInst::ExtInstTrunc;
5810 case Builtins::kFrexp:
5811 return glsl::ExtInst::ExtInstFrexp;
5812 case Builtins::kFract:
5813 return glsl::ExtInst::ExtInstFract;
5814 case Builtins::kSign:
5815 return glsl::ExtInst::ExtInstFSign;
5816 case Builtins::kLength:
5817 case Builtins::kFastLength:
5818 return glsl::ExtInst::ExtInstLength;
5819 case Builtins::kDistance:
5820 case Builtins::kFastDistance:
5821 return glsl::ExtInst::ExtInstDistance;
5822 case Builtins::kStep:
5823 return glsl::ExtInst::ExtInstStep;
5824 case Builtins::kSmoothstep:
5825 return glsl::ExtInst::ExtInstSmoothStep;
5826 case Builtins::kCross:
5827 return glsl::ExtInst::ExtInstCross;
5828 case Builtins::kNormalize:
5829 case Builtins::kFastNormalize:
5830 return glsl::ExtInst::ExtInstNormalize;
5831 default:
5832 break;
5833 }
5834
David Neto22f144c2017-06-12 14:26:21 -04005835 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005836 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5837 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5838 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005839 .Default(kGlslExtInstBad);
5840}
5841
5842glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005843 switch (Builtins::Lookup(Name)) {
5844 case Builtins::kClz:
5845 return glsl::ExtInst::ExtInstFindUMsb;
5846 case Builtins::kAcospi:
5847 return glsl::ExtInst::ExtInstAcos;
5848 case Builtins::kAsinpi:
5849 return glsl::ExtInst::ExtInstAsin;
5850 case Builtins::kAtanpi:
5851 return glsl::ExtInst::ExtInstAtan;
5852 case Builtins::kAtan2pi:
5853 return glsl::ExtInst::ExtInstAtan2;
5854 default:
5855 break;
5856 }
5857 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005858}
5859
alan-bakerb6b09dc2018-11-08 16:59:28 -05005860glsl::ExtInst
5861SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005862 auto direct = getExtInstEnum(Name);
5863 if (direct != kGlslExtInstBad)
5864 return direct;
5865 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005866}
5867
David Neto22f144c2017-06-12 14:26:21 -04005868void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005869 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005870}
5871
5872void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5873 WriteOneWord(Inst->getResultID());
5874}
5875
5876void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5877 // High 16 bit : Word Count
5878 // Low 16 bit : Opcode
5879 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005880 const uint32_t count = Inst->getWordCount();
5881 if (count > 65535) {
5882 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5883 llvm_unreachable("Word count too high");
5884 }
David Neto22f144c2017-06-12 14:26:21 -04005885 Word |= Inst->getWordCount() << 16;
5886 WriteOneWord(Word);
5887}
5888
David Netoef5ba2b2019-12-20 08:35:54 -05005889void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005890 SPIRVOperandType OpTy = Op->getType();
5891 switch (OpTy) {
5892 default: {
5893 llvm_unreachable("Unsupported SPIRV Operand Type???");
5894 break;
5895 }
5896 case SPIRVOperandType::NUMBERID: {
5897 WriteOneWord(Op->getNumID());
5898 break;
5899 }
5900 case SPIRVOperandType::LITERAL_STRING: {
5901 std::string Str = Op->getLiteralStr();
5902 const char *Data = Str.c_str();
5903 size_t WordSize = Str.size() / 4;
5904 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5905 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5906 }
5907
5908 uint32_t Remainder = Str.size() % 4;
5909 uint32_t LastWord = 0;
5910 if (Remainder) {
5911 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5912 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5913 }
5914 }
5915
5916 WriteOneWord(LastWord);
5917 break;
5918 }
5919 case SPIRVOperandType::LITERAL_INTEGER:
5920 case SPIRVOperandType::LITERAL_FLOAT: {
5921 auto LiteralNum = Op->getLiteralNum();
5922 // TODO: Handle LiteranNum carefully.
5923 for (auto Word : LiteralNum) {
5924 WriteOneWord(Word);
5925 }
5926 break;
5927 }
5928 }
5929}
5930
// Serializes every instruction in SPIRVInstList to the binary output stream.
// Opcodes fall into three encoding groups that differ in where the result id
// (and result type) appear relative to the operand list.
void SPIRVProducerPass::WriteSPIRVBinary() {
  SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();

  for (auto Inst : SPIRVInstList) {
    const auto &Ops = Inst->getOperands();
    spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());

    switch (Opcode) {
    default: {
      // Any opcode not listed below has no encoding implemented here.
      errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
      llvm_unreachable("Unsupported SPIRV instruction");
      break;
    }
    // Group 1: instructions with no result id. Encoded as the word-count /
    // opcode word followed by every operand in order.
    case spv::OpCapability:
    case spv::OpExtension:
    case spv::OpMemoryModel:
    case spv::OpEntryPoint:
    case spv::OpExecutionMode:
    case spv::OpSource:
    case spv::OpDecorate:
    case spv::OpMemberDecorate:
    case spv::OpBranch:
    case spv::OpBranchConditional:
    case spv::OpSelectionMerge:
    case spv::OpLoopMerge:
    case spv::OpStore:
    case spv::OpImageWrite:
    case spv::OpReturnValue:
    case spv::OpControlBarrier:
    case spv::OpMemoryBarrier:
    case spv::OpReturn:
    case spv::OpFunctionEnd:
    case spv::OpCopyMemory: {
      WriteWordCountAndOpcode(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    // Group 2: instructions whose result id immediately follows the opcode
    // word (type declarations, labels, imports); all Ops are trailing
    // operands.
    case spv::OpTypeBool:
    case spv::OpTypeVoid:
    case spv::OpTypeSampler:
    case spv::OpLabel:
    case spv::OpExtInstImport:
    case spv::OpTypePointer:
    case spv::OpTypeRuntimeArray:
    case spv::OpTypeStruct:
    case spv::OpTypeImage:
    case spv::OpTypeSampledImage:
    case spv::OpTypeInt:
    case spv::OpTypeFloat:
    case spv::OpTypeArray:
    case spv::OpTypeVector:
    case spv::OpTypeFunction: {
      WriteWordCountAndOpcode(Inst);
      WriteResultID(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    // Group 3: instructions encoded as <opcode+count> <result type = Ops[0]>
    // <result id> <remaining operands Ops[1..]>.
    case spv::OpFunction:
    case spv::OpFunctionParameter:
    case spv::OpAccessChain:
    case spv::OpPtrAccessChain:
    case spv::OpInBoundsAccessChain:
    case spv::OpUConvert:
    case spv::OpSConvert:
    case spv::OpConvertFToU:
    case spv::OpConvertFToS:
    case spv::OpConvertUToF:
    case spv::OpConvertSToF:
    case spv::OpFConvert:
    case spv::OpConvertPtrToU:
    case spv::OpConvertUToPtr:
    case spv::OpBitcast:
    case spv::OpFNegate:
    case spv::OpIAdd:
    case spv::OpFAdd:
    case spv::OpISub:
    case spv::OpFSub:
    case spv::OpIMul:
    case spv::OpFMul:
    case spv::OpUDiv:
    case spv::OpSDiv:
    case spv::OpFDiv:
    case spv::OpUMod:
    case spv::OpSRem:
    case spv::OpFRem:
    case spv::OpUMulExtended:
    case spv::OpSMulExtended:
    case spv::OpBitwiseOr:
    case spv::OpBitwiseXor:
    case spv::OpBitwiseAnd:
    case spv::OpNot:
    case spv::OpShiftLeftLogical:
    case spv::OpShiftRightLogical:
    case spv::OpShiftRightArithmetic:
    case spv::OpBitCount:
    case spv::OpCompositeConstruct:
    case spv::OpCompositeExtract:
    case spv::OpVectorExtractDynamic:
    case spv::OpCompositeInsert:
    case spv::OpCopyObject:
    case spv::OpVectorInsertDynamic:
    case spv::OpVectorShuffle:
    case spv::OpIEqual:
    case spv::OpINotEqual:
    case spv::OpUGreaterThan:
    case spv::OpUGreaterThanEqual:
    case spv::OpULessThan:
    case spv::OpULessThanEqual:
    case spv::OpSGreaterThan:
    case spv::OpSGreaterThanEqual:
    case spv::OpSLessThan:
    case spv::OpSLessThanEqual:
    case spv::OpFOrdEqual:
    case spv::OpFOrdGreaterThan:
    case spv::OpFOrdGreaterThanEqual:
    case spv::OpFOrdLessThan:
    case spv::OpFOrdLessThanEqual:
    case spv::OpFOrdNotEqual:
    case spv::OpFUnordEqual:
    case spv::OpFUnordGreaterThan:
    case spv::OpFUnordGreaterThanEqual:
    case spv::OpFUnordLessThan:
    case spv::OpFUnordLessThanEqual:
    case spv::OpFUnordNotEqual:
    case spv::OpExtInst:
    case spv::OpIsInf:
    case spv::OpIsNan:
    case spv::OpAny:
    case spv::OpAll:
    case spv::OpUndef:
    case spv::OpConstantNull:
    case spv::OpLogicalOr:
    case spv::OpLogicalAnd:
    case spv::OpLogicalNot:
    case spv::OpLogicalNotEqual:
    case spv::OpConstantComposite:
    case spv::OpSpecConstantComposite:
    case spv::OpConstantTrue:
    case spv::OpConstantFalse:
    case spv::OpConstant:
    case spv::OpSpecConstant:
    case spv::OpVariable:
    case spv::OpFunctionCall:
    case spv::OpSampledImage:
    case spv::OpImageFetch:
    case spv::OpImageSampleExplicitLod:
    case spv::OpImageQuerySize:
    case spv::OpImageQuerySizeLod:
    case spv::OpSelect:
    case spv::OpPhi:
    case spv::OpLoad:
    case spv::OpAtomicIAdd:
    case spv::OpAtomicISub:
    case spv::OpAtomicExchange:
    case spv::OpAtomicIIncrement:
    case spv::OpAtomicIDecrement:
    case spv::OpAtomicCompareExchange:
    case spv::OpAtomicUMin:
    case spv::OpAtomicSMin:
    case spv::OpAtomicUMax:
    case spv::OpAtomicSMax:
    case spv::OpAtomicAnd:
    case spv::OpAtomicOr:
    case spv::OpAtomicXor:
    case spv::OpDot: {
      WriteWordCountAndOpcode(Inst);
      WriteOperand(Ops[0]);
      WriteResultID(Inst);
      for (uint32_t i = 1; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    }
  }
}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006111
alan-bakerb6b09dc2018-11-08 16:59:28 -05006112bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04006113 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006114 case Type::HalfTyID:
6115 case Type::FloatTyID:
6116 case Type::DoubleTyID:
6117 case Type::IntegerTyID:
6118 case Type::VectorTyID:
6119 return true;
6120 case Type::PointerTyID: {
6121 const PointerType *pointer_type = cast<PointerType>(type);
6122 if (pointer_type->getPointerAddressSpace() !=
6123 AddressSpace::UniformConstant) {
6124 auto pointee_type = pointer_type->getPointerElementType();
6125 if (pointee_type->isStructTy() &&
6126 cast<StructType>(pointee_type)->isOpaque()) {
6127 // Images and samplers are not nullable.
6128 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006129 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006130 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006131 return true;
6132 }
6133 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04006134 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05006135 case Type::StructTyID: {
6136 const StructType *struct_type = cast<StructType>(type);
6137 // Images and samplers are not nullable.
6138 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006139 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006140 for (const auto element : struct_type->elements()) {
6141 if (!IsTypeNullable(element))
6142 return false;
6143 }
6144 return true;
6145 }
6146 default:
6147 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006148 }
6149}
Alan Bakerfcda9482018-10-02 17:09:59 -04006150
// Reads the remapped-type metadata emitted by earlier clspv passes and caches
// it in RemappedUBOTypeOffsets and RemappedUBOTypeSizes, so later size/offset
// queries can prefer the remapped layout over the module's DataLayout.
void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
  if (auto *offsets_md =
          module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a vector of offsets.
    for (const auto *operand : offsets_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
      std::vector<uint32_t> offsets;
      for (const Metadata *offset_md : offset_vector->operands()) {
        const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
        // Offsets are stored as constant ints; truncate to 32 bits.
        offsets.push_back(static_cast<uint32_t>(
            cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
      }
      RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
    }
  }

  if (auto *sizes_md =
          module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a triple of sizes: type size in
    // bits, store size and alloc size.
    for (const auto *operand : sizes_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
      uint64_t type_size_in_bits =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
              ->getZExtValue();
      uint64_t type_store_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
              ->getZExtValue();
      uint64_t type_alloc_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
              ->getZExtValue();
      // The tuple order matches the accessors: <0> size-in-bits,
      // <1> store size, <2> alloc size.
      RemappedUBOTypeSizes.insert(std::make_pair(
          type, std::make_tuple(type_size_in_bits, type_store_size,
                                type_alloc_size)));
    }
  }
}
6199
6200uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6201 const DataLayout &DL) {
6202 auto iter = RemappedUBOTypeSizes.find(type);
6203 if (iter != RemappedUBOTypeSizes.end()) {
6204 return std::get<0>(iter->second);
6205 }
6206
6207 return DL.getTypeSizeInBits(type);
6208}
6209
6210uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6211 auto iter = RemappedUBOTypeSizes.find(type);
6212 if (iter != RemappedUBOTypeSizes.end()) {
6213 return std::get<1>(iter->second);
6214 }
6215
6216 return DL.getTypeStoreSize(type);
6217}
6218
6219uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6220 auto iter = RemappedUBOTypeSizes.find(type);
6221 if (iter != RemappedUBOTypeSizes.end()) {
6222 return std::get<2>(iter->second);
6223 }
6224
6225 return DL.getTypeAllocSize(type);
6226}
alan-baker5b86ed72019-02-15 08:26:50 -05006227
Kévin Petitbbbda972020-03-03 19:16:31 +00006228uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
6229 StructType *type, unsigned member, const DataLayout &DL) {
6230 const auto StructLayout = DL.getStructLayout(type);
6231 // Search for the correct offsets if this type was remapped.
6232 std::vector<uint32_t> *offsets = nullptr;
6233 auto iter = RemappedUBOTypeOffsets.find(type);
6234 if (iter != RemappedUBOTypeOffsets.end()) {
6235 offsets = &iter->second;
6236 }
6237 auto ByteOffset =
6238 static_cast<uint32_t>(StructLayout->getElementOffset(member));
6239 if (offsets) {
6240 ByteOffset = (*offsets)[member];
6241 }
6242
6243 return ByteOffset;
6244}
6245
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006246void SPIRVProducerPass::setVariablePointersCapabilities(
6247 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006248 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6249 setVariablePointersStorageBuffer(true);
6250 } else {
6251 setVariablePointers(true);
6252 }
6253}
6254
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006255Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006256 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6257 return GetBasePointer(gep->getPointerOperand());
6258 }
6259
6260 // Conservatively return |v|.
6261 return v;
6262}
6263
6264bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6265 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6266 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6267 if (lhs_call->getCalledFunction()->getName().startswith(
6268 clspv::ResourceAccessorFunction()) &&
6269 rhs_call->getCalledFunction()->getName().startswith(
6270 clspv::ResourceAccessorFunction())) {
6271 // For resource accessors, match descriptor set and binding.
6272 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6273 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6274 return true;
6275 } else if (lhs_call->getCalledFunction()->getName().startswith(
6276 clspv::WorkgroupAccessorFunction()) &&
6277 rhs_call->getCalledFunction()->getName().startswith(
6278 clspv::WorkgroupAccessorFunction())) {
6279 // For workgroup resources, match spec id.
6280 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6281 return true;
6282 }
6283 }
6284 }
6285
6286 return false;
6287}
6288
// Returns true when |inst| — a select or phi producing a storage-buffer
// pointer — chooses only among pointers that trace back to the same base
// object. Null constants (and, under -hack-undef, undef values) are treated
// as compatible with any base, and distinct accessor calls naming the same
// resource (per sameResource) also count as the same object. Conservatively
// returns false for anything it cannot prove.
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is a null, then we satisfy the same
    // object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    // Distinct accessor calls may still name the same resource.
    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    // |value| tracks the first non-null (and non-undef, under -hack-undef)
    // base seen; every later incoming base must match it.
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the
      // same object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}
alan-bakere9308012019-03-15 10:25:13 -04006350
// Returns true when the SSBO argument |Arg| may be bound to a coherent
// resource at some call site. Works backwards: seeds a worklist with the
// actual argument passed for |Arg| at every call of its function, then
// traces through function arguments and pointer-typed operands until a clspv
// resource accessor call is found whose coherent operand (arg operand 5) is
// set.
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  // Seed the worklist with the actual values passed for |Arg|.
  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    // Skip values already examined so the traversal terminates on cycles.
    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        resource_call->getCalledFunction()->getName().startswith(
            clspv::ResourceAccessorFunction())) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables (only global address space pointers are of interest).
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}
alan-baker06cad652019-12-03 17:56:47 -05006408
// Precomputes structured-CFG bookkeeping for every defined function: for each
// loop header, its merge block and continue target (stored in MergeBlocks and
// ContinueBlocks), and for each structured conditional branch, its merge
// block. Relies on StructurizeCFG having already shaped each function's CFG
// into single-entry/single-exit regions.
void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    // Walk blocks in structured order so loop headers are seen before the
    // conditional branches inside them.
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        // The loop's single exit becomes the merge block.
        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // StructurizeCFG pass converts CFG into triangle shape and the cfg
          // has regions with single entry/exit. As a result, loop should not
          // have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          // Single-block loop: the header is its own continue target.
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
          // block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether block dominates block with back-edge.
            // The loop latch is the single block with a back-edge. If it was
            // possible, StructurizeCFG made the loop conform to this
            // requirement, otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        // Determine whether this conditional branch is a loop back-edge for
        // any enclosing loop; back-edges do not get selection merges.
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // StructurizeCFG pass already manipulated CFG. Just use false block
            // of branch instruction as merge block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}