// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
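// Sentinel meaning "no GLSL extended instruction applies".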
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
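      // For example, "abc" plus its null terminator fits in one 32-bit word,
      // while "abcd" needs two.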
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
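
// Typical usage: operands are accumulated with operator<< and then handed to
// a SPIRVInstruction, which takes ownership of them. For example (the ids
// here are placeholders):
//   SPIRVOperandList Ops;
//   Ops << MkId(some_type_id) << MkNum(some_literal);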

struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with a single
  // operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with a single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};
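// Note: in the SPIR-V binary the first word of each instruction packs the
// word count (high 16 bits) together with the opcode (low 16 bits), which is
// why WordCount starts at 1, or at 2 when a result ID word is present.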

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

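  // Returns the SPIR-V id already assigned to |Ty| in TypeMap. A pointer
  // outside the UniformConstant address space whose pointee is an opaque
  // struct is looked up by its pointee type instead. Aborts if no id has been
  // generated for the type.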
  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is a phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but with the pointer-to-constant parameter replaced by a
  // pointer-to-ModuleScopePrivate parameter.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
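    // Reassemble the byte stream into 32-bit words, least-significant byte
    // first; this assumes the words were written out on a little-endian host.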
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR for each function: things such as global
  // variables for arguments, constants, and pointer types for argument
  // access. This information is artificial because we need Vulkan SPIR-V
  // output. This function is executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // OpSelect needs constants 0 and 1, so the constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components = dim;
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
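      // 64 KB is an assumed ceiling here; it matches the minimum
      // CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE an OpenCL device must support.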
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

David Neto22f144c2017-06-12 14:26:21 -04001116bool SPIRVProducerPass::FindExtInst(Module &M) {
1117 LLVMContext &Context = M.getContext();
1118 bool HasExtInst = false;
1119
1120 for (Function &F : M) {
1121 for (BasicBlock &BB : F) {
1122 for (Instruction &I : BB) {
1123 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1124 Function *Callee = Call->getCalledFunction();
1125 // Check whether this call maps to a GLSL extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001126 auto callee_name = Callee->getName();
1127 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1128 const glsl::ExtInst IndirectEInst =
1129 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001130
David Neto3fbb4072017-10-16 11:28:14 -04001131 HasExtInst |=
1132 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1133
1134 if (IndirectEInst) {
1135 // Register extra constants if needed.
1136
1137 // Registers a type and constant for computing the result of the
1138 // given instruction. If the result of the instruction is a vector,
1139 // then make a splat vector constant with the same number of
1140 // elements.
1141 auto register_constant = [this, &I](Constant *constant) {
1142 FindType(constant->getType());
1143 FindConstant(constant);
1144 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1145 // Register the splat vector of the value with the same
1146 // width as the result of the instruction.
1147 auto *vec_constant = ConstantVector::getSplat(
1148 static_cast<unsigned>(vectorTy->getNumElements()),
1149 constant);
1150 FindConstant(vec_constant);
1151 FindType(vec_constant->getType());
1152 }
1153 };
1154 switch (IndirectEInst) {
1155 case glsl::ExtInstFindUMsb:
1156 // clz needs OpExtInst and OpISub with constant 31, or splat
1157 // vector of 31. Add it to the constant list here.
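// (e.g. for a nonzero i32 x, clz(x) is computed as 31 - FindUMsb(x).)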
1158 register_constant(
1159 ConstantInt::get(Type::getInt32Ty(Context), 31));
1160 break;
1161 case glsl::ExtInstAcos:
1162 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001163 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001164 case glsl::ExtInstAtan2:
1165 // We need 1/pi for acospi, asinpi, atanpi, and atan2pi.
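// (e.g. acospi(x) is then emitted as acos(x) * (1/pi).)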
1166 register_constant(
1167 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1168 break;
1169 default:
1170 assert(false && "internally inconsistent");
1171 }
David Neto22f144c2017-06-12 14:26:21 -04001172 }
1173 }
1174 }
1175 }
1176 }
1177
1178 return HasExtInst;
1179}
1180
1181void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1182 // Investigate global variable's type.
1183 FindType(GV.getType());
1184}
1185
1186void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1187 // Investigate function's type.
1188 FunctionType *FTy = F.getFunctionType();
1189
1190 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1191 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001192 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001193 if (GlobalConstFuncTyMap.count(FTy)) {
1194 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1195 SmallVector<Type *, 4> NewFuncParamTys;
1196 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1197 Type *ParamTy = FTy->getParamType(i);
1198 if (i == GVCstArgIdx) {
1199 Type *EleTy = ParamTy->getPointerElementType();
1200 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1201 }
1202
1203 NewFuncParamTys.push_back(ParamTy);
1204 }
1205
1206 FunctionType *NewFTy =
1207 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1208 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1209 FTy = NewFTy;
1210 }
1211
1212 FindType(FTy);
1213 } else {
1214 // Kernel arguments are not passed as SPIR-V function parameters, so create
1215 // a parameterless function type and add it to the type map.
1216 SmallVector<Type *, 4> NewFuncParamTys;
1217 FunctionType *NewFTy =
1218 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1219 FindType(NewFTy);
1220 }
1221
1222 // Investigate instructions' type in function body.
1223 for (BasicBlock &BB : F) {
1224 for (Instruction &I : BB) {
1225 if (isa<ShuffleVectorInst>(I)) {
1226 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1227 // Ignore type for mask of shuffle vector instruction.
1228 if (i == 2) {
1229 continue;
1230 }
1231
1232 Value *Op = I.getOperand(i);
1233 if (!isa<MetadataAsValue>(Op)) {
1234 FindType(Op->getType());
1235 }
1236 }
1237
1238 FindType(I.getType());
1239 continue;
1240 }
1241
David Neto862b7d82018-06-14 18:48:37 -04001242 CallInst *Call = dyn_cast<CallInst>(&I);
1243
1244 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001245 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001246 // This is a fake call representing access to a resource variable.
1247 // We handle that elsewhere.
1248 continue;
1249 }
1250
Alan Baker202c8c72018-08-13 13:47:44 -04001251 if (Call && Call->getCalledFunction()->getName().startswith(
1252 clspv::WorkgroupAccessorFunction())) {
1253 // This is a fake call representing access to a workgroup variable.
1254 // We handle that elsewhere.
1255 continue;
1256 }
1257
alan-bakerf083bed2020-01-29 08:15:42 -05001258 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1259 // OpCompositeExtract which takes literal values for indices. As a result
1260 // don't map the type of indices.
1261 if (I.getOpcode() == Instruction::ExtractValue) {
1262 FindType(I.getOperand(0)->getType());
1263 continue;
1264 }
1265 if (I.getOpcode() == Instruction::InsertValue) {
1266 FindType(I.getOperand(0)->getType());
1267 FindType(I.getOperand(1)->getType());
1268 continue;
1269 }
1270
1271 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1272 // the index is a constant. In such a case don't map the index type.
1273 if (I.getOpcode() == Instruction::ExtractElement) {
1274 FindType(I.getOperand(0)->getType());
1275 Value *op1 = I.getOperand(1);
1276 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1277 FindType(op1->getType());
1278 }
1279 continue;
1280 }
1281 if (I.getOpcode() == Instruction::InsertElement) {
1282 FindType(I.getOperand(0)->getType());
1283 FindType(I.getOperand(1)->getType());
1284 Value *op2 = I.getOperand(2);
1285 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1286 FindType(op2->getType());
1287 }
1288 continue;
1289 }
1290
David Neto22f144c2017-06-12 14:26:21 -04001291 // Work through the operands of the instruction.
1292 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1293 Value *const Op = I.getOperand(i);
1294 // If any of the operands is a constant, find the type!
1295 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1296 FindType(Op->getType());
1297 }
1298 }
1299
1300 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001301 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001302 // Don't check a call instruction's operand types here.
1303 break;
1304 }
Alan Baker202c8c72018-08-13 13:47:44 -04001305 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1306 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1307 clspv::WorkgroupAccessorFunction())) {
1308 // This is a fake call representing access to a workgroup variable.
1309 // We handle that elsewhere.
1310 continue;
1311 }
1312 }
David Neto22f144c2017-06-12 14:26:21 -04001313 if (!isa<MetadataAsValue>(&Op)) {
1314 FindType(Op->getType());
1315 continue;
1316 }
1317 }
1318
David Neto22f144c2017-06-12 14:26:21 -04001319 // We don't want to track the type of this call as we are going to replace
1320 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001321 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001322 Call->getCalledFunction()->getName())) {
1323 continue;
1324 }
1325
1326 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1327 // If gep's base operand has ModuleScopePrivate address space, make gep
1328 // return ModuleScopePrivate address space.
1329 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1330 // Add pointer type with private address space for global constant to
1331 // type list.
1332 Type *EleTy = I.getType()->getPointerElementType();
1333 Type *NewPTy =
1334 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1335
1336 FindType(NewPTy);
1337 continue;
1338 }
1339 }
1340
1341 FindType(I.getType());
1342 }
1343 }
1344}
1345
David Neto862b7d82018-06-14 18:48:37 -04001346void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1347 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001348 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001349 0 < getSamplerMap().size()) {
1350 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1351 if (!SamplerStructTy) {
1352 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1353 }
1354
1355 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1356
1357 FindType(SamplerTy);
1358 }
1359}
1360
1361void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1362 // Record types so they are generated.
1363 TypesNeedingLayout.reset();
1364 StructTypesNeedingBlock.reset();
1365
1366 // To match older clspv codegen, generate the float type first if required
1367 // for images.
1368 for (const auto *info : ModuleOrderedResourceVars) {
1369 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1370 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001371 if (IsIntImageType(info->var_fn->getReturnType())) {
1372 // Nothing for now...
1373 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1374 FindType(Type::getInt32Ty(M.getContext()));
1375 }
1376
1377 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001378 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001379 }
1380 }
1381
1382 for (const auto *info : ModuleOrderedResourceVars) {
1383 Type *type = info->var_fn->getReturnType();
1384
1385 switch (info->arg_kind) {
1386 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001387 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001388 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1389 StructTypesNeedingBlock.insert(sty);
1390 } else {
1391 errs() << *type << "\n";
1392 llvm_unreachable("Buffer arguments must map to structures!");
1393 }
1394 break;
1395 case clspv::ArgKind::Pod:
1396 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1397 StructTypesNeedingBlock.insert(sty);
1398 } else {
1399 errs() << *type << "\n";
1400 llvm_unreachable("POD arguments must map to structures!");
1401 }
1402 break;
1403 case clspv::ArgKind::ReadOnlyImage:
1404 case clspv::ArgKind::WriteOnlyImage:
1405 case clspv::ArgKind::Sampler:
1406 // Sampler and image types map to the pointee type but
1407 // in the uniform constant address space.
1408 type = PointerType::get(type->getPointerElementType(),
1409 clspv::AddressSpace::UniformConstant);
1410 break;
1411 default:
1412 break;
1413 }
1414
1415 // The converted type is the type of the OpVariable we will generate.
1416 // If the pointee type is an array of size zero, FindType will convert it
1417 // to a runtime array.
1418 FindType(type);
1419 }
1420
alan-bakerdcd97412019-09-16 15:32:30 -04001421 // If module constants are clustered in a storage buffer then that struct
1422 // needs layout decorations.
1423 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1424 for (GlobalVariable &GV : M.globals()) {
1425 PointerType *PTy = cast<PointerType>(GV.getType());
1426 const auto AS = PTy->getAddressSpace();
1427 const bool module_scope_constant_external_init =
1428 (AS == AddressSpace::Constant) && GV.hasInitializer();
1429 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1430 if (module_scope_constant_external_init &&
1431 spv::BuiltInMax == BuiltinType) {
1432 StructTypesNeedingBlock.insert(
1433 cast<StructType>(PTy->getPointerElementType()));
1434 }
1435 }
1436 }
1437
David Neto862b7d82018-06-14 18:48:37 -04001438 // Traverse the arrays and structures underneath each Block, and
1439 // mark them as needing layout.
1440 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1441 StructTypesNeedingBlock.end());
1442 while (!work_list.empty()) {
1443 Type *type = work_list.back();
1444 work_list.pop_back();
1445 TypesNeedingLayout.insert(type);
1446 switch (type->getTypeID()) {
1447 case Type::ArrayTyID:
1448 work_list.push_back(type->getArrayElementType());
1449 if (!Hack_generate_runtime_array_stride_early) {
1450 // Remember this array type for deferred decoration.
1451 TypesNeedingArrayStride.insert(type);
1452 }
1453 break;
1454 case Type::StructTyID:
1455 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1456 work_list.push_back(elem_ty);
1457 }
1458 default:
1459 // This type and its contained types don't get layout.
1460 break;
1461 }
1462 }
1463}
1464
Alan Baker202c8c72018-08-13 13:47:44 -04001465void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1466 // The SpecId assignment for pointer-to-local arguments is recorded in
1467 // module-level metadata. Translate that information into local argument
1468 // information.
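// Each operand of that metadata node is assumed to be a tuple of the form
//   !{<kernel function>, i32 <argument index>, i32 <spec id>}
// which is exactly what is decoded below.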
1469 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001470 if (!nmd)
1471 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001472 for (auto operand : nmd->operands()) {
1473 MDTuple *tuple = cast<MDTuple>(operand);
1474 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1475 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001476 ConstantAsMetadata *arg_index_md =
1477 cast<ConstantAsMetadata>(tuple->getOperand(1));
1478 int arg_index = static_cast<int>(
1479 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1480 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001481
1482 ConstantAsMetadata *spec_id_md =
1483 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001484 int spec_id = static_cast<int>(
1485 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001486
1487 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1488 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001489 if (LocalSpecIdInfoMap.count(spec_id))
1490 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001491
1492 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1493 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1494 nextID + 1, nextID + 2,
1495 nextID + 3, spec_id};
1496 LocalSpecIdInfoMap[spec_id] = info;
1497 nextID += 4;
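// Four consecutive IDs are reserved here; three of them become the
// array-size spec constant, the array type, and the pointer-to-array type
// emitted later in GenerateSPIRVTypes().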
1498
1499 // Ensure the types necessary for this argument get generated.
1500 Type *IdxTy = Type::getInt32Ty(M.getContext());
1501 FindConstant(ConstantInt::get(IdxTy, 0));
1502 FindType(IdxTy);
1503 FindType(arg->getType());
1504 }
1505}
1506
David Neto22f144c2017-06-12 14:26:21 -04001507void SPIRVProducerPass::FindType(Type *Ty) {
1508 TypeList &TyList = getTypeList();
1509
1510 if (0 != TyList.idFor(Ty)) {
1511 return;
1512 }
1513
1514 if (Ty->isPointerTy()) {
1515 auto AddrSpace = Ty->getPointerAddressSpace();
1516 if ((AddressSpace::Constant == AddrSpace) ||
1517 (AddressSpace::Global == AddrSpace)) {
1518 auto PointeeTy = Ty->getPointerElementType();
1519
1520 if (PointeeTy->isStructTy() &&
1521 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1522 FindType(PointeeTy);
1523 auto ActualPointerTy =
1524 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1525 FindType(ActualPointerTy);
1526 return;
1527 }
1528 }
1529 }
1530
David Neto862b7d82018-06-14 18:48:37 -04001531 // By convention, LLVM array type with 0 elements will map to
1532 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1533 // has a constant number of elements. We need the type of that length
1534 // constant (an i32) to be available.
1535 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1536 if (arrayTy->getNumElements() > 0) {
1537 LLVMContext &Context = Ty->getContext();
1538 FindType(Type::getInt32Ty(Context));
1539 }
David Neto22f144c2017-06-12 14:26:21 -04001540 }
1541
1542 for (Type *SubTy : Ty->subtypes()) {
1543 FindType(SubTy);
1544 }
1545
1546 TyList.insert(Ty);
1547}
1548
1549void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1550 // If the global variable has a (non undef) initializer.
1551 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001552 // Generate the constant if it's not the initializer to a module scope
1553 // constant that we will expect in a storage buffer.
1554 const bool module_scope_constant_external_init =
1555 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1556 clspv::Option::ModuleConstantsInStorageBuffer();
1557 if (!module_scope_constant_external_init) {
1558 FindConstant(GV.getInitializer());
1559 }
David Neto22f144c2017-06-12 14:26:21 -04001560 }
1561}
1562
1563void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1564 // Investigate constants in function body.
1565 for (BasicBlock &BB : F) {
1566 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001567 if (auto *call = dyn_cast<CallInst>(&I)) {
1568 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001569 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001570 // We've handled these constants elsewhere, so skip it.
1571 continue;
1572 }
Alan Baker202c8c72018-08-13 13:47:44 -04001573 if (name.startswith(clspv::ResourceAccessorFunction())) {
1574 continue;
1575 }
1576 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001577 continue;
1578 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001579 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1580 // Skip the first operand that has the SPIR-V Opcode
1581 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1582 if (isa<Constant>(I.getOperand(i)) &&
1583 !isa<GlobalValue>(I.getOperand(i))) {
1584 FindConstant(I.getOperand(i));
1585 }
1586 }
1587 continue;
1588 }
David Neto22f144c2017-06-12 14:26:21 -04001589 }
1590
1591 if (isa<AllocaInst>(I)) {
1592 // An alloca instruction has a constant for the number of elements. Ignore it.
1593 continue;
1594 } else if (isa<ShuffleVectorInst>(I)) {
1595 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1596 // Ignore constant for mask of shuffle vector instruction.
1597 if (i == 2) {
1598 continue;
1599 }
1600
1601 if (isa<Constant>(I.getOperand(i)) &&
1602 !isa<GlobalValue>(I.getOperand(i))) {
1603 FindConstant(I.getOperand(i));
1604 }
1605 }
1606
1607 continue;
1608 } else if (isa<InsertElementInst>(I)) {
1609 // Handle InsertElement with <4 x i8> specially.
1610 Type *CompositeTy = I.getOperand(0)->getType();
1611 if (is4xi8vec(CompositeTy)) {
1612 LLVMContext &Context = CompositeTy->getContext();
1613 if (isa<Constant>(I.getOperand(0))) {
1614 FindConstant(I.getOperand(0));
1615 }
1616
1617 if (isa<Constant>(I.getOperand(1))) {
1618 FindConstant(I.getOperand(1));
1619 }
1620
1621 // Add mask constant 0xFF.
1622 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1623 FindConstant(CstFF);
1624
1625 // Add shift amount constant.
1626 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1627 uint64_t Idx = CI->getZExtValue();
1628 Constant *CstShiftAmount =
1629 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1630 FindConstant(CstShiftAmount);
1631 }
1632
1633 continue;
1634 }
1635
1636 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1637 // Ignore constant for index of InsertElement instruction.
1638 if (i == 2) {
1639 continue;
1640 }
1641
1642 if (isa<Constant>(I.getOperand(i)) &&
1643 !isa<GlobalValue>(I.getOperand(i))) {
1644 FindConstant(I.getOperand(i));
1645 }
1646 }
1647
1648 continue;
1649 } else if (isa<ExtractElementInst>(I)) {
1650 // Handle ExtractElement with <4 x i8> specially.
1651 Type *CompositeTy = I.getOperand(0)->getType();
1652 if (is4xi8vec(CompositeTy)) {
1653 LLVMContext &Context = CompositeTy->getContext();
1654 if (isa<Constant>(I.getOperand(0))) {
1655 FindConstant(I.getOperand(0));
1656 }
1657
1658 // Add mask constant 0xFF.
1659 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1660 FindConstant(CstFF);
1661
1662 // Add shift amount constant.
1663 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1664 uint64_t Idx = CI->getZExtValue();
1665 Constant *CstShiftAmount =
1666 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1667 FindConstant(CstShiftAmount);
1668 } else {
1669 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1670 FindConstant(Cst8);
1671 }
1672
1673 continue;
1674 }
1675
1676 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1677 // Ignore constant for index of ExtractElement instruction.
1678 if (i == 1) {
1679 continue;
1680 }
1681
1682 if (isa<Constant>(I.getOperand(i)) &&
1683 !isa<GlobalValue>(I.getOperand(i))) {
1684 FindConstant(I.getOperand(i));
1685 }
1686 }
1687
1688 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001689 } else if ((Instruction::Xor == I.getOpcode()) &&
1690 I.getType()->isIntegerTy(1)) {
1691 // Special-case Xor where the type is i1 and one of the arguments is the
1692 // constant 1 (true): that maps to OpLogicalNot in SPIR-V, so we don't
1693 // need the constant.
David Neto22f144c2017-06-12 14:26:21 -04001694 bool foundConstantTrue = false;
1695 for (Use &Op : I.operands()) {
1696 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1697 auto CI = cast<ConstantInt>(Op);
1698
1699 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001700 // If we already found the true constant, we might (probably only
1701 // on -O0) have an OpLogicalNot which is taking a constant
1702 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001703 FindConstant(Op);
1704 } else {
1705 foundConstantTrue = true;
1706 }
1707 }
1708 }
1709
1710 continue;
David Netod2de94a2017-08-28 17:27:47 -04001711 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001712 // Special case if i8 is not generally handled.
1713 if (!clspv::Option::Int8Support()) {
1714 // For truncation to i8 we mask against 255.
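// (i.e. the trunc is later emitted as a bitwise AND with 0xff, so that
// constant must be registered now.)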
1715 Type *ToTy = I.getType();
1716 if (8u == ToTy->getPrimitiveSizeInBits()) {
1717 LLVMContext &Context = ToTy->getContext();
1718 Constant *Cst255 =
1719 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1720 FindConstant(Cst255);
1721 }
David Netod2de94a2017-08-28 17:27:47 -04001722 }
Neil Henning39672102017-09-29 14:33:13 +01001723 } else if (isa<AtomicRMWInst>(I)) {
1724 LLVMContext &Context = I.getContext();
1725
1726 FindConstant(
1727 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1728 FindConstant(ConstantInt::get(
1729 Type::getInt32Ty(Context),
1730 spv::MemorySemanticsUniformMemoryMask |
1731 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001732 }
1733
1734 for (Use &Op : I.operands()) {
1735 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1736 FindConstant(Op);
1737 }
1738 }
1739 }
1740 }
1741}
1742
1743void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001744 ValueList &CstList = getConstantList();
1745
David Netofb9a7972017-08-25 17:08:24 -04001746 // If V is already tracked, ignore it.
1747 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001748 return;
1749 }
1750
David Neto862b7d82018-06-14 18:48:37 -04001751 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1752 return;
1753 }
1754
David Neto22f144c2017-06-12 14:26:21 -04001755 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001756 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001757
1758 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001759 if (is4xi8vec(CstTy)) {
1760 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001761 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001762 }
1763 }
1764
1765 if (Cst->getNumOperands()) {
1766 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1767 ++I) {
1768 FindConstant(*I);
1769 }
1770
David Netofb9a7972017-08-25 17:08:24 -04001771 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001772 return;
1773 } else if (const ConstantDataSequential *CDS =
1774 dyn_cast<ConstantDataSequential>(Cst)) {
1775 // Add constants for each element to constant list.
1776 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1777 Constant *EleCst = CDS->getElementAsConstant(i);
1778 FindConstant(EleCst);
1779 }
1780 }
1781
1782 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001783 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001784 }
1785}
1786
1787spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1788 switch (AddrSpace) {
1789 default:
1790 llvm_unreachable("Unsupported OpenCL address space");
1791 case AddressSpace::Private:
1792 return spv::StorageClassFunction;
1793 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001794 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001795 case AddressSpace::Constant:
1796 return clspv::Option::ConstantArgsInUniformBuffer()
1797 ? spv::StorageClassUniform
1798 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001799 case AddressSpace::Input:
1800 return spv::StorageClassInput;
1801 case AddressSpace::Local:
1802 return spv::StorageClassWorkgroup;
1803 case AddressSpace::UniformConstant:
1804 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001805 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001806 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001807 case AddressSpace::ModuleScopePrivate:
1808 return spv::StorageClassPrivate;
1809 }
1810}
1811
David Neto862b7d82018-06-14 18:48:37 -04001812spv::StorageClass
1813SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1814 switch (arg_kind) {
1815 case clspv::ArgKind::Buffer:
1816 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001817 case clspv::ArgKind::BufferUBO:
1818 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001819 case clspv::ArgKind::Pod:
1820 return clspv::Option::PodArgsInUniformBuffer()
1821 ? spv::StorageClassUniform
1822 : spv::StorageClassStorageBuffer;
1823 case clspv::ArgKind::Local:
1824 return spv::StorageClassWorkgroup;
1825 case clspv::ArgKind::ReadOnlyImage:
1826 case clspv::ArgKind::WriteOnlyImage:
1827 case clspv::ArgKind::Sampler:
1828 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001829 default:
1830 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001831 }
1832}
1833
David Neto22f144c2017-06-12 14:26:21 -04001834spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1835 return StringSwitch<spv::BuiltIn>(Name)
1836 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1837 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1838 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1839 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1840 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1841 .Default(spv::BuiltInMax);
1842}
1843
1844void SPIRVProducerPass::GenerateExtInstImport() {
1845 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1846 uint32_t &ExtInstImportID = getOpExtInstImportID();
1847
1848 //
1849 // Generate OpExtInstImport.
1850 //
1851 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001852 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001853 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1854 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001855}
1856
alan-bakerb6b09dc2018-11-08 16:59:28 -05001857void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1858 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001859 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1860 ValueMapType &VMap = getValueMap();
1861 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001862 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001863
1864 // Map for OpTypeRuntimeArray. If an argument has pointer type, two SPIR-V
1865 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1866 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1867
1868 for (Type *Ty : getTypeList()) {
1869 // Update TypeMap with nextID for reference later.
1870 TypeMap[Ty] = nextID;
1871
1872 switch (Ty->getTypeID()) {
1873 default: {
1874 Ty->print(errs());
1875 llvm_unreachable("Unsupported type???");
1876 break;
1877 }
1878 case Type::MetadataTyID:
1879 case Type::LabelTyID: {
1880 // Ignore these types.
1881 break;
1882 }
1883 case Type::PointerTyID: {
1884 PointerType *PTy = cast<PointerType>(Ty);
1885 unsigned AddrSpace = PTy->getAddressSpace();
1886
1887 // For the purposes of our Vulkan SPIR-V type system, constant and global
1888 // are conflated.
1889 bool UseExistingOpTypePointer = false;
1890 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001891 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1892 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001893 // Check to see if we already created this type (for instance, if we
1894 // had a constant <type>* and a global <type>*, the type would be
1895 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001896 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1897 if (0 < TypeMap.count(GlobalTy)) {
1898 TypeMap[PTy] = TypeMap[GlobalTy];
1899 UseExistingOpTypePointer = true;
1900 break;
1901 }
David Neto22f144c2017-06-12 14:26:21 -04001902 }
1903 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001904 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1905 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001906
alan-bakerb6b09dc2018-11-08 16:59:28 -05001907 // Check to see if we already created this type (for instance, if we
1908 // had a constant <type>* and a global <type>*, the type would be
1909 // created by one of these types, and shared by both).
1910 auto ConstantTy =
1911 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001912 if (0 < TypeMap.count(ConstantTy)) {
1913 TypeMap[PTy] = TypeMap[ConstantTy];
1914 UseExistingOpTypePointer = true;
1915 }
David Neto22f144c2017-06-12 14:26:21 -04001916 }
1917 }
1918
David Neto862b7d82018-06-14 18:48:37 -04001919 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001920
David Neto862b7d82018-06-14 18:48:37 -04001921 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001922 //
1923 // Generate OpTypePointer.
1924 //
1925
1926 // OpTypePointer
1927 // Ops[0] = Storage Class
1928 // Ops[1] = Element Type ID
1929 SPIRVOperandList Ops;
1930
David Neto257c3892018-04-11 13:19:45 -04001931 Ops << MkNum(GetStorageClass(AddrSpace))
1932 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001933
David Neto87846742018-04-11 17:36:22 -04001934 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001935 SPIRVInstList.push_back(Inst);
1936 }
David Neto22f144c2017-06-12 14:26:21 -04001937 break;
1938 }
1939 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001940 StructType *STy = cast<StructType>(Ty);
1941
1942 // Handle sampler type.
1943 if (STy->isOpaque()) {
1944 if (STy->getName().equals("opencl.sampler_t")) {
1945 //
1946 // Generate OpTypeSampler
1947 //
1948 // Empty Ops.
1949 SPIRVOperandList Ops;
1950
David Neto87846742018-04-11 17:36:22 -04001951 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001952 SPIRVInstList.push_back(Inst);
1953 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001954 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1955 STy->getName().startswith("opencl.image1d_wo_t") ||
1956 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001957 STy->getName().startswith("opencl.image2d_wo_t") ||
1958 STy->getName().startswith("opencl.image3d_ro_t") ||
1959 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001960 //
1961 // Generate OpTypeImage
1962 //
1963 // Ops[0] = Sampled Type ID
1964 // Ops[1] = Dim ID
1965 // Ops[2] = Depth (Literal Number)
1966 // Ops[3] = Arrayed (Literal Number)
1967 // Ops[4] = MS (Literal Number)
1968 // Ops[5] = Sampled (Literal Number)
1969 // Ops[6] = Image Format ID
1970 //
1971 SPIRVOperandList Ops;
1972
alan-bakerf67468c2019-11-25 15:51:49 -05001973 uint32_t ImageTyID = nextID++;
1974 uint32_t SampledTyID = 0;
1975 if (STy->getName().contains(".float")) {
1976 SampledTyID = lookupType(Type::getFloatTy(Context));
1977 } else if (STy->getName().contains(".uint")) {
1978 SampledTyID = lookupType(Type::getInt32Ty(Context));
1979 } else if (STy->getName().contains(".int")) {
1980 // Generate a signed 32-bit integer if necessary.
1981 if (int32ID == 0) {
1982 int32ID = nextID++;
1983 SPIRVOperandList intOps;
1984 intOps << MkNum(32);
1985 intOps << MkNum(1);
1986 auto signed_int =
1987 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1988 SPIRVInstList.push_back(signed_int);
1989 }
1990 SampledTyID = int32ID;
1991
1992 // Generate a vec4 of the signed int if necessary.
1993 if (v4int32ID == 0) {
1994 v4int32ID = nextID++;
1995 SPIRVOperandList vecOps;
1996 vecOps << MkId(int32ID);
1997 vecOps << MkNum(4);
1998 auto int_vec =
1999 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2000 SPIRVInstList.push_back(int_vec);
2001 }
2002 } else {
2003 // This was likely an UndefValue.
2004 SampledTyID = lookupType(Type::getFloatTy(Context));
2005 }
David Neto257c3892018-04-11 13:19:45 -04002006 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002007
2008 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002009 if (STy->getName().startswith("opencl.image1d_ro_t") ||
2010 STy->getName().startswith("opencl.image1d_wo_t")) {
2011 DimID = spv::Dim1D;
2012 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2013 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002014 DimID = spv::Dim3D;
2015 }
David Neto257c3892018-04-11 13:19:45 -04002016 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002017
2018 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002019 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002020
2021 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04002022 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002023
2024 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002025 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002026
2027 // TODO: Set up Sampled.
2028 //
2029 // From Spec
2030 //
2031 // 0 indicates this is only known at run time, not at compile time
2032 // 1 indicates will be used with sampler
2033 // 2 indicates will be used without a sampler (a storage image)
2034 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002035 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002036 Sampled = 2;
2037 }
David Neto257c3892018-04-11 13:19:45 -04002038 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002039
2040 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002041 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002042
alan-bakerf67468c2019-11-25 15:51:49 -05002043 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002044 SPIRVInstList.push_back(Inst);
2045 break;
2046 }
2047 }
2048
2049 //
2050 // Generate OpTypeStruct
2051 //
2052 // Ops[0] ... Ops[n] = Member IDs
2053 SPIRVOperandList Ops;
2054
2055 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002056 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002057 }
2058
David Neto22f144c2017-06-12 14:26:21 -04002059 uint32_t STyID = nextID;
2060
alan-bakerb6b09dc2018-11-08 16:59:28 -05002061 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002062 SPIRVInstList.push_back(Inst);
2063
2064 // Generate OpMemberDecorate.
2065 auto DecoInsertPoint =
2066 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2067 [](SPIRVInstruction *Inst) -> bool {
2068 return Inst->getOpcode() != spv::OpDecorate &&
2069 Inst->getOpcode() != spv::OpMemberDecorate &&
2070 Inst->getOpcode() != spv::OpExtInstImport;
2071 });
2072
David Netoc463b372017-08-10 15:32:21 -04002073 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04002074 // Search for the correct offsets if this type was remapped.
2075 std::vector<uint32_t> *offsets = nullptr;
2076 auto iter = RemappedUBOTypeOffsets.find(STy);
2077 if (iter != RemappedUBOTypeOffsets.end()) {
2078 offsets = &iter->second;
2079 }
David Netoc463b372017-08-10 15:32:21 -04002080
David Neto862b7d82018-06-14 18:48:37 -04002081 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04002082 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2083 MemberIdx++) {
2084 // Ops[0] = Structure Type ID
2085 // Ops[1] = Member Index(Literal Number)
2086 // Ops[2] = Decoration (Offset)
2087 // Ops[3] = Byte Offset (Literal Number)
2088 Ops.clear();
2089
David Neto257c3892018-04-11 13:19:45 -04002090 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002091
alan-bakerb6b09dc2018-11-08 16:59:28 -05002092 auto ByteOffset =
2093 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04002094 if (offsets) {
2095 ByteOffset = (*offsets)[MemberIdx];
2096 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05002097 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04002098 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04002099 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04002100
David Neto87846742018-04-11 17:36:22 -04002101 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002102 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002103 }
2104
2105 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002106 if (StructTypesNeedingBlock.idFor(STy)) {
2107 Ops.clear();
2108 // Use Block decorations with StorageBuffer storage class.
2109 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002110
David Neto862b7d82018-06-14 18:48:37 -04002111 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2112 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002113 }
2114 break;
2115 }
2116 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002117 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002118
2119 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002120 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002121 SPIRVInstList.push_back(Inst);
2122 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002123 if (!clspv::Option::Int8Support()) {
2124 // i8 is added to TypeMap as i32.
2125 // No matter what LLVM type is requested first, always alias the
2126 // second one's SPIR-V type to be the same as the one we generated
2127 // first.
2128 unsigned aliasToWidth = 0;
2129 if (BitWidth == 8) {
2130 aliasToWidth = 32;
2131 BitWidth = 32;
2132 } else if (BitWidth == 32) {
2133 aliasToWidth = 8;
2134 }
2135 if (aliasToWidth) {
2136 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2137 auto where = TypeMap.find(otherType);
2138 if (where == TypeMap.end()) {
2139 // Go ahead and make it, but also map the other type to it.
2140 TypeMap[otherType] = nextID;
2141 } else {
2142 // Alias this SPIR-V type the existing type.
2143 TypeMap[Ty] = where->second;
2144 break;
2145 }
David Neto391aeb12017-08-26 15:51:58 -04002146 }
David Neto22f144c2017-06-12 14:26:21 -04002147 }
2148
David Neto257c3892018-04-11 13:19:45 -04002149 SPIRVOperandList Ops;
2150 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002151
2152 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002153 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002154 }
2155 break;
2156 }
2157 case Type::HalfTyID:
2158 case Type::FloatTyID:
2159 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002160 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002161 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002162
2163 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002164 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002165 break;
2166 }
2167 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002168 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002169 const uint64_t Length = ArrTy->getArrayNumElements();
2170 if (Length == 0) {
2171 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002172
David Neto862b7d82018-06-14 18:48:37 -04002173 // Only generate the type once.
2174 // TODO(dneto): Can it ever be generated more than once?
2175 // Doesn't LLVM type uniqueness guarantee we'll only see this
2176 // once?
2177 Type *EleTy = ArrTy->getArrayElementType();
2178 if (OpRuntimeTyMap.count(EleTy) == 0) {
2179 uint32_t OpTypeRuntimeArrayID = nextID;
2180 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002181
David Neto862b7d82018-06-14 18:48:37 -04002182 //
2183 // Generate OpTypeRuntimeArray.
2184 //
David Neto22f144c2017-06-12 14:26:21 -04002185
David Neto862b7d82018-06-14 18:48:37 -04002186 // OpTypeRuntimeArray
2187 // Ops[0] = Element Type ID
2188 SPIRVOperandList Ops;
2189 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002190
David Neto862b7d82018-06-14 18:48:37 -04002191 SPIRVInstList.push_back(
2192 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002193
David Neto862b7d82018-06-14 18:48:37 -04002194 if (Hack_generate_runtime_array_stride_early) {
2195 // Generate OpDecorate.
2196 auto DecoInsertPoint = std::find_if(
2197 SPIRVInstList.begin(), SPIRVInstList.end(),
2198 [](SPIRVInstruction *Inst) -> bool {
2199 return Inst->getOpcode() != spv::OpDecorate &&
2200 Inst->getOpcode() != spv::OpMemberDecorate &&
2201 Inst->getOpcode() != spv::OpExtInstImport;
2202 });
David Neto22f144c2017-06-12 14:26:21 -04002203
David Neto862b7d82018-06-14 18:48:37 -04002204 // Ops[0] = Target ID
2205 // Ops[1] = Decoration (ArrayStride)
2206 // Ops[2] = Stride Number(Literal Number)
2207 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002208
David Neto862b7d82018-06-14 18:48:37 -04002209 Ops << MkId(OpTypeRuntimeArrayID)
2210 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002211 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002212
David Neto862b7d82018-06-14 18:48:37 -04002213 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2214 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2215 }
2216 }
David Neto22f144c2017-06-12 14:26:21 -04002217
David Neto862b7d82018-06-14 18:48:37 -04002218 } else {
David Neto22f144c2017-06-12 14:26:21 -04002219
David Neto862b7d82018-06-14 18:48:37 -04002220 //
2221 // Generate OpConstant and OpTypeArray.
2222 //
2223
2224 //
2225 // Generate OpConstant for array length.
2226 //
2227 // Ops[0] = Result Type ID
2228 // Ops[1] .. Ops[n] = Values LiteralNumber
2229 SPIRVOperandList Ops;
2230
2231 Type *LengthTy = Type::getInt32Ty(Context);
2232 uint32_t ResTyID = lookupType(LengthTy);
2233 Ops << MkId(ResTyID);
2234
2235 assert(Length < UINT32_MAX);
2236 Ops << MkNum(static_cast<uint32_t>(Length));
2237
2238 // Add constant for length to constant list.
2239 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2240 AllocatedVMap[CstLength] = nextID;
2241 VMap[CstLength] = nextID;
2242 uint32_t LengthID = nextID;
2243
2244 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2245 SPIRVInstList.push_back(CstInst);
2246
2247 // Remember to generate ArrayStride later
2248 getTypesNeedingArrayStride().insert(Ty);
2249
2250 //
2251 // Generate OpTypeArray.
2252 //
2253 // Ops[0] = Element Type ID
2254 // Ops[1] = Array Length Constant ID
2255 Ops.clear();
2256
2257 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2258 Ops << MkId(EleTyID) << MkId(LengthID);
2259
2260 // Update TypeMap with nextID.
2261 TypeMap[Ty] = nextID;
2262
2263 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2264 SPIRVInstList.push_back(ArrayInst);
2265 }
David Neto22f144c2017-06-12 14:26:21 -04002266 break;
2267 }
2268 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002269 // <4 x i8> is changed to i32 if i8 is not generally supported.
2270 if (!clspv::Option::Int8Support() &&
2271 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002272 if (Ty->getVectorNumElements() == 4) {
2273 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2274 break;
2275 } else {
2276 Ty->print(errs());
2277 llvm_unreachable("Support above i8 vector type");
2278 }
2279 }
2280
2281 // Ops[0] = Component Type ID
2282 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002283 SPIRVOperandList Ops;
2284 Ops << MkId(lookupType(Ty->getVectorElementType()))
2285 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002286
alan-bakerb6b09dc2018-11-08 16:59:28 -05002287 SPIRVInstruction *inst =
2288 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002289 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002290 break;
2291 }
2292 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002293 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002294 SPIRVInstList.push_back(Inst);
2295 break;
2296 }
2297 case Type::FunctionTyID: {
2298 // Generate SPIRV instruction for function type.
2299 FunctionType *FTy = cast<FunctionType>(Ty);
2300
2301 // Ops[0] = Return Type ID
2302 // Ops[1] ... Ops[n] = Parameter Type IDs
2303 SPIRVOperandList Ops;
2304
2305 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002306 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002307
2308 // Find SPIRV instructions for parameter types
2309 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2310 // Find SPIRV instruction for parameter type.
2311 auto ParamTy = FTy->getParamType(k);
2312 if (ParamTy->isPointerTy()) {
2313 auto PointeeTy = ParamTy->getPointerElementType();
2314 if (PointeeTy->isStructTy() &&
2315 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2316 ParamTy = PointeeTy;
2317 }
2318 }
2319
David Netoc6f3ab22018-04-06 18:02:31 -04002320 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002321 }
2322
David Neto87846742018-04-11 17:36:22 -04002323 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002324 SPIRVInstList.push_back(Inst);
2325 break;
2326 }
2327 }
2328 }
2329
2330 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002331 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002332 //
2333 // Generate OpTypeSampledImage.
2334 //
2335 // Ops[0] = Image Type ID
2336 //
2337 SPIRVOperandList Ops;
2338
David Netoc6f3ab22018-04-06 18:02:31 -04002339 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002340
alan-bakerabd82722019-12-03 17:14:51 -05002341 // Update the image type map.
2342 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002343
David Neto87846742018-04-11 17:36:22 -04002344 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002345 SPIRVInstList.push_back(Inst);
2346 }
David Netoc6f3ab22018-04-06 18:02:31 -04002347
2348 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002349 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2350 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002351 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002352
2353 // Generate the spec constant.
2354 SPIRVOperandList Ops;
2355 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002356 SPIRVInstList.push_back(
2357 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
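// (The default array length is 1; the host is expected to override it
// through the corresponding SpecId when the pointer-to-local argument's
// actual size is known.)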
David Netoc6f3ab22018-04-06 18:02:31 -04002358
2359 // Generate the array type.
2360 Ops.clear();
2361 // The element type must have been created.
2362 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2363 assert(elem_ty_id);
2364 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2365
2366 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002367 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002368
2369 Ops.clear();
2370 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002371 SPIRVInstList.push_back(new SPIRVInstruction(
2372 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002373 }
David Neto22f144c2017-06-12 14:26:21 -04002374}
2375
2376void SPIRVProducerPass::GenerateSPIRVConstants() {
2377 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2378 ValueMapType &VMap = getValueMap();
2379 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2380 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002381 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002382
2383 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002384 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002385 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002386
2387 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002388 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002389 continue;
2390 }
2391
David Netofb9a7972017-08-25 17:08:24 -04002392 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002393 VMap[Cst] = nextID;
2394
2395 //
2396 // Generate OpConstant.
2397 //
2398
2399 // Ops[0] = Result Type ID
2400 // Ops[1] .. Ops[n] = Values LiteralNumber
2401 SPIRVOperandList Ops;
2402
David Neto257c3892018-04-11 13:19:45 -04002403 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002404
2405 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002406 spv::Op Opcode = spv::OpNop;
2407
2408 if (isa<UndefValue>(Cst)) {
2409 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002410 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002411 if (hack_undef && IsTypeNullable(Cst->getType())) {
2412 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002413 }
David Neto22f144c2017-06-12 14:26:21 -04002414 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2415 unsigned BitWidth = CI->getBitWidth();
2416 if (BitWidth == 1) {
2417 // If the bitwidth of constant is 1, generate OpConstantTrue or
2418 // OpConstantFalse.
2419 if (CI->getZExtValue()) {
2420 // Ops[0] = Result Type ID
2421 Opcode = spv::OpConstantTrue;
2422 } else {
2423 // Ops[0] = Result Type ID
2424 Opcode = spv::OpConstantFalse;
2425 }
David Neto22f144c2017-06-12 14:26:21 -04002426 } else {
2427 auto V = CI->getZExtValue();
2428 LiteralNum.push_back(V & 0xFFFFFFFF);
2429
2430 if (BitWidth > 32) {
2431 LiteralNum.push_back(V >> 32);
2432 }
2433
2434 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002435
David Neto257c3892018-04-11 13:19:45 -04002436 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002437 }
2438 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2439 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2440 Type *CFPTy = CFP->getType();
2441 if (CFPTy->isFloatTy()) {
2442 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002443 } else if (CFPTy->isDoubleTy()) {
2444 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2445 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002446 } else if (CFPTy->isHalfTy()) {
2447 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002448 } else {
2449 CFPTy->print(errs());
2450 llvm_unreachable("Implement this ConstantFP Type");
2451 }
2452
2453 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002454
David Neto257c3892018-04-11 13:19:45 -04002455 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002456 } else if (isa<ConstantDataSequential>(Cst) &&
2457 cast<ConstantDataSequential>(Cst)->isString()) {
2458 Cst->print(errs());
2459 llvm_unreachable("Implement this Constant");
2460
2461 } else if (const ConstantDataSequential *CDS =
2462 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002463 // Let's convert <4 x i8> constant to int constant specially.
2464 // This case occurs when all the values are specified as constant
2465 // ints.
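// (e.g. the loop below packs <i8 1, i8 2, i8 3, i8 4> into the single
// i32 value 0x01020304, with element 0 in the most significant byte.)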
2466 Type *CstTy = Cst->getType();
2467 if (is4xi8vec(CstTy)) {
2468 LLVMContext &Context = CstTy->getContext();
2469
2470 //
2471 // Generate OpConstant with OpTypeInt 32 0.
2472 //
Neil Henning39672102017-09-29 14:33:13 +01002473 uint32_t IntValue = 0;
2474 for (unsigned k = 0; k < 4; k++) {
2475 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002476 IntValue = (IntValue << 8) | (Val & 0xffu);
2477 }
2478
2479 Type *i32 = Type::getInt32Ty(Context);
2480 Constant *CstInt = ConstantInt::get(i32, IntValue);
2481 // If this constant is already registered on VMap, use it.
2482 if (VMap.count(CstInt)) {
2483 uint32_t CstID = VMap[CstInt];
2484 VMap[Cst] = CstID;
2485 continue;
2486 }
2487
David Neto257c3892018-04-11 13:19:45 -04002488 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002489
David Neto87846742018-04-11 17:36:22 -04002490 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002491 SPIRVInstList.push_back(CstInst);
2492
2493 continue;
2494 }
2495
2496 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002497 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2498 Constant *EleCst = CDS->getElementAsConstant(k);
2499 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002500 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002501 }
2502
2503 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002504 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2505 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002506 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002507 Type *CstTy = Cst->getType();
2508 if (is4xi8vec(CstTy)) {
2509 LLVMContext &Context = CstTy->getContext();
2510
2511 //
2512 // Generate OpConstant with OpTypeInt 32 0.
2513 //
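        // Same packing as the ConstantDataSequential case above; undef
        // elements contribute zero bytes.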
Neil Henning39672102017-09-29 14:33:13 +01002514 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002515 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2516 I != E; ++I) {
2517 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002518 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002519 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2520 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002521 }
David Neto49351ac2017-08-26 17:32:20 -04002522 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002523 }
2524
David Neto49351ac2017-08-26 17:32:20 -04002525 Type *i32 = Type::getInt32Ty(Context);
2526 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002527 // If this constant is already registered in VMap, use it.
2528 if (VMap.count(CstInt)) {
2529 uint32_t CstID = VMap[CstInt];
2530 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002531 continue;
David Neto22f144c2017-06-12 14:26:21 -04002532 }
2533
David Neto257c3892018-04-11 13:19:45 -04002534 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002535
David Neto87846742018-04-11 17:36:22 -04002536 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002537 SPIRVInstList.push_back(CstInst);
2538
David Neto19a1bad2017-08-25 15:01:41 -04002539 continue;
David Neto22f144c2017-06-12 14:26:21 -04002540 }
2541
2542 // We use a constant composite in SPIR-V for our constant aggregate in
2543 // LLVM.
2544 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002545
2546 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2547 // Look up the ID of the element of this aggregate (which we will
2548 // previously have created a constant for).
2549 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2550
2551 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002552 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002553 }
2554 } else if (Cst->isNullValue()) {
2555 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002556 } else {
2557 Cst->print(errs());
2558 llvm_unreachable("Unsupported Constant???");
2559 }
2560
alan-baker5b86ed72019-02-15 08:26:50 -05002561 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2562 // Null pointer requires variable pointers.
2563 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2564 }
2565
David Neto87846742018-04-11 17:36:22 -04002566 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002567 SPIRVInstList.push_back(CstInst);
2568 }
2569}
2570
2571void SPIRVProducerPass::GenerateSamplers(Module &M) {
2572 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002573
alan-bakerb6b09dc2018-11-08 16:59:28 -05002574 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002575 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002576 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2577 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002578
David Neto862b7d82018-06-14 18:48:37 -04002579 // We might have samplers in the sampler map that are not used
2580 // in the translation unit. We still need to allocate variables
2581 // and bindings for them.
2582 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002583
Kévin Petitdf71de32019-04-09 14:09:50 +01002584 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002585 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002586 if (!var_fn)
2587 return;
alan-baker09cb9802019-12-10 13:16:27 -05002588
David Neto862b7d82018-06-14 18:48:37 -04002589 for (auto user : var_fn->users()) {
2590 // Populate SamplerLiteralToDescriptorSetMap and
2591 // SamplerLiteralToBindingMap.
2592 //
2593 // Look for calls like
2594 // call %opencl.sampler_t addrspace(2)*
2595 // @clspv.sampler.var.literal(
2596 // i32 descriptor,
2597 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002598 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002599 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002600 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002601 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002602 auto sampler_value = third_param;
2603 if (clspv::Option::UseSamplerMap()) {
2604 if (third_param >= sampler_map.size()) {
2605 errs() << "Out of bounds index to sampler map: " << third_param;
2606 llvm_unreachable("bad sampler init: out of bounds");
2607 }
2608 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002609 }
2610
David Neto862b7d82018-06-14 18:48:37 -04002611 const auto descriptor_set = static_cast<unsigned>(
2612 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2613 const auto binding = static_cast<unsigned>(
2614 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2615
2616 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2617 SamplerLiteralToBindingMap[sampler_value] = binding;
2618 used_bindings.insert(binding);
2619 }
2620 }
2621
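  // Second pass: allocate one module-scope sampler variable per distinct
  // literal sampler value and decorate it. Roughly, each one produces:
  //   %sampler_var = OpVariable %ptr_UniformConstant_sampler UniformConstant
  //   OpDecorate %sampler_var DescriptorSet <ds>
  //   OpDecorate %sampler_var Binding <binding>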
alan-baker09cb9802019-12-10 13:16:27 -05002622 DenseSet<size_t> seen;
2623 for (auto user : var_fn->users()) {
2624 if (!isa<CallInst>(user))
2625 continue;
2626
2627 auto call = cast<CallInst>(user);
2628 const unsigned third_param = static_cast<unsigned>(
2629 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2630
2631 // Already allocated a variable for this value.
2632 if (!seen.insert(third_param).second)
2633 continue;
2634
2635 auto sampler_value = third_param;
2636 if (clspv::Option::UseSamplerMap()) {
2637 sampler_value = sampler_map[third_param].first;
2638 }
2639
David Neto22f144c2017-06-12 14:26:21 -04002640 // Generate OpVariable.
2641 //
2642 // GIDOps[0] : Result Type ID
2643 // GIDOps[1] : Storage Class
2644 SPIRVOperandList Ops;
2645
David Neto257c3892018-04-11 13:19:45 -04002646 Ops << MkId(lookupType(SamplerTy))
2647 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002648
David Neto862b7d82018-06-14 18:48:37 -04002649 auto sampler_var_id = nextID++;
2650 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002651 SPIRVInstList.push_back(Inst);
2652
alan-baker09cb9802019-12-10 13:16:27 -05002653 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002654
2655 // Find Insert Point for OpDecorate.
2656 auto DecoInsertPoint =
2657 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2658 [](SPIRVInstruction *Inst) -> bool {
2659 return Inst->getOpcode() != spv::OpDecorate &&
2660 Inst->getOpcode() != spv::OpMemberDecorate &&
2661 Inst->getOpcode() != spv::OpExtInstImport;
2662 });
2663
2664 // Ops[0] = Target ID
2665 // Ops[1] = Decoration (DescriptorSet)
2666 // Ops[2] = LiteralNumber according to Decoration
2667 Ops.clear();
2668
David Neto862b7d82018-06-14 18:48:37 -04002669 unsigned descriptor_set;
2670 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002671 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002672 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002673 // This sampler is not actually used. Assign it the next unused binding.
2674 for (binding = 0; used_bindings.count(binding); binding++)
2675 ;
2676 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2677 used_bindings.insert(binding);
2678 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002679 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2680 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002681
alan-baker09cb9802019-12-10 13:16:27 -05002682 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002683 descriptorMapEntries->emplace_back(std::move(sampler_data),
2684 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002685 }
2686
2687 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2688 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002689
David Neto87846742018-04-11 17:36:22 -04002690 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002691 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2692
2693 // Ops[0] = Target ID
2694 // Ops[1] = Decoration (Binding)
2695 // Ops[2] = LiteralNumber according to Decoration
2696 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002697 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2698 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002699
David Neto87846742018-04-11 17:36:22 -04002700 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002701 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2702 }
David Neto862b7d82018-06-14 18:48:37 -04002703}
David Neto22f144c2017-06-12 14:26:21 -04002704
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002705void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002706 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2707 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002708
David Neto862b7d82018-06-14 18:48:37 -04002709 // Generate variables. Make one for each resource var info object.
2710 for (auto *info : ModuleOrderedResourceVars) {
2711 Type *type = info->var_fn->getReturnType();
2712 // Remap the address space for opaque types.
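    // Samplers and images must be declared in UniformConstant storage in
    // SPIR-V for Vulkan, regardless of the address space on the LLVM type.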
2713 switch (info->arg_kind) {
2714 case clspv::ArgKind::Sampler:
2715 case clspv::ArgKind::ReadOnlyImage:
2716 case clspv::ArgKind::WriteOnlyImage:
2717 type = PointerType::get(type->getPointerElementType(),
2718 clspv::AddressSpace::UniformConstant);
2719 break;
2720 default:
2721 break;
2722 }
David Neto22f144c2017-06-12 14:26:21 -04002723
David Neto862b7d82018-06-14 18:48:37 -04002724 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002725
David Neto862b7d82018-06-14 18:48:37 -04002726 const auto type_id = lookupType(type);
2727 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2728 SPIRVOperandList Ops;
2729 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002730
David Neto862b7d82018-06-14 18:48:37 -04002731 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2732 SPIRVInstList.push_back(Inst);
2733
2734 // Map calls to the variable-builtin-function.
2735 for (auto &U : info->var_fn->uses()) {
2736 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2737 const auto set = unsigned(
2738 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2739 const auto binding = unsigned(
2740 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2741 if (set == info->descriptor_set && binding == info->binding) {
2742 switch (info->arg_kind) {
2743 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002744 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002745 case clspv::ArgKind::Pod:
2746 // The call maps to the variable directly.
2747 VMap[call] = info->var_id;
2748 break;
2749 case clspv::ArgKind::Sampler:
2750 case clspv::ArgKind::ReadOnlyImage:
2751 case clspv::ArgKind::WriteOnlyImage:
2752 // The call maps to a load we generate later.
2753 ResourceVarDeferredLoadCalls[call] = info->var_id;
2754 break;
2755 default:
2756 llvm_unreachable("Unhandled arg kind");
2757 }
2758 }
David Neto22f144c2017-06-12 14:26:21 -04002759 }
David Neto862b7d82018-06-14 18:48:37 -04002760 }
2761 }
David Neto22f144c2017-06-12 14:26:21 -04002762
David Neto862b7d82018-06-14 18:48:37 -04002763 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002764
David Neto862b7d82018-06-14 18:48:37 -04002765 // Find Insert Point for OpDecorate.
2766 auto DecoInsertPoint =
2767 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2768 [](SPIRVInstruction *Inst) -> bool {
2769 return Inst->getOpcode() != spv::OpDecorate &&
2770 Inst->getOpcode() != spv::OpMemberDecorate &&
2771 Inst->getOpcode() != spv::OpExtInstImport;
2772 });
2773
2774 SPIRVOperandList Ops;
2775 for (auto *info : ModuleOrderedResourceVars) {
2776 // Decorate with DescriptorSet and Binding.
2777 Ops.clear();
2778 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2779 << MkNum(info->descriptor_set);
2780 SPIRVInstList.insert(DecoInsertPoint,
2781 new SPIRVInstruction(spv::OpDecorate, Ops));
2782
2783 Ops.clear();
2784 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2785 << MkNum(info->binding);
2786 SPIRVInstList.insert(DecoInsertPoint,
2787 new SPIRVInstruction(spv::OpDecorate, Ops));
2788
alan-bakere9308012019-03-15 10:25:13 -04002789 if (info->coherent) {
2790 // Decorate with Coherent if required for the variable.
2791 Ops.clear();
2792 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2793 SPIRVInstList.insert(DecoInsertPoint,
2794 new SPIRVInstruction(spv::OpDecorate, Ops));
2795 }
2796
David Neto862b7d82018-06-14 18:48:37 -04002797 // Generate NonWritable and NonReadable
2798 switch (info->arg_kind) {
2799 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002800 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002801 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2802 clspv::AddressSpace::Constant) {
2803 Ops.clear();
2804 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2805 SPIRVInstList.insert(DecoInsertPoint,
2806 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002807 }
David Neto862b7d82018-06-14 18:48:37 -04002808 break;
David Neto862b7d82018-06-14 18:48:37 -04002809 case clspv::ArgKind::WriteOnlyImage:
2810 Ops.clear();
2811 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2812 SPIRVInstList.insert(DecoInsertPoint,
2813 new SPIRVInstruction(spv::OpDecorate, Ops));
2814 break;
2815 default:
2816 break;
David Neto22f144c2017-06-12 14:26:21 -04002817 }
2818 }
2819}
2820
2821void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002822 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002823 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2824 ValueMapType &VMap = getValueMap();
2825 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002826 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002827
2828 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2829 Type *Ty = GV.getType();
2830 PointerType *PTy = cast<PointerType>(Ty);
2831
2832 uint32_t InitializerID = 0;
2833
2834 // Workgroup size is handled differently (it goes into a constant)
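  // If the kernels carry reqd_work_group_size metadata, the size becomes an
  // OpConstantComposite; otherwise three OpSpecConstants (decorated later with
  // SpecId 0/1/2) feed an OpSpecConstantComposite so the host can set it.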
2835 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2836 std::vector<bool> HasMDVec;
2837 uint32_t PrevXDimCst = 0xFFFFFFFF;
2838 uint32_t PrevYDimCst = 0xFFFFFFFF;
2839 uint32_t PrevZDimCst = 0xFFFFFFFF;
2840 for (Function &Func : *GV.getParent()) {
2841 if (Func.isDeclaration()) {
2842 continue;
2843 }
2844
2845 // We only need to check kernels.
2846 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2847 continue;
2848 }
2849
2850 if (const MDNode *MD =
2851 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2852 uint32_t CurXDimCst = static_cast<uint32_t>(
2853 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2854 uint32_t CurYDimCst = static_cast<uint32_t>(
2855 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2856 uint32_t CurZDimCst = static_cast<uint32_t>(
2857 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2858
2859 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2860 PrevZDimCst == 0xFFFFFFFF) {
2861 PrevXDimCst = CurXDimCst;
2862 PrevYDimCst = CurYDimCst;
2863 PrevZDimCst = CurZDimCst;
2864 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2865 CurZDimCst != PrevZDimCst) {
2866 llvm_unreachable(
2867 "reqd_work_group_size must be the same across all kernels");
2868 } else {
2869 continue;
2870 }
2871
2872 //
2873 // Generate OpConstantComposite.
2874 //
2875 // Ops[0] : Result Type ID
2876 // Ops[1] : Constant size for x dimension.
2877 // Ops[2] : Constant size for y dimension.
2878 // Ops[3] : Constant size for z dimension.
2879 SPIRVOperandList Ops;
2880
2881 uint32_t XDimCstID =
2882 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2883 uint32_t YDimCstID =
2884 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2885 uint32_t ZDimCstID =
2886 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2887
2888 InitializerID = nextID;
2889
David Neto257c3892018-04-11 13:19:45 -04002890 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2891 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002892
David Neto87846742018-04-11 17:36:22 -04002893 auto *Inst =
2894 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002895 SPIRVInstList.push_back(Inst);
2896
2897 HasMDVec.push_back(true);
2898 } else {
2899 HasMDVec.push_back(false);
2900 }
2901 }
2902
2903 // Check that all kernels agree on the reqd_work_group_size definition.
2904 bool HasMD = false;
2905 if (!HasMDVec.empty()) {
2906 HasMD = HasMDVec[0];
2907 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2908 if (HasMD != HasMDVec[i]) {
2909 llvm_unreachable(
2910 "Kernels should have consistent work group size definition");
2911 }
2912 }
2913 }
2914
2915 // If no kernel has reqd_work_group_size metadata, generate
2916 // OpSpecConstants for the x/y/z dimensions.
2917 if (!HasMD) {
2918 //
2919 // Generate OpSpecConstants for x/y/z dimension.
2920 //
2921 // Ops[0] : Result Type ID
2922 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2923 uint32_t XDimCstID = 0;
2924 uint32_t YDimCstID = 0;
2925 uint32_t ZDimCstID = 0;
2926
David Neto22f144c2017-06-12 14:26:21 -04002927 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002928 uint32_t result_type_id =
2929 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002930
David Neto257c3892018-04-11 13:19:45 -04002931 // X Dimension
2932 Ops << MkId(result_type_id) << MkNum(1);
2933 XDimCstID = nextID++;
2934 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002935 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002936
2937 // Y Dimension
2938 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002939 Ops << MkId(result_type_id) << MkNum(1);
2940 YDimCstID = nextID++;
2941 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002942 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002943
2944 // Z Dimension
2945 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002946 Ops << MkId(result_type_id) << MkNum(1);
2947 ZDimCstID = nextID++;
2948 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002949 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002950
David Neto257c3892018-04-11 13:19:45 -04002951 BuiltinDimVec.push_back(XDimCstID);
2952 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002953 BuiltinDimVec.push_back(ZDimCstID);
2954
David Neto22f144c2017-06-12 14:26:21 -04002955 //
2956 // Generate OpSpecConstantComposite.
2957 //
2958 // Ops[0] : Result Type ID
2959 // Ops[1] : Constant size for x dimension.
2960 // Ops[2] : Constant size for y dimension.
2961 // Ops[3] : Constant size for z dimension.
2962 InitializerID = nextID;
2963
2964 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002965 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2966 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002967
David Neto87846742018-04-11 17:36:22 -04002968 auto *Inst =
2969 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002970 SPIRVInstList.push_back(Inst);
2971 }
2972 }
2973
David Neto22f144c2017-06-12 14:26:21 -04002974 VMap[&GV] = nextID;
2975
2976 //
2977 // Generate OpVariable.
2978 //
2979 // GIDOps[0] : Result Type ID
2980 // GIDOps[1] : Storage Class
2981 SPIRVOperandList Ops;
2982
David Neto85082642018-03-24 06:55:20 -07002983 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002984 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002985
David Neto85082642018-03-24 06:55:20 -07002986 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002987 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002988 clspv::Option::ModuleConstantsInStorageBuffer();
2989
Kévin Petit23d5f182019-08-13 16:21:29 +01002990 if (GV.hasInitializer()) {
2991 auto GVInit = GV.getInitializer();
2992 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2993 assert(VMap.count(GVInit) == 1);
2994 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002995 }
2996 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002997
2998 if (0 != InitializerID) {
2999 // Emit the ID of the initializer as part of the variable definition.
3000 Ops << MkId(InitializerID);
3001 }
David Neto85082642018-03-24 06:55:20 -07003002 const uint32_t var_id = nextID++;
3003
David Neto87846742018-04-11 17:36:22 -04003004 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003005 SPIRVInstList.push_back(Inst);
3006
3007 // If we have a builtin.
3008 if (spv::BuiltInMax != BuiltinType) {
3009 // Find Insert Point for OpDecorate.
3010 auto DecoInsertPoint =
3011 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3012 [](SPIRVInstruction *Inst) -> bool {
3013 return Inst->getOpcode() != spv::OpDecorate &&
3014 Inst->getOpcode() != spv::OpMemberDecorate &&
3015 Inst->getOpcode() != spv::OpExtInstImport;
3016 });
3017 //
3018 // Generate OpDecorate.
3019 //
3020 // DOps[0] = Target ID
3021 // DOps[1] = Decoration (Builtin)
3022 // DOps[2] = BuiltIn ID
3023 uint32_t ResultID;
3024
3025 // WorkgroupSize is different: we decorate the constant composite that holds
3026 // its value, rather than the variable that we use to access the value.
3027 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3028 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003029 // Save both the value and variable IDs for later.
3030 WorkgroupSizeValueID = InitializerID;
3031 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003032 } else {
3033 ResultID = VMap[&GV];
3034 }
3035
3036 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003037 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3038 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003039
David Neto87846742018-04-11 17:36:22 -04003040 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003041 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003042 } else if (module_scope_constant_external_init) {
3043 // This module scope constant is initialized from a storage buffer with data
3044 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003045 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003046
David Neto862b7d82018-06-14 18:48:37 -04003047 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003048 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3049 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003050 std::string hexbytes;
3051 llvm::raw_string_ostream str(hexbytes);
3052 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003053 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3054 str.str()};
3055 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3056 0);
David Neto85082642018-03-24 06:55:20 -07003057
3058 // Find Insert Point for OpDecorate.
3059 auto DecoInsertPoint =
3060 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3061 [](SPIRVInstruction *Inst) -> bool {
3062 return Inst->getOpcode() != spv::OpDecorate &&
3063 Inst->getOpcode() != spv::OpMemberDecorate &&
3064 Inst->getOpcode() != spv::OpExtInstImport;
3065 });
3066
David Neto257c3892018-04-11 13:19:45 -04003067 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003068 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003069 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3070 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003071 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003072
3073 // OpDecorate %var DescriptorSet <descriptor_set>
3074 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003075 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3076 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003077 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003078 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003079 }
3080}
3081
David Netoc6f3ab22018-04-06 18:02:31 -04003082void SPIRVProducerPass::GenerateWorkgroupVars() {
3083 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
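  // Emit one Workgroup-storage variable per pointer-to-local kernel argument;
  // its array length is a specialization constant identified by the spec id.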
Alan Baker202c8c72018-08-13 13:47:44 -04003084 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3085 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003086 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003087
3088 // Generate OpVariable.
3089 //
3090 // GIDOps[0] : Result Type ID
3091 // GIDOps[1] : Storage Class
3092 SPIRVOperandList Ops;
3093 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3094
3095 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003096 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003097 }
3098}
3099
David Neto862b7d82018-06-14 18:48:37 -04003100void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3101 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003102 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3103 return;
3104 }
David Neto862b7d82018-06-14 18:48:37 -04003105 // Gather the list of resources that are used by this function's arguments.
3106 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3107
alan-bakerf5e5f692018-11-27 08:33:24 -05003108 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3109 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003110 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003111 std::string kind =
3112 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3113 ? "pod_ubo"
3114 : argKind;
3115 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003116 };
3117
3118 auto *fty = F.getType()->getPointerElementType();
3119 auto *func_ty = dyn_cast<FunctionType>(fty);
3120
alan-baker038e9242019-04-19 22:14:41 -04003121 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003122 // If an argument maps to a resource variable, then get descriptor set and
3123 // binding from the resource variable. Other info comes from the metadata.
3124 const auto *arg_map = F.getMetadata("kernel_arg_map");
3125 if (arg_map) {
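    // Each kernel_arg_map entry carries seven operands:
    //   {name, old index, new index, offset, size, arg kind, spec id}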
3126 for (const auto &arg : arg_map->operands()) {
3127 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003128 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003129 const auto name =
3130 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3131 const auto old_index =
3132 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3133 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003134 const size_t new_index = static_cast<size_t>(
3135 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003136 const auto offset =
3137 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003138 const auto arg_size =
3139 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003140 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003141 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003142 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003143 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003144
3145 uint32_t descriptor_set = 0;
3146 uint32_t binding = 0;
3147 version0::DescriptorMapEntry::KernelArgData kernel_data = {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003148 F.getName(), name, static_cast<uint32_t>(old_index), argKind,
alan-bakerf5e5f692018-11-27 08:33:24 -05003149 static_cast<uint32_t>(spec_id),
3150 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003151 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003152 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003153 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3154 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3155 DL));
David Neto862b7d82018-06-14 18:48:37 -04003156 } else {
3157 auto *info = resource_var_at_index[new_index];
3158 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003159 descriptor_set = info->descriptor_set;
3160 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003161 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003162 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3163 binding);
David Neto862b7d82018-06-14 18:48:37 -04003164 }
3165 } else {
3166 // There is no argument map.
3167 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003168 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003169
3170 SmallVector<Argument *, 4> arguments;
3171 for (auto &arg : F.args()) {
3172 arguments.push_back(&arg);
3173 }
3174
3175 unsigned arg_index = 0;
3176 for (auto *info : resource_var_at_index) {
3177 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003178 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003179 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003180 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003181 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003182 }
3183
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003184 // Local pointer arguments are unused in this case. Offset is always
3185 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003186 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3187 F.getName(), arg->getName(),
3188 arg_index, remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3189 0, 0,
3190 0, arg_size};
3191 descriptorMapEntries->emplace_back(std::move(kernel_data),
3192 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003193 }
3194 arg_index++;
3195 }
3196 // Generate mappings for pointer-to-local arguments.
3197 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3198 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003199 auto where = LocalArgSpecIds.find(arg);
3200 if (where != LocalArgSpecIds.end()) {
3201 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003202 // Pod argument members are unused in this case.
3203 version0::DescriptorMapEntry::KernelArgData kernel_data = {
3204 F.getName(),
3205 arg->getName(),
3206 arg_index,
3207 ArgKind::Local,
3208 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003209 static_cast<uint32_t>(
3210 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003211 0,
3212 0};
3213 // Pointer-to-local arguments do not utilize descriptor set and binding.
3214 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003215 }
3216 }
3217 }
3218}
3219
David Neto22f144c2017-06-12 14:26:21 -04003220void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3221 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3222 ValueMapType &VMap = getValueMap();
3223 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003224 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3225 auto &GlobalConstArgSet = getGlobalConstArgSet();
3226
3227 FunctionType *FTy = F.getFunctionType();
3228
3229 //
David Neto22f144c2017-06-12 14:26:21 -04003230 // Generate OpFunction.
3231 //
3232
3233 // FOps[0] : Result Type ID
3234 // FOps[1] : Function Control
3235 // FOps[2] : Function Type ID
3236 SPIRVOperandList FOps;
3237
3238 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003239 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003240
3241 // Check function attributes for SPIRV Function Control.
3242 uint32_t FuncControl = spv::FunctionControlMaskNone;
3243 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3244 FuncControl |= spv::FunctionControlInlineMask;
3245 }
3246 if (F.hasFnAttribute(Attribute::NoInline)) {
3247 FuncControl |= spv::FunctionControlDontInlineMask;
3248 }
3249 // TODO: Check llvm attribute for Function Control Pure.
3250 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3251 FuncControl |= spv::FunctionControlPureMask;
3252 }
3253 // TODO: Check llvm attribute for Function Control Const.
3254 if (F.hasFnAttribute(Attribute::ReadNone)) {
3255 FuncControl |= spv::FunctionControlConstMask;
3256 }
3257
David Neto257c3892018-04-11 13:19:45 -04003258 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003259
3260 uint32_t FTyID;
3261 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
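    // Kernel arguments have already been lowered to module-scope resource
    // variables, so the SPIR-V function type for a kernel takes no parameters.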
3262 SmallVector<Type *, 4> NewFuncParamTys;
3263 FunctionType *NewFTy =
3264 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3265 FTyID = lookupType(NewFTy);
3266 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003267 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003268 if (GlobalConstFuncTyMap.count(FTy)) {
3269 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3270 } else {
3271 FTyID = lookupType(FTy);
3272 }
3273 }
3274
David Neto257c3892018-04-11 13:19:45 -04003275 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003276
3277 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3278 EntryPoints.push_back(std::make_pair(&F, nextID));
3279 }
3280
3281 VMap[&F] = nextID;
3282
David Neto482550a2018-03-24 05:21:07 -07003283 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003284 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3285 }
David Neto22f144c2017-06-12 14:26:21 -04003286 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003287 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003288 SPIRVInstList.push_back(FuncInst);
3289
3290 //
3291 // Generate OpFunctionParameter for Normal function.
3292 //
3293
3294 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003295
3296 // Find Insert Point for OpDecorate.
3297 auto DecoInsertPoint =
3298 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3299 [](SPIRVInstruction *Inst) -> bool {
3300 return Inst->getOpcode() != spv::OpDecorate &&
3301 Inst->getOpcode() != spv::OpMemberDecorate &&
3302 Inst->getOpcode() != spv::OpExtInstImport;
3303 });
3304
David Neto22f144c2017-06-12 14:26:21 -04003305 // Iterate Argument for name instead of param type from function type.
3306 unsigned ArgIdx = 0;
3307 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003308 uint32_t param_id = nextID++;
3309 VMap[&Arg] = param_id;
3310
3311 if (CalledWithCoherentResource(Arg)) {
3312 // If the arg is passed a coherent resource ever, then decorate this
3313 // parameter with Coherent too.
3314 SPIRVOperandList decoration_ops;
3315 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003316 SPIRVInstList.insert(
3317 DecoInsertPoint,
3318 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003319 }
David Neto22f144c2017-06-12 14:26:21 -04003320
3321 // ParamOps[0] : Result Type ID
3322 SPIRVOperandList ParamOps;
3323
3324 // Find SPIRV instruction for parameter type.
3325 uint32_t ParamTyID = lookupType(Arg.getType());
3326 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3327 if (GlobalConstFuncTyMap.count(FTy)) {
3328 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3329 Type *EleTy = PTy->getPointerElementType();
3330 Type *ArgTy =
3331 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3332 ParamTyID = lookupType(ArgTy);
3333 GlobalConstArgSet.insert(&Arg);
3334 }
3335 }
3336 }
David Neto257c3892018-04-11 13:19:45 -04003337 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003338
3339 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003340 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003341 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003342 SPIRVInstList.push_back(ParamInst);
3343
3344 ArgIdx++;
3345 }
3346 }
3347}
3348
alan-bakerb6b09dc2018-11-08 16:59:28 -05003349void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003350 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3351 EntryPointVecType &EntryPoints = getEntryPointVec();
3352 ValueMapType &VMap = getValueMap();
3353 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3354 uint32_t &ExtInstImportID = getOpExtInstImportID();
3355 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3356
3357 // Set up insert point.
3358 auto InsertPoint = SPIRVInstList.begin();
3359
3360 //
3361 // Generate OpCapability
3362 //
3363 // TODO: Which llvm information is mapped to SPIRV Capability?
3364
3365 // Ops[0] = Capability
3366 SPIRVOperandList Ops;
3367
David Neto87846742018-04-11 17:36:22 -04003368 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003369 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003370 SPIRVInstList.insert(InsertPoint, CapInst);
3371
alan-bakerf906d2b2019-12-10 11:26:23 -05003372 bool write_without_format = false;
3373 bool sampled_1d = false;
3374 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003375 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003376 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3377 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003378 SPIRVInstList.insert(
3379 InsertPoint,
3380 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003381 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003382 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003383 SPIRVInstList.insert(
3384 InsertPoint,
3385 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003386 } else if (Ty->isIntegerTy(64)) {
3387 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003388 SPIRVInstList.insert(
3389 InsertPoint,
3390 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003391 } else if (Ty->isHalfTy()) {
3392 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003393 SPIRVInstList.insert(InsertPoint,
3394 new SPIRVInstruction(spv::OpCapability,
3395 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003396 } else if (Ty->isDoubleTy()) {
3397 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003398 SPIRVInstList.insert(InsertPoint,
3399 new SPIRVInstruction(spv::OpCapability,
3400 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003401 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3402 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003403 if (STy->getName().startswith("opencl.image1d_wo_t") ||
3404 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003405 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003406 write_without_format = true;
3407 }
3408 if (STy->getName().startswith("opencl.image1d_ro_t") ||
3409 STy->getName().startswith("opencl.image1d_wo_t")) {
3410 if (STy->getName().contains(".sampled"))
3411 sampled_1d = true;
3412 else
3413 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003414 }
3415 }
3416 }
3417 }
3418
alan-bakerf906d2b2019-12-10 11:26:23 -05003419 if (write_without_format) {
3420 // Generate OpCapability for write only image type.
3421 SPIRVInstList.insert(
3422 InsertPoint,
3423 new SPIRVInstruction(
3424 spv::OpCapability,
3425 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3426 }
3427 if (image_1d) {
3428 // Generate OpCapability for unsampled 1D image type.
3429 SPIRVInstList.insert(InsertPoint,
3430 new SPIRVInstruction(spv::OpCapability,
3431 {MkNum(spv::CapabilityImage1D)}));
3432 } else if (sampled_1d) {
3433 // Generate OpCapability for sampled 1D image type.
3434 SPIRVInstList.insert(
3435 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3436 {MkNum(spv::CapabilitySampled1D)}));
3437 }
3438
David Neto5c22a252018-03-15 16:07:41 -04003439 { // OpCapability ImageQuery
3440 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003441 for (const auto &SymVal : module.getValueSymbolTable()) {
3442 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003443 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003444 hasImageQuery = true;
3445 break;
3446 }
David Neto5c22a252018-03-15 16:07:41 -04003447 }
3448 }
alan-bakerf67468c2019-11-25 15:51:49 -05003449
David Neto5c22a252018-03-15 16:07:41 -04003450 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003451 auto *ImageQueryCapInst = new SPIRVInstruction(
3452 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003453 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3454 }
3455 }
3456
David Neto22f144c2017-06-12 14:26:21 -04003457 if (hasVariablePointers()) {
3458 //
David Neto22f144c2017-06-12 14:26:21 -04003459 // Generate OpCapability.
3460 //
3461 // Ops[0] = Capability
3462 //
3463 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003464 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003465
David Neto87846742018-04-11 17:36:22 -04003466 SPIRVInstList.insert(InsertPoint,
3467 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003468 } else if (hasVariablePointersStorageBuffer()) {
3469 //
3470 // Generate OpCapability.
3471 //
3472 // Ops[0] = Capability
3473 //
3474 Ops.clear();
3475 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003476
alan-baker5b86ed72019-02-15 08:26:50 -05003477 SPIRVInstList.insert(InsertPoint,
3478 new SPIRVInstruction(spv::OpCapability, Ops));
3479 }
3480
3481 // Always add the storage buffer extension
3482 {
David Neto22f144c2017-06-12 14:26:21 -04003483 //
3484 // Generate OpExtension.
3485 //
3486 // Ops[0] = Name (Literal String)
3487 //
alan-baker5b86ed72019-02-15 08:26:50 -05003488 auto *ExtensionInst = new SPIRVInstruction(
3489 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3490 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3491 }
David Neto22f144c2017-06-12 14:26:21 -04003492
alan-baker5b86ed72019-02-15 08:26:50 -05003493 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3494 //
3495 // Generate OpExtension.
3496 //
3497 // Ops[0] = Name (Literal String)
3498 //
3499 auto *ExtensionInst = new SPIRVInstruction(
3500 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3501 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003502 }
3503
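  // If an extended instruction set was imported, its OpExtInstImport already
  // sits at the insert point; step past it so OpMemoryModel lands after it.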
3504 if (ExtInstImportID) {
3505 ++InsertPoint;
3506 }
3507
3508 //
3509 // Generate OpMemoryModel
3510 //
3511 // Memory model for Vulkan will always be GLSL450.
3512
3513 // Ops[0] = Addressing Model
3514 // Ops[1] = Memory Model
3515 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003516 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003517
David Neto87846742018-04-11 17:36:22 -04003518 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003519 SPIRVInstList.insert(InsertPoint, MemModelInst);
3520
3521 //
3522 // Generate OpEntryPoint
3523 //
3524 for (auto EntryPoint : EntryPoints) {
3525 // Ops[0] = Execution Model
3526 // Ops[1] = EntryPoint ID
3527 // Ops[2] = Name (Literal String)
3528 // ...
3529 //
3530 // TODO: Do we need to consider Interface ID for forward references???
3531 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003532 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003533 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3534 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003535
David Neto22f144c2017-06-12 14:26:21 -04003536 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003537 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003538 }
3539
David Neto87846742018-04-11 17:36:22 -04003540 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003541 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3542 }
3543
3544 for (auto EntryPoint : EntryPoints) {
3545 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3546 ->getMetadata("reqd_work_group_size")) {
3547
3548 if (!BuiltinDimVec.empty()) {
3549 llvm_unreachable(
3550 "Kernels should have consistent work group size definition");
3551 }
3552
3553 //
3554 // Generate OpExecutionMode
3555 //
3556
3557 // Ops[0] = Entry Point ID
3558 // Ops[1] = Execution Mode
3559 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3560 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003561 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003562
3563 uint32_t XDim = static_cast<uint32_t>(
3564 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3565 uint32_t YDim = static_cast<uint32_t>(
3566 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3567 uint32_t ZDim = static_cast<uint32_t>(
3568 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3569
David Neto257c3892018-04-11 13:19:45 -04003570 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003571
David Neto87846742018-04-11 17:36:22 -04003572 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003573 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3574 }
3575 }
3576
3577 //
3578 // Generate OpSource.
3579 //
3580 // Ops[0] = SourceLanguage ID
3581 // Ops[1] = Version (LiteralNum)
3582 //
3583 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003584 switch (clspv::Option::Language()) {
3585 case clspv::Option::SourceLanguage::OpenCL_C_10:
3586 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3587 break;
3588 case clspv::Option::SourceLanguage::OpenCL_C_11:
3589 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3590 break;
3591 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003592 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003593 break;
3594 case clspv::Option::SourceLanguage::OpenCL_C_20:
3595 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3596 break;
3597 case clspv::Option::SourceLanguage::OpenCL_CPP:
3598 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3599 break;
3600 default:
3601 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3602 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003603 }
David Neto22f144c2017-06-12 14:26:21 -04003604
David Neto87846742018-04-11 17:36:22 -04003605 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003606 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3607
3608 if (!BuiltinDimVec.empty()) {
3609 //
3610 // Generate OpDecorates for x/y/z dimension.
3611 //
3612 // Ops[0] = Target ID
3613 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003614 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003615
3616 // X Dimension
3617 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003618 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003619 SPIRVInstList.insert(InsertPoint,
3620 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003621
3622 // Y Dimension
3623 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003624 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003625 SPIRVInstList.insert(InsertPoint,
3626 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003627
3628 // Z Dimension
3629 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003630 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003631 SPIRVInstList.insert(InsertPoint,
3632 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003633 }
3634}
3635
David Netob6e2e062018-04-25 10:32:06 -04003636void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3637 // Work around a driver bug. Initializers on Private variables might not
3638 // work. So the start of the kernel should store the initializer value to the
3639 // variables. Yes, *every* entry point pays this cost if *any* entry point
3640 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3641 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003642 // TODO(dneto): Remove this at some point once fixed drivers are widely
3643 // available.
David Netob6e2e062018-04-25 10:32:06 -04003644 if (WorkgroupSizeVarID) {
3645 assert(WorkgroupSizeValueID);
3646
3647 SPIRVOperandList Ops;
3648 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3649
3650 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3651 getSPIRVInstList().push_back(Inst);
3652 }
3653}
3654
David Neto22f144c2017-06-12 14:26:21 -04003655void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3656 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3657 ValueMapType &VMap = getValueMap();
3658
David Netob6e2e062018-04-25 10:32:06 -04003659 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003660
3661 for (BasicBlock &BB : F) {
3662 // Register BasicBlock to ValueMap.
3663 VMap[&BB] = nextID;
3664
3665 //
3666 // Generate OpLabel for Basic Block.
3667 //
3668 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003669 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003670 SPIRVInstList.push_back(Inst);
3671
David Neto6dcd4712017-06-23 11:06:47 -04003672 // OpVariable instructions must come first.
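    // SPIR-V requires Function-storage OpVariables to appear at the start of
    // the function's first block, so emit allocas before other instructions.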
3673 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003674 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3675 // Allocating a pointer requires variable pointers.
3676 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003677 setVariablePointersCapabilities(
3678 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003679 }
David Neto6dcd4712017-06-23 11:06:47 -04003680 GenerateInstruction(I);
3681 }
3682 }
3683
David Neto22f144c2017-06-12 14:26:21 -04003684 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003685 if (clspv::Option::HackInitializers()) {
3686 GenerateEntryPointInitialStores();
3687 }
David Neto22f144c2017-06-12 14:26:21 -04003688 }
3689
3690 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003691 if (!isa<AllocaInst>(I)) {
3692 GenerateInstruction(I);
3693 }
David Neto22f144c2017-06-12 14:26:21 -04003694 }
3695 }
3696}
3697
3698spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3699 const std::map<CmpInst::Predicate, spv::Op> Map = {
3700 {CmpInst::ICMP_EQ, spv::OpIEqual},
3701 {CmpInst::ICMP_NE, spv::OpINotEqual},
3702 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3703 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3704 {CmpInst::ICMP_ULT, spv::OpULessThan},
3705 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3706 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3707 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3708 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3709 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3710 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3711 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3712 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3713 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3714 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3715 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3716 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3717 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3718 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3719 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3720 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3721 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3722
3723 assert(0 != Map.count(I->getPredicate()));
3724
3725 return Map.at(I->getPredicate());
3726}
3727
3728spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3729 const std::map<unsigned, spv::Op> Map{
3730 {Instruction::Trunc, spv::OpUConvert},
3731 {Instruction::ZExt, spv::OpUConvert},
3732 {Instruction::SExt, spv::OpSConvert},
3733 {Instruction::FPToUI, spv::OpConvertFToU},
3734 {Instruction::FPToSI, spv::OpConvertFToS},
3735 {Instruction::UIToFP, spv::OpConvertUToF},
3736 {Instruction::SIToFP, spv::OpConvertSToF},
3737 {Instruction::FPTrunc, spv::OpFConvert},
3738 {Instruction::FPExt, spv::OpFConvert},
3739 {Instruction::BitCast, spv::OpBitcast}};
3740
3741 assert(0 != Map.count(I.getOpcode()));
3742
3743 return Map.at(I.getOpcode());
3744}
3745
3746spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003747 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003748 switch (I.getOpcode()) {
3749 default:
3750 break;
3751 case Instruction::Or:
3752 return spv::OpLogicalOr;
3753 case Instruction::And:
3754 return spv::OpLogicalAnd;
3755 case Instruction::Xor:
3756 return spv::OpLogicalNotEqual;
3757 }
3758 }
3759
alan-bakerb6b09dc2018-11-08 16:59:28 -05003760 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003761 {Instruction::Add, spv::OpIAdd},
3762 {Instruction::FAdd, spv::OpFAdd},
3763 {Instruction::Sub, spv::OpISub},
3764 {Instruction::FSub, spv::OpFSub},
3765 {Instruction::Mul, spv::OpIMul},
3766 {Instruction::FMul, spv::OpFMul},
3767 {Instruction::UDiv, spv::OpUDiv},
3768 {Instruction::SDiv, spv::OpSDiv},
3769 {Instruction::FDiv, spv::OpFDiv},
3770 {Instruction::URem, spv::OpUMod},
3771 {Instruction::SRem, spv::OpSRem},
3772 {Instruction::FRem, spv::OpFRem},
3773 {Instruction::Or, spv::OpBitwiseOr},
3774 {Instruction::Xor, spv::OpBitwiseXor},
3775 {Instruction::And, spv::OpBitwiseAnd},
3776 {Instruction::Shl, spv::OpShiftLeftLogical},
3777 {Instruction::LShr, spv::OpShiftRightLogical},
3778 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3779
3780 assert(0 != Map.count(I.getOpcode()));
3781
3782 return Map.at(I.getOpcode());
3783}
3784
3785void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3786 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3787 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003788 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3789 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3790
3791 // Register Instruction to ValueMap.
3792 if (0 == VMap[&I]) {
3793 VMap[&I] = nextID;
3794 }
3795
3796 switch (I.getOpcode()) {
3797 default: {
3798 if (Instruction::isCast(I.getOpcode())) {
3799 //
3800 // Generate SPIRV instructions for cast operators.
3801 //
3802
David Netod2de94a2017-08-28 17:27:47 -04003803 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003804 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003805 auto toI8 = Ty == Type::getInt8Ty(Context);
3806 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003807 // Handle zext, sext and uitofp with i1 type specially.
3808 if ((I.getOpcode() == Instruction::ZExt ||
3809 I.getOpcode() == Instruction::SExt ||
3810 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003811 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003812 //
3813 // Generate OpSelect.
3814 //
3815
3816 // Ops[0] = Result Type ID
3817 // Ops[1] = Condition ID
3818 // Ops[2] = True Constant ID
3819 // Ops[3] = False Constant ID
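// Illustrative sketch (IDs and names assumed, not taken from a test):
//   %r = zext i1 %c to i32
// becomes roughly
//   %r = OpSelect %uint %c %uint_1 %uint_0
// with an all-ones constant as the "true" value for sext, and
// %float_1 / %float_0 for uitofp.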
3820 SPIRVOperandList Ops;
3821
David Neto257c3892018-04-11 13:19:45 -04003822 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003823
David Neto22f144c2017-06-12 14:26:21 -04003824 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003825 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003826
3827 uint32_t TrueID = 0;
3828 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003829 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003830 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003831 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003832 } else {
3833 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3834 }
David Neto257c3892018-04-11 13:19:45 -04003835 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003836
3837 uint32_t FalseID = 0;
3838 if (I.getOpcode() == Instruction::ZExt) {
3839 FalseID = VMap[Constant::getNullValue(I.getType())];
3840 } else if (I.getOpcode() == Instruction::SExt) {
3841 FalseID = VMap[Constant::getNullValue(I.getType())];
3842 } else {
3843 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3844 }
David Neto257c3892018-04-11 13:19:45 -04003845 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003846
David Neto87846742018-04-11 17:36:22 -04003847 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003848 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003849 } else if (!clspv::Option::Int8Support() &&
3850 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003851 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3852 // 8 bits.
3853 // Before:
3854 // %result = trunc i32 %a to i8
3855 // After:
3856 // %result = OpBitwiseAnd %uint %a %uint_255
3857
3858 SPIRVOperandList Ops;
3859
David Neto257c3892018-04-11 13:19:45 -04003860 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003861
3862 Type *UintTy = Type::getInt32Ty(Context);
3863 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003864 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003865
David Neto87846742018-04-11 17:36:22 -04003866 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003867 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003868 } else {
3869 // Ops[0] = Result Type ID
3870 // Ops[1] = Source Value ID
3871 SPIRVOperandList Ops;
3872
David Neto257c3892018-04-11 13:19:45 -04003873 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003874
David Neto87846742018-04-11 17:36:22 -04003875 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003876 SPIRVInstList.push_back(Inst);
3877 }
3878 } else if (isa<BinaryOperator>(I)) {
3879 //
3880 // Generate SPIRV instructions for binary operators.
3881 //
3882
3883 // Handle xor with i1 type specially.
3884 if (I.getOpcode() == Instruction::Xor &&
3885 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003886 ((isa<ConstantInt>(I.getOperand(0)) &&
3887 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3888 (isa<ConstantInt>(I.getOperand(1)) &&
3889 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003890 //
3891 // Generate OpLogicalNot.
3892 //
3893 // Ops[0] = Result Type ID
3894 // Ops[1] = Operand
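// Illustrative sketch (IDs assumed): "%r = xor i1 %a, true" becomes
//   %r = OpLogicalNot %bool %a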
3895 SPIRVOperandList Ops;
3896
David Neto257c3892018-04-11 13:19:45 -04003897 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003898
3899 Value *CondV = I.getOperand(0);
3900 if (isa<Constant>(I.getOperand(0))) {
3901 CondV = I.getOperand(1);
3902 }
David Neto257c3892018-04-11 13:19:45 -04003903 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003904
David Neto87846742018-04-11 17:36:22 -04003905 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003906 SPIRVInstList.push_back(Inst);
3907 } else {
3908 // Ops[0] = Result Type ID
3909 // Ops[1] = Operand 0
3910 // Ops[2] = Operand 1
3911 SPIRVOperandList Ops;
3912
David Neto257c3892018-04-11 13:19:45 -04003913 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3914 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003915
David Neto87846742018-04-11 17:36:22 -04003916 auto *Inst =
3917 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003918 SPIRVInstList.push_back(Inst);
3919 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003920 } else if (I.getOpcode() == Instruction::FNeg) {
3921 // The only unary operator.
3922 //
3923 // Ops[0] = Result Type ID
3924 // Ops[1] = Operand 0
3925 SPIRVOperandList ops;
3926
3927 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3928 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3929 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003930 } else {
3931 I.print(errs());
3932 llvm_unreachable("Unsupported instruction???");
3933 }
3934 break;
3935 }
3936 case Instruction::GetElementPtr: {
3937 auto &GlobalConstArgSet = getGlobalConstArgSet();
3938
3939 //
3940 // Generate OpAccessChain.
3941 //
3942 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3943
3948 // Ops[0] = Result Type ID
3949 // Ops[1] = Base ID
3950 // Ops[2] ... Ops[n] = Indexes ID
3951 SPIRVOperandList Ops;
3952
alan-bakerb6b09dc2018-11-08 16:59:28 -05003953 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003954 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3955 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3956 // Use pointer type with private address space for global constant.
3957 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003958 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003959 }
David Neto257c3892018-04-11 13:19:45 -04003960
3961 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003962
David Neto862b7d82018-06-14 18:48:37 -04003963 // Generate the base pointer.
3964 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003965
David Neto862b7d82018-06-14 18:48:37 -04003966 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003967
3968 //
3969 // Choose the opcode from the GEP's first index:
3970 //
David Neto862b7d82018-06-14 18:48:37 -04003971 // 1. If the first index is the constant 0, generate OpAccessChain and
3972 //    drop that first index.
David Neto22f144c2017-06-12 14:26:21 -04003973 // 2. If the first index is a non-zero constant, generate OpPtrAccessChain
3974 //    and keep the first index.
3975 // 3. If the first index is not a constant, generate OpPtrAccessChain and
3976 //    keep the first index.
3977 // 4. In any remaining case, generate OpAccessChain and keep the first
3978 //    index.
3979 //
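// Illustrative sketch (types and IDs assumed, not taken from a test):
//   %p = getelementptr %struct.S, %struct.S* %base, i32 0, i32 2
// falls under rule 1 and becomes
//   %p = OpAccessChain %ptr_ty %base %uint_2
// while a non-constant first index such as
//   %q = getelementptr float, float* %base, i32 %n
// falls under rule 3 and becomes
//   %q = OpPtrAccessChain %ptr_ty %base %n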
3980 spv::Op Opcode = spv::OpAccessChain;
3981 unsigned offset = 0;
3982 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003983 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003984 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003985 } else {
David Neto22f144c2017-06-12 14:26:21 -04003986 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003987 }
David Neto862b7d82018-06-14 18:48:37 -04003988 } else {
David Neto22f144c2017-06-12 14:26:21 -04003989 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003990 }
3991
3992 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003993 // Do we need to generate ArrayStride? Check against the GEP result type
3994 // rather than the pointer type of the base because when indexing into
3995 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3996 // for something else in the SPIR-V.
3997 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003998 auto address_space = ResultType->getAddressSpace();
3999 setVariablePointersCapabilities(address_space);
4000 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004001 case spv::StorageClassStorageBuffer:
4002 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004003 // Save the need to generate an ArrayStride decoration. But defer
4004 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004005 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004006 break;
4007 default:
4008 break;
David Neto1a1a0582017-07-07 12:01:44 -04004009 }
David Neto22f144c2017-06-12 14:26:21 -04004010 }
4011
4012 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004013 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004014 }
4015
David Neto87846742018-04-11 17:36:22 -04004016 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004017 SPIRVInstList.push_back(Inst);
4018 break;
4019 }
4020 case Instruction::ExtractValue: {
4021 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4022 // Ops[0] = Result Type ID
4023 // Ops[1] = Composite ID
4024 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4025 SPIRVOperandList Ops;
4026
David Neto257c3892018-04-11 13:19:45 -04004027 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004028
4029 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004030 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004031
4032 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004033 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004034 }
4035
David Neto87846742018-04-11 17:36:22 -04004036 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004037 SPIRVInstList.push_back(Inst);
4038 break;
4039 }
4040 case Instruction::InsertValue: {
4041 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4042 // Ops[0] = Result Type ID
4043 // Ops[1] = Object ID
4044 // Ops[2] = Composite ID
4045 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4046 SPIRVOperandList Ops;
4047
4048 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004049 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004050
4051 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004052 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004053
4054 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004055 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004056
4057 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004058 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004059 }
4060
David Neto87846742018-04-11 17:36:22 -04004061 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004062 SPIRVInstList.push_back(Inst);
4063 break;
4064 }
4065 case Instruction::Select: {
4066 //
4067 // Generate OpSelect.
4068 //
4069
4070 // Ops[0] = Result Type ID
4071 // Ops[1] = Condition ID
4072 // Ops[2] = True Constant ID
4073 // Ops[3] = False Constant ID
4074 SPIRVOperandList Ops;
4075
4076 // Find SPIRV instruction for parameter type.
4077 auto Ty = I.getType();
4078 if (Ty->isPointerTy()) {
4079 auto PointeeTy = Ty->getPointerElementType();
4080 if (PointeeTy->isStructTy() &&
4081 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4082 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004083 } else {
4084 // Selecting between pointers requires variable pointers.
4085 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4086 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4087 setVariablePointers(true);
4088 }
David Neto22f144c2017-06-12 14:26:21 -04004089 }
4090 }
4091
David Neto257c3892018-04-11 13:19:45 -04004092 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4093 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004094
David Neto87846742018-04-11 17:36:22 -04004095 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004096 SPIRVInstList.push_back(Inst);
4097 break;
4098 }
4099 case Instruction::ExtractElement: {
4100 // Handle <4 x i8> type manually.
4101 Type *CompositeTy = I.getOperand(0)->getType();
4102 if (is4xi8vec(CompositeTy)) {
4103 //
4104 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4105 // <4 x i8>.
4106 //
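// Illustrative sketch (IDs assumed): the <4 x i8> value is held in a 32-bit
// integer, so "extractelement <4 x i8> %v, i32 2" becomes roughly
//   %shifted = OpShiftRightLogical %uint %v %uint_16   ; 2 * 8 bits
//   %result  = OpBitwiseAnd %uint %shifted %uint_255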
4107
4108 //
4109 // Generate OpShiftRightLogical
4110 //
4111 // Ops[0] = Result Type ID
4112 // Ops[1] = Operand 0
4113 // Ops[2] = Operand 1
4114 //
4115 SPIRVOperandList Ops;
4116
David Neto257c3892018-04-11 13:19:45 -04004117 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004118
4119 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004120 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004121
4122 uint32_t Op1ID = 0;
4123 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4124 // Handle constant index.
4125 uint64_t Idx = CI->getZExtValue();
4126 Value *ShiftAmount =
4127 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4128 Op1ID = VMap[ShiftAmount];
4129 } else {
4130 // Handle variable index.
4131 SPIRVOperandList TmpOps;
4132
David Neto257c3892018-04-11 13:19:45 -04004133 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4134 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004135
4136 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004137 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004138
4139 Op1ID = nextID;
4140
David Neto87846742018-04-11 17:36:22 -04004141 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004142 SPIRVInstList.push_back(TmpInst);
4143 }
David Neto257c3892018-04-11 13:19:45 -04004144 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004145
4146 uint32_t ShiftID = nextID;
4147
David Neto87846742018-04-11 17:36:22 -04004148 auto *Inst =
4149 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004150 SPIRVInstList.push_back(Inst);
4151
4152 //
4153 // Generate OpBitwiseAnd
4154 //
4155 // Ops[0] = Result Type ID
4156 // Ops[1] = Operand 0
4157 // Ops[2] = Operand 1
4158 //
4159 Ops.clear();
4160
David Neto257c3892018-04-11 13:19:45 -04004161 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004162
4163 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004164 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004165
David Neto9b2d6252017-09-06 15:47:37 -04004166 // Reset mapping for this value to the result of the bitwise and.
4167 VMap[&I] = nextID;
4168
David Neto87846742018-04-11 17:36:22 -04004169 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004170 SPIRVInstList.push_back(Inst);
4171 break;
4172 }
4173
4174 // Ops[0] = Result Type ID
4175 // Ops[1] = Composite ID
4176 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4177 SPIRVOperandList Ops;
4178
David Neto257c3892018-04-11 13:19:45 -04004179 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004180
4181 spv::Op Opcode = spv::OpCompositeExtract;
4182 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004183 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004184 } else {
David Neto257c3892018-04-11 13:19:45 -04004185 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004186 Opcode = spv::OpVectorExtractDynamic;
4187 }
4188
David Neto87846742018-04-11 17:36:22 -04004189 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004190 SPIRVInstList.push_back(Inst);
4191 break;
4192 }
4193 case Instruction::InsertElement: {
4194 // Handle <4 x i8> type manually.
4195 Type *CompositeTy = I.getOperand(0)->getType();
4196 if (is4xi8vec(CompositeTy)) {
4197 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4198 uint32_t CstFFID = VMap[CstFF];
4199
4200 uint32_t ShiftAmountID = 0;
4201 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4202 // Handle constant index.
4203 uint64_t Idx = CI->getZExtValue();
4204 Value *ShiftAmount =
4205 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4206 ShiftAmountID = VMap[ShiftAmount];
4207 } else {
4208 // Handle variable index.
4209 SPIRVOperandList TmpOps;
4210
David Neto257c3892018-04-11 13:19:45 -04004211 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4212 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004213
4214 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004215 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004216
4217 ShiftAmountID = nextID;
4218
David Neto87846742018-04-11 17:36:22 -04004219 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004220 SPIRVInstList.push_back(TmpInst);
4221 }
4222
4223 //
4224 // Generate mask operations.
4225 //
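// Illustrative sketch (IDs assumed): inserting byte %b at index %i of a
// <4 x i8> value %v (held in a 32-bit integer) lowers to roughly
//   %mask    = OpShiftLeftLogical %uint %uint_255 %shift   ; shift = %i * 8
//   %invmask = OpNot %uint %mask
//   %cleared = OpBitwiseAnd %uint %v %invmask
//   %newbits = OpShiftLeftLogical %uint %b %shift
//   %result  = OpBitwiseOr %uint %cleared %newbits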
4226
4227 // ShiftLeft mask according to index of insertelement.
4228 SPIRVOperandList Ops;
4229
David Neto257c3892018-04-11 13:19:45 -04004230 const uint32_t ResTyID = lookupType(CompositeTy);
4231 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004232
4233 uint32_t MaskID = nextID;
4234
David Neto87846742018-04-11 17:36:22 -04004235 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004236 SPIRVInstList.push_back(Inst);
4237
4238 // Inverse mask.
4239 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004240 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004241
4242 uint32_t InvMaskID = nextID;
4243
David Neto87846742018-04-11 17:36:22 -04004244 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004245 SPIRVInstList.push_back(Inst);
4246
4247 // Apply mask.
4248 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004249 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004250
4251 uint32_t OrgValID = nextID;
4252
David Neto87846742018-04-11 17:36:22 -04004253 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004254 SPIRVInstList.push_back(Inst);
4255
4256 // Create correct value according to index of insertelement.
4257 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004258 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4259 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004260
4261 uint32_t InsertValID = nextID;
4262
David Neto87846742018-04-11 17:36:22 -04004263 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004264 SPIRVInstList.push_back(Inst);
4265
4266 // Insert value to original value.
4267 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004268 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004269
David Netoa394f392017-08-26 20:45:29 -04004270 VMap[&I] = nextID;
4271
David Neto87846742018-04-11 17:36:22 -04004272 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004273 SPIRVInstList.push_back(Inst);
4274
4275 break;
4276 }
4277
David Neto22f144c2017-06-12 14:26:21 -04004278 SPIRVOperandList Ops;
4279
James Priced26efea2018-06-09 23:28:32 +01004280 // Ops[0] = Result Type ID
4281 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004282
4283 spv::Op Opcode = spv::OpCompositeInsert;
4284 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004285 const auto value = CI->getZExtValue();
4286 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004287 // Ops[1] = Object ID
4288 // Ops[2] = Composite ID
4289 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004290 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004291 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004292 } else {
James Priced26efea2018-06-09 23:28:32 +01004293 // Ops[1] = Composite ID
4294 // Ops[2] = Object ID
4295 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004296 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004297 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004298 Opcode = spv::OpVectorInsertDynamic;
4299 }
4300
David Neto87846742018-04-11 17:36:22 -04004301 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004302 SPIRVInstList.push_back(Inst);
4303 break;
4304 }
4305 case Instruction::ShuffleVector: {
4306 // Ops[0] = Result Type ID
4307 // Ops[1] = Vector 1 ID
4308 // Ops[2] = Vector 2 ID
4309 // Ops[3] ... Ops[n] = Components (Literal Number)
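// Illustrative sketch (IDs assumed):
//   %r = shufflevector <4 x float> %a, <4 x float> %b, <2 x i32> <i32 0, i32 5>
// becomes roughly
//   %r = OpVectorShuffle %v2float %a %b 0 5
// with undef mask elements emitted as the literal 0xFFFFFFFF.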
4310 SPIRVOperandList Ops;
4311
David Neto257c3892018-04-11 13:19:45 -04004312 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4313 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004314
4315 uint64_t NumElements = 0;
4316 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4317 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4318
4319 if (Cst->isNullValue()) {
4320 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004321 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004322 }
4323 } else if (const ConstantDataSequential *CDS =
4324 dyn_cast<ConstantDataSequential>(Cst)) {
4325 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4326 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004327 const auto value = CDS->getElementAsInteger(i);
4328 assert(value <= UINT32_MAX);
4329 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004330 }
4331 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4332 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4333 auto Op = CV->getOperand(i);
4334
4335 uint32_t literal = 0;
4336
4337 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4338 literal = static_cast<uint32_t>(CI->getZExtValue());
4339 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4340 literal = 0xFFFFFFFFu;
4341 } else {
4342 Op->print(errs());
4343 llvm_unreachable("Unsupported element in ConstantVector!");
4344 }
4345
David Neto257c3892018-04-11 13:19:45 -04004346 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004347 }
4348 } else {
4349 Cst->print(errs());
4350 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4351 }
4352 }
4353
David Neto87846742018-04-11 17:36:22 -04004354 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004355 SPIRVInstList.push_back(Inst);
4356 break;
4357 }
4358 case Instruction::ICmp:
4359 case Instruction::FCmp: {
4360 CmpInst *CmpI = cast<CmpInst>(&I);
4361
David Netod4ca2e62017-07-06 18:47:35 -04004362 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004363 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004364 if (isa<PointerType>(ArgTy)) {
4365 CmpI->print(errs());
4366 std::string name = I.getParent()->getParent()->getName();
4367 errs()
4368 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4369 << "in function " << name << "\n";
4370 llvm_unreachable("Pointer equality check is invalid");
4371 break;
4372 }
4373
David Neto257c3892018-04-11 13:19:45 -04004374 // Ops[0] = Result Type ID
4375 // Ops[1] = Operand 1 ID
4376 // Ops[2] = Operand 2 ID
4377 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004378
David Neto257c3892018-04-11 13:19:45 -04004379 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4380 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004381
4382 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004383 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004384 SPIRVInstList.push_back(Inst);
4385 break;
4386 }
4387 case Instruction::Br: {
4388 // The branch instruction is deferred because it needs the IDs of its
4389 // target labels. Record the slot's location in the SPIRVInstructionList.
4390 DeferredInsts.push_back(
4391 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4392 break;
4393 }
4394 case Instruction::Switch: {
4395 I.print(errs());
4396 llvm_unreachable("Unsupported instruction???");
4397 break;
4398 }
4399 case Instruction::IndirectBr: {
4400 I.print(errs());
4401 llvm_unreachable("Unsupported instruction???");
4402 break;
4403 }
4404 case Instruction::PHI: {
4405 // The PHI instruction is deferred because it needs the IDs of its incoming
4406 // values and blocks. Record the slot's location in the SPIRVInstructionList.
4407 DeferredInsts.push_back(
4408 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4409 break;
4410 }
4411 case Instruction::Alloca: {
4412 //
4413 // Generate OpVariable.
4414 //
4415 // Ops[0] : Result Type ID
4416 // Ops[1] : Storage Class
4417 SPIRVOperandList Ops;
4418
David Neto257c3892018-04-11 13:19:45 -04004419 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004420
David Neto87846742018-04-11 17:36:22 -04004421 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004422 SPIRVInstList.push_back(Inst);
4423 break;
4424 }
4425 case Instruction::Load: {
4426 LoadInst *LD = cast<LoadInst>(&I);
4427 //
4428 // Generate OpLoad.
4429 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004430
alan-baker5b86ed72019-02-15 08:26:50 -05004431 if (LD->getType()->isPointerTy()) {
4432 // Loading a pointer requires variable pointers.
4433 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4434 }
David Neto22f144c2017-06-12 14:26:21 -04004435
David Neto0a2f98d2017-09-15 19:38:40 -04004436 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004437 uint32_t PointerID = VMap[LD->getPointerOperand()];
4438
4439 // This is a hack to work around what looks like a driver bug.
4440 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004441 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4442 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004443 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004444 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004445 // Generate a bitwise-and of the original value with itself.
4446 // We should have been able to get away with just an OpCopyObject,
4447 // but we need something more complex to get past certain driver bugs.
4448 // This is ridiculous, but necessary.
4449 // TODO(dneto): Revisit this once drivers fix their bugs.
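// Illustrative shape of the emitted code (types and IDs assumed):
//   %size = OpBitwiseAnd %v3uint %wgsize_value %wgsize_value
// instead of
//   %size = OpLoad %v3uint %wgsize_var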
4450
4451 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004452 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4453 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004454
David Neto87846742018-04-11 17:36:22 -04004455 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004456 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004457 break;
4458 }
4459
4460 // This is the normal path. Generate a load.
4461
David Neto22f144c2017-06-12 14:26:21 -04004462 // Ops[0] = Result Type ID
4463 // Ops[1] = Pointer ID
4464 // Ops[2] ... Ops[n] = Optional Memory Access
4465 //
4466 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004467
David Neto22f144c2017-06-12 14:26:21 -04004468 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004469 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004470
David Neto87846742018-04-11 17:36:22 -04004471 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004472 SPIRVInstList.push_back(Inst);
4473 break;
4474 }
4475 case Instruction::Store: {
4476 StoreInst *ST = cast<StoreInst>(&I);
4477 //
4478 // Generate OpStore.
4479 //
4480
alan-baker5b86ed72019-02-15 08:26:50 -05004481 if (ST->getValueOperand()->getType()->isPointerTy()) {
4482 // Storing a pointer requires variable pointers.
4483 setVariablePointersCapabilities(
4484 ST->getValueOperand()->getType()->getPointerAddressSpace());
4485 }
4486
David Neto22f144c2017-06-12 14:26:21 -04004487 // Ops[0] = Pointer ID
4488 // Ops[1] = Object ID
4489 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4490 //
4491 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004492 SPIRVOperandList Ops;
4493 Ops << MkId(VMap[ST->getPointerOperand()])
4494 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004495
David Neto87846742018-04-11 17:36:22 -04004496 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004497 SPIRVInstList.push_back(Inst);
4498 break;
4499 }
4500 case Instruction::AtomicCmpXchg: {
4501 I.print(errs());
4502 llvm_unreachable("Unsupported instruction???");
4503 break;
4504 }
4505 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004506 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4507
4508 spv::Op opcode;
4509
4510 switch (AtomicRMW->getOperation()) {
4511 default:
4512 I.print(errs());
4513 llvm_unreachable("Unsupported instruction???");
4514 case llvm::AtomicRMWInst::Add:
4515 opcode = spv::OpAtomicIAdd;
4516 break;
4517 case llvm::AtomicRMWInst::Sub:
4518 opcode = spv::OpAtomicISub;
4519 break;
4520 case llvm::AtomicRMWInst::Xchg:
4521 opcode = spv::OpAtomicExchange;
4522 break;
4523 case llvm::AtomicRMWInst::Min:
4524 opcode = spv::OpAtomicSMin;
4525 break;
4526 case llvm::AtomicRMWInst::Max:
4527 opcode = spv::OpAtomicSMax;
4528 break;
4529 case llvm::AtomicRMWInst::UMin:
4530 opcode = spv::OpAtomicUMin;
4531 break;
4532 case llvm::AtomicRMWInst::UMax:
4533 opcode = spv::OpAtomicUMax;
4534 break;
4535 case llvm::AtomicRMWInst::And:
4536 opcode = spv::OpAtomicAnd;
4537 break;
4538 case llvm::AtomicRMWInst::Or:
4539 opcode = spv::OpAtomicOr;
4540 break;
4541 case llvm::AtomicRMWInst::Xor:
4542 opcode = spv::OpAtomicXor;
4543 break;
4544 }
4545
4546 //
4547 // Generate OpAtomic*.
4548 //
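// Illustrative sketch (IDs assumed): "atomicrmw add i32* %p, i32 %v seq_cst"
// becomes roughly
//   %r = OpAtomicIAdd %uint %p %scope_device %semantics %v
// where %scope_device and %semantics are the constant IDs looked up below.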
4549 SPIRVOperandList Ops;
4550
David Neto257c3892018-04-11 13:19:45 -04004551 Ops << MkId(lookupType(I.getType()))
4552 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004553
4554 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004555 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004556 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004557
4558 const auto ConstantMemorySemantics = ConstantInt::get(
4559 IntTy, spv::MemorySemanticsUniformMemoryMask |
4560 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004561 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004562
David Neto257c3892018-04-11 13:19:45 -04004563 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004564
4565 VMap[&I] = nextID;
4566
David Neto87846742018-04-11 17:36:22 -04004567 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004568 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004569 break;
4570 }
4571 case Instruction::Fence: {
4572 I.print(errs());
4573 llvm_unreachable("Unsupported instruction???");
4574 break;
4575 }
4576 case Instruction::Call: {
4577 CallInst *Call = dyn_cast<CallInst>(&I);
4578 Function *Callee = Call->getCalledFunction();
4579
Alan Baker202c8c72018-08-13 13:47:44 -04004580 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004581 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4582 // Generate an OpLoad
4583 SPIRVOperandList Ops;
4584 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004585
David Neto862b7d82018-06-14 18:48:37 -04004586 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4587 << MkId(ResourceVarDeferredLoadCalls[Call]);
4588
4589 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4590 SPIRVInstList.push_back(Inst);
4591 VMap[Call] = load_id;
4592 break;
4593
4594 } else {
4595 // This maps to an OpVariable we've already generated.
4596 // No code is generated for the call.
4597 }
4598 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004599 } else if (Callee->getName().startswith(
4600 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004601 // Don't codegen an instruction here, but instead map this call directly
4602 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004603 int spec_id = static_cast<int>(
4604 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004605 const auto &info = LocalSpecIdInfoMap[spec_id];
4606 VMap[Call] = info.variable_id;
4607 break;
David Neto862b7d82018-06-14 18:48:37 -04004608 }
4609
4610 // Sampler initializers become a load of the corresponding sampler.
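// Illustrative sketch (IDs assumed): a literal-sampler initializer call with
// sampler value S becomes a load from the corresponding module-scope
// sampler variable:
//   %s = OpLoad %sampler %sampler_var_for_S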
4611
Kévin Petitdf71de32019-04-09 14:09:50 +01004612 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004613 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004614 const auto third_param = static_cast<unsigned>(
4615 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4616 auto sampler_value = third_param;
4617 if (clspv::Option::UseSamplerMap()) {
4618 sampler_value = getSamplerMap()[third_param].first;
4619 }
David Neto862b7d82018-06-14 18:48:37 -04004620
4621 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004622 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004623 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004624
David Neto257c3892018-04-11 13:19:45 -04004625 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004626 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004627
David Neto862b7d82018-06-14 18:48:37 -04004628 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004629 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004630 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004631 break;
4632 }
4633
Kévin Petit349c9502019-03-28 17:24:14 +00004634 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004635 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4636 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4637 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004638
Kévin Petit617a76d2019-04-04 13:54:16 +01004639 // If the switch above didn't find an entry, the intrinsic may instead be
4640 // using the name mangling logic.
4641 bool usesMangler = false;
4642 if (opcode == spv::OpNop) {
4643 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4644 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4645 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4646 usesMangler = true;
4647 }
4648 }
4649
Kévin Petit349c9502019-03-28 17:24:14 +00004650 if (opcode != spv::OpNop) {
4651
David Neto22f144c2017-06-12 14:26:21 -04004652 SPIRVOperandList Ops;
4653
Kévin Petit349c9502019-03-28 17:24:14 +00004654 if (!I.getType()->isVoidTy()) {
4655 Ops << MkId(lookupType(I.getType()));
4656 }
David Neto22f144c2017-06-12 14:26:21 -04004657
Kévin Petit617a76d2019-04-04 13:54:16 +01004658 unsigned firstOperand = usesMangler ? 1 : 0;
4659 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004660 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004661 }
4662
Kévin Petit349c9502019-03-28 17:24:14 +00004663 if (!I.getType()->isVoidTy()) {
4664 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004665 }
4666
Kévin Petit349c9502019-03-28 17:24:14 +00004667 SPIRVInstruction *Inst;
4668 if (!I.getType()->isVoidTy()) {
4669 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4670 } else {
4671 Inst = new SPIRVInstruction(opcode, Ops);
4672 }
Kévin Petit8a560882019-03-21 15:24:34 +00004673 SPIRVInstList.push_back(Inst);
4674 break;
4675 }
4676
David Neto22f144c2017-06-12 14:26:21 -04004677 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4678 if (Callee->getName().startswith("spirv.copy_memory")) {
4679 //
4680 // Generate OpCopyMemory.
4681 //
4682
4683 // Ops[0] = Dst ID
4684 // Ops[1] = Src ID
4685 // Ops[2] = Memory Access
4686 // Ops[3] = Alignment
4687
4688 auto IsVolatile =
4689 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4690
4691 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4692 : spv::MemoryAccessMaskNone;
4693
4694 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4695
4696 auto Alignment =
4697 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4698
David Neto257c3892018-04-11 13:19:45 -04004699 SPIRVOperandList Ops;
4700 Ops << MkId(VMap[Call->getArgOperand(0)])
4701 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4702 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004703
David Neto87846742018-04-11 17:36:22 -04004704 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004705
4706 SPIRVInstList.push_back(Inst);
4707
4708 break;
4709 }
4710
David Neto22f144c2017-06-12 14:26:21 -04004711 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4712 // Additionally, OpTypeSampledImage is generated.
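// Illustrative sketch (IDs assumed):
//   %si = OpSampledImage %sampled_image_ty %image %sampler
//   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
// For integer images an extra OpBitcast of the result is emitted, as
// handled below.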
alan-bakerf67468c2019-11-25 15:51:49 -05004713 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004714 //
4715 // Generate OpSampledImage.
4716 //
4717 // Ops[0] = Result Type ID
4718 // Ops[1] = Image ID
4719 // Ops[2] = Sampler ID
4720 //
4721 SPIRVOperandList Ops;
4722
4723 Value *Image = Call->getArgOperand(0);
4724 Value *Sampler = Call->getArgOperand(1);
4725 Value *Coordinate = Call->getArgOperand(2);
4726
4727 TypeMapType &OpImageTypeMap = getImageTypeMap();
4728 Type *ImageTy = Image->getType()->getPointerElementType();
4729 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004730 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004731 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004732
4733 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004734
4735 uint32_t SampledImageID = nextID;
4736
David Neto87846742018-04-11 17:36:22 -04004737 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004738 SPIRVInstList.push_back(Inst);
4739
4740 //
4741 // Generate OpImageSampleExplicitLod.
4742 //
4743 // Ops[0] = Result Type ID
4744 // Ops[1] = Sampled Image ID
4745 // Ops[2] = Coordinate ID
4746 // Ops[3] = Image Operands Type ID
4747 // Ops[4] ... Ops[n] = Operands ID
4748 //
4749 Ops.clear();
4750
alan-bakerf67468c2019-11-25 15:51:49 -05004751 const bool is_int_image = IsIntImageType(Image->getType());
4752 uint32_t result_type = 0;
4753 if (is_int_image) {
4754 result_type = v4int32ID;
4755 } else {
4756 result_type = lookupType(Call->getType());
4757 }
4758
4759 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4760 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004761
4762 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004763 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004764
alan-bakerf67468c2019-11-25 15:51:49 -05004765 uint32_t final_id = nextID++;
4766 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004767
alan-bakerf67468c2019-11-25 15:51:49 -05004768 uint32_t image_id = final_id;
4769 if (is_int_image) {
4770 // Int image requires a bitcast from v4int to v4uint.
4771 image_id = nextID++;
4772 }
4773
4774 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004775 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004776
4777 if (is_int_image) {
4778 // Generate the bitcast.
4779 Ops.clear();
4780 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4781 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4782 SPIRVInstList.push_back(Inst);
4783 }
David Neto22f144c2017-06-12 14:26:21 -04004784 break;
4785 }
4786
alan-bakerf67468c2019-11-25 15:51:49 -05004787 // write_image is mapped to OpImageWrite.
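// Illustrative sketch (IDs assumed):
//   OpImageWrite %image %coord %texel
// For integer images the texel is first run through an OpBitcast, as
// handled below.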
4788 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004789 //
4790 // Generate OpImageWrite.
4791 //
4792 // Ops[0] = Image ID
4793 // Ops[1] = Coordinate ID
4794 // Ops[2] = Texel ID
4795 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4796 // Ops[4] ... Ops[n] = (Optional) Operands ID
4797 //
4798 SPIRVOperandList Ops;
4799
4800 Value *Image = Call->getArgOperand(0);
4801 Value *Coordinate = Call->getArgOperand(1);
4802 Value *Texel = Call->getArgOperand(2);
4803
4804 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004805 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004806 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004807
4808 const bool is_int_image = IsIntImageType(Image->getType());
4809 if (is_int_image) {
4810 // Generate a bitcast to v4int and use it as the texel value.
4811 uint32_t castID = nextID++;
4812 Ops << MkId(v4int32ID) << MkId(TexelID);
4813 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4814 SPIRVInstList.push_back(cast);
4815 Ops.clear();
4816 TexelID = castID;
4817 }
David Neto257c3892018-04-11 13:19:45 -04004818 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004819
David Neto87846742018-04-11 17:36:22 -04004820 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004821 SPIRVInstList.push_back(Inst);
4822 break;
4823 }
4824
alan-bakerce179f12019-12-06 19:02:22 -05004825 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4826 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004827 //
alan-bakerce179f12019-12-06 19:02:22 -05004828 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004829 //
4830 // Ops[0] = Image ID
4831 //
alan-bakerce179f12019-12-06 19:02:22 -05004832 // Result type has components equal to the dimensionality of the image,
4833 // plus 1 if the image is arrayed.
4834 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004835 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04004836 SPIRVOperandList Ops;
4837
4838 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004839 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4840 uint32_t SizesTypeID = 0;
4841
David Neto5c22a252018-03-15 16:07:41 -04004842 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004843 const uint32_t dim = ImageDimensionality(Image->getType());
alan-bakerf906d2b2019-12-10 11:26:23 -05004844 // TODO(alan-baker): fix component calculation when arrayed images are
4845 // supported.
alan-bakerce179f12019-12-06 19:02:22 -05004846 const uint32_t components = dim;
4847 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004848 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4849 } else {
4850 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4851 }
David Neto5c22a252018-03-15 16:07:41 -04004852 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004853 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004854 spv::Op query_opcode = spv::OpImageQuerySize;
4855 if (clspv::IsSampledImageType(Image->getType())) {
4856 query_opcode = spv::OpImageQuerySizeLod;
4857 // Need explicit 0 for Lod operand.
4858 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4859 Ops << MkId(VMap[CstInt0]);
4860 }
David Neto5c22a252018-03-15 16:07:41 -04004861
4862 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004863 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004864 SPIRVInstList.push_back(QueryInst);
4865
alan-bakerce179f12019-12-06 19:02:22 -05004866 // May require an extra instruction to create the appropriate result of
4867 // the builtin function.
4868 if (clspv::IsGetImageDim(Callee)) {
4869 if (dim == 3) {
4870 // get_image_dim returns an int4 for 3D images.
4871 //
4872 // Reset value map entry since we generated an intermediate
4873 // instruction.
4874 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004875
alan-bakerce179f12019-12-06 19:02:22 -05004876 // Implement:
4877 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4878 Ops.clear();
4879 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4880 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004881
alan-bakerce179f12019-12-06 19:02:22 -05004882 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4883 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004884
alan-bakerce179f12019-12-06 19:02:22 -05004885 auto *Inst =
4886 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4887 SPIRVInstList.push_back(Inst);
4888 } else if (dim != components) {
4889 // get_image_dim returns an int2 regardless of the arrayedness of the
4890 // image. If the image is arrayed, an element must be dropped from the
4891 // query result.
4892 //
4893 // Reset value map entry since we generated an intermediate
4894 // instruction.
4895 VMap[&I] = nextID;
4896
4897 // Implement:
4898 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4899 Ops.clear();
4900 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4901 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4902
4903 auto *Inst =
4904 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4905 SPIRVInstList.push_back(Inst);
4906 }
4907 } else if (components > 1) {
4908 // Reset value map entry since we generated an intermediate instruction.
4909 VMap[&I] = nextID;
4910
4911 // Implement:
4912 // %result = OpCompositeExtract %uint %sizes <component number>
4913 Ops.clear();
4914 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4915
4916 uint32_t component = 0;
4917 if (IsGetImageHeight(Callee))
4918 component = 1;
4919 else if (IsGetImageDepth(Callee))
4920 component = 2;
4921 Ops << MkNum(component);
4922
4923 auto *Inst =
4924 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4925 SPIRVInstList.push_back(Inst);
4926 }
David Neto5c22a252018-03-15 16:07:41 -04004927 break;
4928 }
4929
David Neto22f144c2017-06-12 14:26:21 -04004930 // The call instruction is deferred because it needs the callee's ID.
4931 // Record the slot's location in the SPIRVInstructionList.
4932 DeferredInsts.push_back(
4933 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4934
David Neto3fbb4072017-10-16 11:28:14 -04004935 // Check whether the implementation of this call uses an extended
4936 // instruction plus one more value-producing instruction. If so, then
4937 // reserve the id for the extra value-producing slot.
4938 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4939 if (EInst != kGlslExtInstBad) {
4940 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004941 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004942 VMap[&I] = nextID;
4943 nextID++;
4944 }
4945 break;
4946 }
4947 case Instruction::Ret: {
4948 unsigned NumOps = I.getNumOperands();
4949 if (NumOps == 0) {
4950 //
4951 // Generate OpReturn.
4952 //
David Netoef5ba2b2019-12-20 08:35:54 -05004953 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04004954 } else {
4955 //
4956 // Generate OpReturnValue.
4957 //
4958
4959 // Ops[0] = Return Value ID
4960 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004961
4962 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004963
David Neto87846742018-04-11 17:36:22 -04004964 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004965 SPIRVInstList.push_back(Inst);
4966 break;
4967 }
4968 break;
4969 }
4970 }
4971}
4972
4973void SPIRVProducerPass::GenerateFuncEpilogue() {
4974 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4975
4976 //
4977 // Generate OpFunctionEnd
4978 //
4979
David Netoef5ba2b2019-12-20 08:35:54 -05004980 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004981 SPIRVInstList.push_back(Inst);
4982}
4983
4984bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004985 // Don't specialize <4 x i8> if i8 is generally supported.
4986 if (clspv::Option::Int8Support())
4987 return false;
4988
David Neto22f144c2017-06-12 14:26:21 -04004989 LLVMContext &Context = Ty->getContext();
4990 if (Ty->isVectorTy()) {
4991 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4992 Ty->getVectorNumElements() == 4) {
4993 return true;
4994 }
4995 }
4996
4997 return false;
4998}
4999
5000void SPIRVProducerPass::HandleDeferredInstruction() {
5001 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5002 ValueMapType &VMap = getValueMap();
5003 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5004
5005 for (auto DeferredInst = DeferredInsts.rbegin();
5006 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5007 Value *Inst = std::get<0>(*DeferredInst);
5008 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5009 if (InsertPoint != SPIRVInstList.end()) {
5010 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5011 ++InsertPoint;
5012 }
5013 }
5014
5015 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005016 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005017 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005018 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005019 //
5020 // Generate OpLoopMerge.
5021 //
5022 // Ops[0] = Merge Block ID
5023 // Ops[1] = Continue Target ID
5024 // Ops[2] = Loop Control
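// Illustrative sketch (block IDs assumed): the loop header's branch is
// preceded by the merge declaration, e.g.
//   OpLoopMerge %merge_bb %continue_bb None
//   OpBranchConditional %cond %body_bb %merge_bb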
5025 SPIRVOperandList Ops;
5026
alan-baker06cad652019-12-03 17:56:47 -05005027 auto MergeBB = MergeBlocks[BrBB];
5028 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005029 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005030 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005031 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005032 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005033
David Neto87846742018-04-11 17:36:22 -04005034 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005035 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005036 } else if (MergeBlocks.count(BrBB)) {
5037 //
5038 // Generate OpSelectionMerge.
5039 //
5040 // Ops[0] = Merge Block ID
5041 // Ops[1] = Selection Control
5042 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005043
alan-baker06cad652019-12-03 17:56:47 -05005044 auto MergeBB = MergeBlocks[BrBB];
5045 uint32_t MergeBBID = VMap[MergeBB];
5046 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005047
alan-baker06cad652019-12-03 17:56:47 -05005048 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5049 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005050 }
5051
5052 if (Br->isConditional()) {
5053 //
5054 // Generate OpBranchConditional.
5055 //
5056 // Ops[0] = Condition ID
5057 // Ops[1] = True Label ID
5058 // Ops[2] = False Label ID
5059 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5060 SPIRVOperandList Ops;
5061
5062 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005063 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005064 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005065
5066 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005067
David Neto87846742018-04-11 17:36:22 -04005068 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005069 SPIRVInstList.insert(InsertPoint, BrInst);
5070 } else {
5071 //
5072 // Generate OpBranch.
5073 //
5074 // Ops[0] = Target Label ID
5075 SPIRVOperandList Ops;
5076
5077 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005078 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005079
David Neto87846742018-04-11 17:36:22 -04005080 SPIRVInstList.insert(InsertPoint,
5081 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005082 }
5083 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005084 if (PHI->getType()->isPointerTy()) {
5085 // OpPhi on pointers requires variable pointers.
5086 setVariablePointersCapabilities(
5087 PHI->getType()->getPointerAddressSpace());
5088 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5089 setVariablePointers(true);
5090 }
5091 }
5092
David Neto22f144c2017-06-12 14:26:21 -04005093 //
5094 // Generate OpPhi.
5095 //
5096 // Ops[0] = Result Type ID
5097 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5098 SPIRVOperandList Ops;
5099
David Neto257c3892018-04-11 13:19:45 -04005100 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005101
David Neto22f144c2017-06-12 14:26:21 -04005102 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5103 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005104 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005105 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005106 }
5107
5108 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005109 InsertPoint,
5110 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005111 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5112 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005113 auto callee_name = Callee->getName();
5114 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005115
5116 if (EInst) {
5117 uint32_t &ExtInstImportID = getOpExtInstImportID();
5118
5119 //
5120 // Generate OpExtInst.
5121 //
5122
5123 // Ops[0] = Result Type ID
5124 // Ops[1] = Set ID (OpExtInstImport ID)
5125 // Ops[2] = Instruction Number (Literal Number)
5126 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5127 SPIRVOperandList Ops;
5128
David Neto862b7d82018-06-14 18:48:37 -04005129 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5130 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005131
David Neto22f144c2017-06-12 14:26:21 -04005132 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5133 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005134 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005135 }
5136
David Neto87846742018-04-11 17:36:22 -04005137 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5138 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005139 SPIRVInstList.insert(InsertPoint, ExtInst);
5140
David Neto3fbb4072017-10-16 11:28:14 -04005141 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5142 if (IndirectExtInst != kGlslExtInstBad) {
5143 // Generate one more instruction that uses the result of the extended
5144 // instruction. Its result id is one more than the id of the
5145 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005146 LLVMContext &Context =
5147 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005148
David Neto3fbb4072017-10-16 11:28:14 -04005149 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5150 &VMap, &SPIRVInstList, &InsertPoint](
5151 spv::Op opcode, Constant *constant) {
5152 //
5153 // Generate instruction like:
5154 // result = opcode constant <extinst-result>
5155 //
5156 // Ops[0] = Result Type ID
5157 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5158 // Ops[2] = Operand 1 ;; the result of the extended instruction
5159 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005160
David Neto3fbb4072017-10-16 11:28:14 -04005161 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005162 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005163
5164 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5165 constant = ConstantVector::getSplat(
5166 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5167 }
David Neto257c3892018-04-11 13:19:45 -04005168 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005169
5170 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005171 InsertPoint, new SPIRVInstruction(
5172 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005173 };
5174
5175 switch (IndirectExtInst) {
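          // clz(x) is computed as 31 - FindUMsb(x) for 32-bit operands, so
          // subtract the extended instruction's result from 31.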
5176 case glsl::ExtInstFindUMsb: // Implementing clz
5177 generate_extra_inst(
5178 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5179 break;
5180 case glsl::ExtInstAcos: // Implementing acospi
5181 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005182 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005183 case glsl::ExtInstAtan2: // Implementing atan2pi
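          // The *pi variants scale the radian result by 1/pi.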
5184 generate_extra_inst(
5185 spv::OpFMul,
5186 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5187 break;
5188
5189 default:
5190 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005191 }
David Neto22f144c2017-06-12 14:26:21 -04005192 }
David Neto3fbb4072017-10-16 11:28:14 -04005193
alan-bakerb39c8262019-03-08 14:03:37 -05005194 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005195 //
5196 // Generate OpBitCount
5197 //
5198 // Ops[0] = Result Type ID
5199 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005200 SPIRVOperandList Ops;
5201 Ops << MkId(lookupType(Call->getType()))
5202 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005203
5204 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005205 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005206 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005207
David Neto862b7d82018-06-14 18:48:37 -04005208 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005209
5210 // Generate an OpCompositeConstruct
5211 SPIRVOperandList Ops;
5212
5213 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005214 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005215
5216 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005217 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005218 }
5219
5220 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005221 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5222 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005223
Alan Baker202c8c72018-08-13 13:47:44 -04005224 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5225
5226 // We have already mapped the call's result value to an ID.
5227 // Don't generate any code now.
5228
5229 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005230
5231 // We have already mapped the call's result value to an ID.
5232 // Don't generate any code now.
5233
David Neto22f144c2017-06-12 14:26:21 -04005234 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005235 if (Call->getType()->isPointerTy()) {
5236 // Functions returning pointers require variable pointers.
5237 setVariablePointersCapabilities(
5238 Call->getType()->getPointerAddressSpace());
5239 }
5240
David Neto22f144c2017-06-12 14:26:21 -04005241 //
5242 // Generate OpFunctionCall.
5243 //
5244
5245 // Ops[0] = Result Type ID
5246 // Ops[1] = Callee Function ID
5247 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5248 SPIRVOperandList Ops;
5249
David Neto862b7d82018-06-14 18:48:37 -04005250 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005251
5252 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005253 if (CalleeID == 0) {
5254 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005255 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005256 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5257 // causes an infinite loop. Instead, go ahead and generate
5258 // the bad function call. A validator will catch the 0-Id.
5259 // llvm_unreachable("Can't translate function call");
5260 }
David Neto22f144c2017-06-12 14:26:21 -04005261
David Neto257c3892018-04-11 13:19:45 -04005262 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005263
David Neto22f144c2017-06-12 14:26:21 -04005264 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5265 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005266 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005267 auto *operand_type = operand->getType();
5268 // Images and samplers can be passed as function parameters without
5269 // variable pointers.
5270 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5271 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005272 auto sc =
5273 GetStorageClass(operand->getType()->getPointerAddressSpace());
5274 if (sc == spv::StorageClassStorageBuffer) {
5275 // Passing SSBO by reference requires variable pointers storage
5276 // buffer.
5277 setVariablePointersStorageBuffer(true);
5278 } else if (sc == spv::StorageClassWorkgroup) {
5279 // Workgroup references require variable pointers if they are not
5280 // memory object declarations.
5281 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5282 // Workgroup accessor represents a variable reference.
5283 if (!operand_call->getCalledFunction()->getName().startswith(
5284 clspv::WorkgroupAccessorFunction()))
5285 setVariablePointers(true);
5286 } else {
5287 // Arguments are function parameters.
5288 if (!isa<Argument>(operand))
5289 setVariablePointers(true);
5290 }
5291 }
5292 }
5293 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005294 }
5295
David Neto87846742018-04-11 17:36:22 -04005296 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5297 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005298 SPIRVInstList.insert(InsertPoint, CallInst);
5299 }
5300 }
5301 }
5302}
5303
David Neto1a1a0582017-07-07 12:01:44 -04005304void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005305 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005306 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005307 }
David Neto1a1a0582017-07-07 12:01:44 -04005308
5309 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005310
5311 // Find an iterator pointing just past the last decoration.
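  // SPIR-V requires all decorations to be grouped early in the module, so
  // the new ArrayStride and SpecId decorations below are inserted there
  // rather than appended at the end of the instruction list.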
5312 bool seen_decorations = false;
5313 auto DecoInsertPoint =
5314 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5315 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5316 const bool is_decoration =
5317 Inst->getOpcode() == spv::OpDecorate ||
5318 Inst->getOpcode() == spv::OpMemberDecorate;
5319 if (is_decoration) {
5320 seen_decorations = true;
5321 return false;
5322 } else {
5323 return seen_decorations;
5324 }
5325 });
5326
David Netoc6f3ab22018-04-06 18:02:31 -04005327 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5328 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005329 for (auto *type : getTypesNeedingArrayStride()) {
5330 Type *elemTy = nullptr;
5331 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5332 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005333 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005334 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005335 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005336 elemTy = seqTy->getSequentialElementType();
5337 } else {
5338 errs() << "Unhandled strided type " << *type << "\n";
5339 llvm_unreachable("Unhandled strided type");
5340 }
David Neto1a1a0582017-07-07 12:01:44 -04005341
5342 // Ops[0] = Target ID
5343 // Ops[1] = Decoration (ArrayStride)
5344 // Ops[2] = Stride number (Literal Number)
5345 SPIRVOperandList Ops;
5346
David Neto85082642018-03-24 06:55:20 -07005347 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005348 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005349
5350 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5351 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005352
David Neto87846742018-04-11 17:36:22 -04005353 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005354 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5355 }
David Netoc6f3ab22018-04-06 18:02:31 -04005356
5357 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005358 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5359 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005360 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005361 SPIRVOperandList Ops;
5362 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5363 << MkNum(arg_info.spec_id);
5364 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005365 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005366 }
David Neto1a1a0582017-07-07 12:01:44 -04005367}
5368
David Neto22f144c2017-06-12 14:26:21 -04005369glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
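  // Maps Itanium-mangled OpenCL builtin names to GLSL.std.450 extended
  // instructions; for example, "_Z3absi" is the mangling of abs(int).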
5370 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005371 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5372 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5373 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5374 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005375 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5376 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5377 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5378 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005379 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5380 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5381 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5382 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005383 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5384 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5385 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5386 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005387 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5388 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5389 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5390 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5391 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5392 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5393 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5394 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005395 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5396 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5397 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5398 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5399 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5400 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5401 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5402 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005403 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5404 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5405 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5406 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5407 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5408 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5409 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5410 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005411 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5412 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5413 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5414 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5415 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5416 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5417 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5418 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005419 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5420 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5421 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5422 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-baker49bb5fb2020-01-15 08:22:13 -05005423 .Case("_Z5clampDhDhDh", glsl::ExtInst::ExtInstFClamp)
5424 .Case("_Z5clampDv2_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5425 .Case("_Z5clampDv3_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5426 .Case("_Z5clampDv4_DhS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005427 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5428 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5429 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5430 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5431 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5432 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5433 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5434 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005435 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5436 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5437 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5438 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5439 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5440 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5441 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5442 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005443 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5444 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5445 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5446 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5447 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5448 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5449 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5450 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005451 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5452 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5453 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5454 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5455 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5456 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5457 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5458 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005459 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5460 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5461 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5462 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
alan-baker49bb5fb2020-01-15 08:22:13 -05005463 .Case("_Z3maxDhDh", glsl::ExtInst::ExtInstFMax)
5464 .Case("_Z3maxDv2_DhS_", glsl::ExtInst::ExtInstFMax)
5465 .Case("_Z3maxDv3_DhS_", glsl::ExtInst::ExtInstFMax)
5466 .Case("_Z3maxDv4_DhS_", glsl::ExtInst::ExtInstFMax)
David Neto22f144c2017-06-12 14:26:21 -04005467 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005468 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5469 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5470 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5471 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5472 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5473 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5474 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5475 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005476 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5477 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5478 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5479 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5480 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5481 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5482 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5483 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005484 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5485 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5486 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5487 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5488 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5489 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5490 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5491 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005492 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5493 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5494 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5495 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5496 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5497 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5498 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5499 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005500 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5501 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5502 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5503 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
alan-baker49bb5fb2020-01-15 08:22:13 -05005504 .Case("_Z3minDhDh", glsl::ExtInst::ExtInstFMin)
5505 .Case("_Z3minDv2_DhS_", glsl::ExtInst::ExtInstFMin)
5506 .Case("_Z3minDv3_DhS_", glsl::ExtInst::ExtInstFMin)
5507 .Case("_Z3minDv4_DhS_", glsl::ExtInst::ExtInstFMin)
David Neto22f144c2017-06-12 14:26:21 -04005508 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5509 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5510 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5511 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5512 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5513 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5514 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5515 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5516 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5517 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5518 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5519 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5520 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5521 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5522 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5523 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5524 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5525 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5526 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5527 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5528 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5529 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5530 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5531 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5532 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5533 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5534 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5535 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5536 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5537 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5538 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5539 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5540 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5541 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5542 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5543 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5544 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005545 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005546 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5547 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5548 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5549 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5550 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5551 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5552 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5553 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5554 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5555 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5556 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5557 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5558 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5559 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5560 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5561 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5562 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005563 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005564 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005565 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005566 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005567 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005568 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5569 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005570 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005571 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5572 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5573 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005574 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5575 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5576 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5577 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005578 .Default(kGlslExtInstBad);
5579}
5580
5581glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5582 // Check indirect cases.
5583 return StringSwitch<glsl::ExtInst>(Name)
5584 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5586      // Use exact match on the float argument because these need a
5587      // multiply by a constant of the right floating-point type.
5587 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5588 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5589 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5590 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5591 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5592 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5593 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5594 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005595 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5596 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5597 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5598 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005599 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5600 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5601 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5602 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5603 .Default(kGlslExtInstBad);
5604}
5605
alan-bakerb6b09dc2018-11-08 16:59:28 -05005606glsl::ExtInst
5607SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005608 auto direct = getExtInstEnum(Name);
5609 if (direct != kGlslExtInstBad)
5610 return direct;
5611 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005612}
5613
David Neto22f144c2017-06-12 14:26:21 -04005614void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005615 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005616}
5617
5618void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5619 WriteOneWord(Inst->getResultID());
5620}
5621
5622void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5623 // High 16 bit : Word Count
5624 // Low 16 bit : Opcode
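  // For example, OpIAdd (opcode 128) with a total word count of 5 is
  // encoded as 0x00050080.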
5625 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005626 const uint32_t count = Inst->getWordCount();
5627 if (count > 65535) {
5628 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5629 llvm_unreachable("Word count too high");
5630 }
David Neto22f144c2017-06-12 14:26:21 -04005631 Word |= Inst->getWordCount() << 16;
5632 WriteOneWord(Word);
5633}
5634
David Netoef5ba2b2019-12-20 08:35:54 -05005635void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005636 SPIRVOperandType OpTy = Op->getType();
5637 switch (OpTy) {
5638 default: {
5639 llvm_unreachable("Unsupported SPIRV Operand Type???");
5640 break;
5641 }
5642 case SPIRVOperandType::NUMBERID: {
5643 WriteOneWord(Op->getNumID());
5644 break;
5645 }
5646 case SPIRVOperandType::LITERAL_STRING: {
5647 std::string Str = Op->getLiteralStr();
5648 const char *Data = Str.c_str();
5649 size_t WordSize = Str.size() / 4;
5650 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5651 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5652 }
5653
5654 uint32_t Remainder = Str.size() % 4;
5655 uint32_t LastWord = 0;
5656 if (Remainder) {
5657 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5658 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5659 }
5660 }
5661
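    // The last word is always written: it holds any remaining bytes and
    // guarantees the nul terminator required for SPIR-V literal strings.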
5662 WriteOneWord(LastWord);
5663 break;
5664 }
5665 case SPIRVOperandType::LITERAL_INTEGER:
5666 case SPIRVOperandType::LITERAL_FLOAT: {
5667 auto LiteralNum = Op->getLiteralNum();
5668    // TODO: Handle LiteralNum carefully.
5669 for (auto Word : LiteralNum) {
5670 WriteOneWord(Word);
5671 }
5672 break;
5673 }
5674 }
5675}
5676
5677void SPIRVProducerPass::WriteSPIRVBinary() {
5678 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5679
5680 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005681 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005682 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5683
5684 switch (Opcode) {
5685 default: {
David Neto5c22a252018-03-15 16:07:41 -04005686 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005687 llvm_unreachable("Unsupported SPIRV instruction");
5688 break;
5689 }
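    // These instructions produce no result id: write the word count and
    // opcode, then every operand in order.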
5690 case spv::OpCapability:
5691 case spv::OpExtension:
5692 case spv::OpMemoryModel:
5693 case spv::OpEntryPoint:
5694 case spv::OpExecutionMode:
5695 case spv::OpSource:
5696 case spv::OpDecorate:
5697 case spv::OpMemberDecorate:
5698 case spv::OpBranch:
5699 case spv::OpBranchConditional:
5700 case spv::OpSelectionMerge:
5701 case spv::OpLoopMerge:
5702 case spv::OpStore:
5703 case spv::OpImageWrite:
5704 case spv::OpReturnValue:
5705 case spv::OpControlBarrier:
5706 case spv::OpMemoryBarrier:
5707 case spv::OpReturn:
5708 case spv::OpFunctionEnd:
5709 case spv::OpCopyMemory: {
5710 WriteWordCountAndOpcode(Inst);
5711 for (uint32_t i = 0; i < Ops.size(); i++) {
5712 WriteOperand(Ops[i]);
5713 }
5714 break;
5715 }
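    // These instructions encode their result id immediately after the
    // opcode word, followed by the remaining operands.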
5716 case spv::OpTypeBool:
5717 case spv::OpTypeVoid:
5718 case spv::OpTypeSampler:
5719 case spv::OpLabel:
5720 case spv::OpExtInstImport:
5721 case spv::OpTypePointer:
5722 case spv::OpTypeRuntimeArray:
5723 case spv::OpTypeStruct:
5724 case spv::OpTypeImage:
5725 case spv::OpTypeSampledImage:
5726 case spv::OpTypeInt:
5727 case spv::OpTypeFloat:
5728 case spv::OpTypeArray:
5729 case spv::OpTypeVector:
5730 case spv::OpTypeFunction: {
5731 WriteWordCountAndOpcode(Inst);
5732 WriteResultID(Inst);
5733 for (uint32_t i = 0; i < Ops.size(); i++) {
5734 WriteOperand(Ops[i]);
5735 }
5736 break;
5737 }
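    // These instructions take a result type as their first operand; the
    // result id is written between the result type and the remaining
    // operands.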
5738 case spv::OpFunction:
5739 case spv::OpFunctionParameter:
5740 case spv::OpAccessChain:
5741 case spv::OpPtrAccessChain:
5742 case spv::OpInBoundsAccessChain:
5743 case spv::OpUConvert:
5744 case spv::OpSConvert:
5745 case spv::OpConvertFToU:
5746 case spv::OpConvertFToS:
5747 case spv::OpConvertUToF:
5748 case spv::OpConvertSToF:
5749 case spv::OpFConvert:
5750 case spv::OpConvertPtrToU:
5751 case spv::OpConvertUToPtr:
5752 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005753 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005754 case spv::OpIAdd:
5755 case spv::OpFAdd:
5756 case spv::OpISub:
5757 case spv::OpFSub:
5758 case spv::OpIMul:
5759 case spv::OpFMul:
5760 case spv::OpUDiv:
5761 case spv::OpSDiv:
5762 case spv::OpFDiv:
5763 case spv::OpUMod:
5764 case spv::OpSRem:
5765 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005766 case spv::OpUMulExtended:
5767 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005768 case spv::OpBitwiseOr:
5769 case spv::OpBitwiseXor:
5770 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005771 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005772 case spv::OpShiftLeftLogical:
5773 case spv::OpShiftRightLogical:
5774 case spv::OpShiftRightArithmetic:
5775 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005776 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005777 case spv::OpCompositeExtract:
5778 case spv::OpVectorExtractDynamic:
5779 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005780 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005781 case spv::OpVectorInsertDynamic:
5782 case spv::OpVectorShuffle:
5783 case spv::OpIEqual:
5784 case spv::OpINotEqual:
5785 case spv::OpUGreaterThan:
5786 case spv::OpUGreaterThanEqual:
5787 case spv::OpULessThan:
5788 case spv::OpULessThanEqual:
5789 case spv::OpSGreaterThan:
5790 case spv::OpSGreaterThanEqual:
5791 case spv::OpSLessThan:
5792 case spv::OpSLessThanEqual:
5793 case spv::OpFOrdEqual:
5794 case spv::OpFOrdGreaterThan:
5795 case spv::OpFOrdGreaterThanEqual:
5796 case spv::OpFOrdLessThan:
5797 case spv::OpFOrdLessThanEqual:
5798 case spv::OpFOrdNotEqual:
5799 case spv::OpFUnordEqual:
5800 case spv::OpFUnordGreaterThan:
5801 case spv::OpFUnordGreaterThanEqual:
5802 case spv::OpFUnordLessThan:
5803 case spv::OpFUnordLessThanEqual:
5804 case spv::OpFUnordNotEqual:
5805 case spv::OpExtInst:
5806 case spv::OpIsInf:
5807 case spv::OpIsNan:
5808 case spv::OpAny:
5809 case spv::OpAll:
5810 case spv::OpUndef:
5811 case spv::OpConstantNull:
5812 case spv::OpLogicalOr:
5813 case spv::OpLogicalAnd:
5814 case spv::OpLogicalNot:
5815 case spv::OpLogicalNotEqual:
5816 case spv::OpConstantComposite:
5817 case spv::OpSpecConstantComposite:
5818 case spv::OpConstantTrue:
5819 case spv::OpConstantFalse:
5820 case spv::OpConstant:
5821 case spv::OpSpecConstant:
5822 case spv::OpVariable:
5823 case spv::OpFunctionCall:
5824 case spv::OpSampledImage:
5825 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005826 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005827 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005828 case spv::OpSelect:
5829 case spv::OpPhi:
5830 case spv::OpLoad:
5831 case spv::OpAtomicIAdd:
5832 case spv::OpAtomicISub:
5833 case spv::OpAtomicExchange:
5834 case spv::OpAtomicIIncrement:
5835 case spv::OpAtomicIDecrement:
5836 case spv::OpAtomicCompareExchange:
5837 case spv::OpAtomicUMin:
5838 case spv::OpAtomicSMin:
5839 case spv::OpAtomicUMax:
5840 case spv::OpAtomicSMax:
5841 case spv::OpAtomicAnd:
5842 case spv::OpAtomicOr:
5843 case spv::OpAtomicXor:
5844 case spv::OpDot: {
5845 WriteWordCountAndOpcode(Inst);
5846 WriteOperand(Ops[0]);
5847 WriteResultID(Inst);
5848 for (uint32_t i = 1; i < Ops.size(); i++) {
5849 WriteOperand(Ops[i]);
5850 }
5851 break;
5852 }
5853 }
5854 }
5855}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005856
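// Returns true if |type| can be default-initialized (e.g. with
// OpConstantNull); images and samplers, and any type containing them, are
// not nullable.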
alan-bakerb6b09dc2018-11-08 16:59:28 -05005857bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005858 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005859 case Type::HalfTyID:
5860 case Type::FloatTyID:
5861 case Type::DoubleTyID:
5862 case Type::IntegerTyID:
5863 case Type::VectorTyID:
5864 return true;
5865 case Type::PointerTyID: {
5866 const PointerType *pointer_type = cast<PointerType>(type);
5867 if (pointer_type->getPointerAddressSpace() !=
5868 AddressSpace::UniformConstant) {
5869 auto pointee_type = pointer_type->getPointerElementType();
5870 if (pointee_type->isStructTy() &&
5871 cast<StructType>(pointee_type)->isOpaque()) {
5872 // Images and samplers are not nullable.
5873 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005874 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005875 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005876 return true;
5877 }
5878 case Type::ArrayTyID:
5879 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5880 case Type::StructTyID: {
5881 const StructType *struct_type = cast<StructType>(type);
5882 // Images and samplers are not nullable.
5883 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005884 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005885 for (const auto element : struct_type->elements()) {
5886 if (!IsTypeNullable(element))
5887 return false;
5888 }
5889 return true;
5890 }
5891 default:
5892 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005893 }
5894}
Alan Bakerfcda9482018-10-02 17:09:59 -04005895
5896void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5897 if (auto *offsets_md =
5898 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5899    // Metadata is stored as key-value pair operands. The first element of each
5900 // operand is the type and the second is a vector of offsets.
5901 for (const auto *operand : offsets_md->operands()) {
5902 const auto *pair = cast<MDTuple>(operand);
5903 auto *type =
5904 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5905 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5906 std::vector<uint32_t> offsets;
5907 for (const Metadata *offset_md : offset_vector->operands()) {
5908 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005909 offsets.push_back(static_cast<uint32_t>(
5910 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005911 }
5912 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5913 }
5914 }
5915
5916 if (auto *sizes_md =
5917 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5918 // Metadata is stored as key-value pair operands. The first element of each
5919 // operand is the type and the second is a triple of sizes: type size in
5920 // bits, store size and alloc size.
5921 for (const auto *operand : sizes_md->operands()) {
5922 const auto *pair = cast<MDTuple>(operand);
5923 auto *type =
5924 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5925 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5926 uint64_t type_size_in_bits =
5927 cast<ConstantInt>(
5928 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5929 ->getZExtValue();
5930 uint64_t type_store_size =
5931 cast<ConstantInt>(
5932 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5933 ->getZExtValue();
5934 uint64_t type_alloc_size =
5935 cast<ConstantInt>(
5936 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5937 ->getZExtValue();
5938 RemappedUBOTypeSizes.insert(std::make_pair(
5939 type, std::make_tuple(type_size_in_bits, type_store_size,
5940 type_alloc_size)));
5941 }
5942 }
5943}
5944
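// The size queries below first consult the remapped UBO layout data
// gathered in PopulateUBOTypeMaps and fall back to the module's DataLayout.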
5945uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5946 const DataLayout &DL) {
5947 auto iter = RemappedUBOTypeSizes.find(type);
5948 if (iter != RemappedUBOTypeSizes.end()) {
5949 return std::get<0>(iter->second);
5950 }
5951
5952 return DL.getTypeSizeInBits(type);
5953}
5954
5955uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5956 auto iter = RemappedUBOTypeSizes.find(type);
5957 if (iter != RemappedUBOTypeSizes.end()) {
5958 return std::get<1>(iter->second);
5959 }
5960
5961 return DL.getTypeStoreSize(type);
5962}
5963
5964uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5965 auto iter = RemappedUBOTypeSizes.find(type);
5966 if (iter != RemappedUBOTypeSizes.end()) {
5967 return std::get<2>(iter->second);
5968 }
5969
5970 return DL.getTypeAllocSize(type);
5971}
alan-baker5b86ed72019-02-15 08:26:50 -05005972
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005973void SPIRVProducerPass::setVariablePointersCapabilities(
5974 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005975 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5976 setVariablePointersStorageBuffer(true);
5977 } else {
5978 setVariablePointers(true);
5979 }
5980}
5981
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005982Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005983 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5984 return GetBasePointer(gep->getPointerOperand());
5985 }
5986
5987 // Conservatively return |v|.
5988 return v;
5989}
5990
5991bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5992 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5993 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5994 if (lhs_call->getCalledFunction()->getName().startswith(
5995 clspv::ResourceAccessorFunction()) &&
5996 rhs_call->getCalledFunction()->getName().startswith(
5997 clspv::ResourceAccessorFunction())) {
5998 // For resource accessors, match descriptor set and binding.
5999 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6000 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6001 return true;
6002 } else if (lhs_call->getCalledFunction()->getName().startswith(
6003 clspv::WorkgroupAccessorFunction()) &&
6004 rhs_call->getCalledFunction()->getName().startswith(
6005 clspv::WorkgroupAccessorFunction())) {
6006 // For workgroup resources, match spec id.
6007 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6008 return true;
6009 }
6010 }
6011 }
6012
6013 return false;
6014}
6015
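// Checks the variable-pointers "same object" rule: returns true when every
// incoming pointer of the OpSelect or OpPhi |inst| is derived from the same
// underlying object, or is a null (or, with the HackUndef option, undef)
// value.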
6016bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6017 assert(inst->getType()->isPointerTy());
6018 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6019 spv::StorageClassStorageBuffer);
6020 const bool hack_undef = clspv::Option::HackUndef();
6021 if (auto *select = dyn_cast<SelectInst>(inst)) {
6022 auto *true_base = GetBasePointer(select->getTrueValue());
6023 auto *false_base = GetBasePointer(select->getFalseValue());
6024
6025 if (true_base == false_base)
6026 return true;
6027
6028 // If either the true or false operand is a null, then we satisfy the same
6029 // object constraint.
6030 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6031 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6032 return true;
6033 }
6034
6035 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6036 if (false_cst->isNullValue() ||
6037 (hack_undef && isa<UndefValue>(false_base)))
6038 return true;
6039 }
6040
6041 if (sameResource(true_base, false_base))
6042 return true;
6043 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6044 Value *value = nullptr;
6045 bool ok = true;
6046 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6047 auto *base = GetBasePointer(phi->getIncomingValue(i));
6048      // Null values satisfy the constraint of selecting from the same
6049      // object.
6050 if (!value) {
6051 if (auto *cst = dyn_cast<Constant>(base)) {
6052 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6053 value = base;
6054 } else {
6055 value = base;
6056 }
6057 } else if (base != value) {
6058 if (auto *base_cst = dyn_cast<Constant>(base)) {
6059 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6060 continue;
6061 }
6062
6063 if (sameResource(value, base))
6064 continue;
6065
6066 // Values don't represent the same base.
6067 ok = false;
6068 }
6069 }
6070
6071 return ok;
6072 }
6073
6074 // Conservatively return false.
6075 return false;
6076}
alan-bakere9308012019-03-15 10:25:13 -04006077
6078bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6079 if (!Arg.getType()->isPointerTy() ||
6080 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6081 // Only SSBOs need to be annotated as coherent.
6082 return false;
6083 }
6084
6085 DenseSet<Value *> visited;
6086 std::vector<Value *> stack;
6087 for (auto *U : Arg.getParent()->users()) {
6088 if (auto *call = dyn_cast<CallInst>(U)) {
6089 stack.push_back(call->getOperand(Arg.getArgNo()));
6090 }
6091 }
6092
6093 while (!stack.empty()) {
6094 Value *v = stack.back();
6095 stack.pop_back();
6096
6097 if (!visited.insert(v).second)
6098 continue;
6099
6100 auto *resource_call = dyn_cast<CallInst>(v);
6101 if (resource_call &&
6102 resource_call->getCalledFunction()->getName().startswith(
6103 clspv::ResourceAccessorFunction())) {
6104 // If this is a resource accessor function, check if the coherent operand
6105 // is set.
6106 const auto coherent =
6107 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6108 ->getZExtValue());
6109 if (coherent == 1)
6110 return true;
6111 } else if (auto *arg = dyn_cast<Argument>(v)) {
6112 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006113 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006114 if (auto *call = dyn_cast<CallInst>(U)) {
6115 stack.push_back(call->getOperand(arg->getArgNo()));
6116 }
6117 }
6118 } else if (auto *user = dyn_cast<User>(v)) {
6119 // If this is a user, traverse all operands that could lead to resource
6120 // variables.
6121 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6122 Value *operand = user->getOperand(i);
6123 if (operand->getType()->isPointerTy() &&
6124 operand->getType()->getPointerAddressSpace() ==
6125 clspv::AddressSpace::Global) {
6126 stack.push_back(operand);
6127 }
6128 }
6129 }
6130 }
6131
6132 // No coherent resource variables encountered.
6133 return false;
6134}
alan-baker06cad652019-12-03 17:56:47 -05006135
6136void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6137 // First, track loop merges and continues.
6138 DenseSet<BasicBlock *> LoopMergesAndContinues;
6139 for (auto &F : module) {
6140 if (F.isDeclaration())
6141 continue;
6142
6143 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6144 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6145 std::deque<BasicBlock *> order;
6146 DenseSet<BasicBlock *> visited;
6147 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6148
6149 for (auto BB : order) {
6150 auto terminator = BB->getTerminator();
6151 auto branch = dyn_cast<BranchInst>(terminator);
6152 if (LI.isLoopHeader(BB)) {
6153 auto L = LI.getLoopFor(BB);
6154 BasicBlock *ContinueBB = nullptr;
6155 BasicBlock *MergeBB = nullptr;
6156
6157 MergeBB = L->getExitBlock();
6158 if (!MergeBB) {
6159          // The StructurizeCFG pass converts the CFG into a triangle shape
6160          // where each region has a single entry and exit. As a result, a
6161          // loop should not have multiple exits.
6162 llvm_unreachable("Loop has multiple exits???");
6163 }
6164
6165 if (L->isLoopLatch(BB)) {
6166 ContinueBB = BB;
6167 } else {
6168          // From SPIR-V spec section 2.11, the Continue Target must dominate
6169          // the back-edge block.
6170 BasicBlock *Header = L->getHeader();
6171 BasicBlock *Latch = L->getLoopLatch();
6172 for (auto *loop_block : L->blocks()) {
6173 if (loop_block == Header) {
6174 continue;
6175 }
6176
6177            // Check whether this block dominates the block with the back-edge.
6178            // The loop latch is the single block with a back-edge. Where
6179            // possible, StructurizeCFG made the loop conform to this
6180            // requirement; otherwise |Latch| is nullptr.
6181 if (DT.dominates(loop_block, Latch)) {
6182 ContinueBB = loop_block;
6183 }
6184 }
6185
6186 if (!ContinueBB) {
6187 llvm_unreachable("Wrong continue block from loop");
6188 }
6189 }
6190
6191 // Record the continue and merge blocks.
6192 MergeBlocks[BB] = MergeBB;
6193 ContinueBlocks[BB] = ContinueBB;
6194 LoopMergesAndContinues.insert(MergeBB);
6195 LoopMergesAndContinues.insert(ContinueBB);
6196 } else if (branch && branch->isConditional()) {
6197 auto L = LI.getLoopFor(BB);
6198 bool HasBackedge = false;
6199 while (L && !HasBackedge) {
6200 if (L->isLoopLatch(BB)) {
6201 HasBackedge = true;
6202 }
6203 L = L->getParentLoop();
6204 }
6205
6206 if (!HasBackedge) {
6207 // Only need a merge if the branch doesn't include a loop break or
6208 // continue.
6209 auto true_bb = branch->getSuccessor(0);
6210 auto false_bb = branch->getSuccessor(1);
6211 if (!LoopMergesAndContinues.count(true_bb) &&
6212 !LoopMergesAndContinues.count(false_bb)) {
6213            // The StructurizeCFG pass has already manipulated the CFG. Just
6214            // use the false block of the branch instruction as the merge block.
6215 MergeBlocks[BB] = false_bb;
6216 }
6217 }
6218 }
6219 }
6220 }
6221}