// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
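      // For example, a 7-character string plus its null byte fits in 8 bytes,
      // i.e. 2 words, while an 8-character string needs 3 words.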
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
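
// Operands are typically assembled by chaining these helpers through
// operator<<, for example:
//   SPIRVOperandList Ops;
//   Ops << MkId(result_type_id) << MkNum(literal_value);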

struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
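    // WordCount starts at 1 to cover the combined word-count/opcode word;
    // each operand then adds its own number of words.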
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the single
  // operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
309 (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
310 auto PointeeTy = Ty->getPointerElementType();
311 if (PointeeTy->isStructTy() &&
312 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
313 Ty = PointeeTy;
314 }
315 }
316
David Neto862b7d82018-06-14 18:48:37 -0400317 auto where = TypeMap.find(Ty);
318 if (where == TypeMap.end()) {
319 if (Ty) {
320 errs() << "Unhandled type " << *Ty << "\n";
321 } else {
322 errs() << "Unhandled type (null)\n";
323 }
David Netoe439d702018-03-23 13:14:08 -0700324 llvm_unreachable("\nUnhandled type!");
David Neto22f144c2017-06-12 14:26:21 -0400325 }
326
David Neto862b7d82018-06-14 18:48:37 -0400327 return where->second;
David Neto22f144c2017-06-12 14:26:21 -0400328 }
329 TypeMapType &getImageTypeMap() { return ImageTypeMap; }
alan-bakerabd82722019-12-03 17:14:51 -0500330 TypeList &getImageTypeList() { return ImageTypeList; }
David Neto22f144c2017-06-12 14:26:21 -0400331 TypeList &getTypeList() { return Types; };
332 ValueList &getConstantList() { return Constants; };
333 ValueMapType &getValueMap() { return ValueMap; }
334 ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
335 SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
David Neto22f144c2017-06-12 14:26:21 -0400336 EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
337 DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
338 ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
339 uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
340 std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
alan-baker5b86ed72019-02-15 08:26:50 -0500341 bool hasVariablePointersStorageBuffer() {
342 return HasVariablePointersStorageBuffer;
343 }
344 void setVariablePointersStorageBuffer(bool Val) {
345 HasVariablePointersStorageBuffer = Val;
346 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400347 bool hasVariablePointers() { return HasVariablePointers; };
David Neto22f144c2017-06-12 14:26:21 -0400348 void setVariablePointers(bool Val) { HasVariablePointers = Val; };
alan-bakerb6b09dc2018-11-08 16:59:28 -0500349 ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
350 return samplerMap;
351 }
David Neto22f144c2017-06-12 14:26:21 -0400352 GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
353 return GlobalConstFuncTypeMap;
354 }
355 SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
356 return GlobalConstArgumentSet;
357 }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500358 TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }
David Neto22f144c2017-06-12 14:26:21 -0400359
David Netoc6f3ab22018-04-06 18:02:31 -0400360 void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
alan-bakerb6b09dc2018-11-08 16:59:28 -0500361 // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
362 // *not* be converted to a storage buffer, replace each such global variable
363 // with one in the storage class expecgted by SPIR-V.
David Neto862b7d82018-06-14 18:48:37 -0400364 void FindGlobalConstVars(Module &M, const DataLayout &DL);
365 // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
366 // ModuleOrderedResourceVars.
367 void FindResourceVars(Module &M, const DataLayout &DL);
Alan Baker202c8c72018-08-13 13:47:44 -0400368 void FindWorkgroupVars(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400369 bool FindExtInst(Module &M);
370 void FindTypePerGlobalVar(GlobalVariable &GV);
371 void FindTypePerFunc(Function &F);
David Neto862b7d82018-06-14 18:48:37 -0400372 void FindTypesForSamplerMap(Module &M);
373 void FindTypesForResourceVars(Module &M);
alan-bakerb6b09dc2018-11-08 16:59:28 -0500374 // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
375 // that |Ty| and its subtypes will need a corresponding SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400376 void FindType(Type *Ty);
377 void FindConstantPerGlobalVar(GlobalVariable &GV);
378 void FindConstantPerFunc(Function &F);
379 void FindConstant(Value *V);
380 void GenerateExtInstImport();
David Neto19a1bad2017-08-25 15:01:41 -0400381 // Generates instructions for SPIR-V types corresponding to the LLVM types
382 // saved in the |Types| member. A type follows its subtypes. IDs are
383 // allocated sequentially starting with the current value of nextID, and
384 // with a type following its subtypes. Also updates nextID to just beyond
385 // the last generated ID.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500386 void GenerateSPIRVTypes(LLVMContext &context, Module &module);
David Neto22f144c2017-06-12 14:26:21 -0400387 void GenerateSPIRVConstants();
David Neto5c22a252018-03-15 16:07:41 -0400388 void GenerateModuleInfo(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400389 void GenerateGlobalVar(GlobalVariable &GV);
David Netoc6f3ab22018-04-06 18:02:31 -0400390 void GenerateWorkgroupVars();
David Neto862b7d82018-06-14 18:48:37 -0400391 // Generate descriptor map entries for resource variables associated with
392 // arguments to F.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500393 void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
David Neto22f144c2017-06-12 14:26:21 -0400394 void GenerateSamplers(Module &M);
David Neto862b7d82018-06-14 18:48:37 -0400395 // Generate OpVariables for %clspv.resource.var.* calls.
396 void GenerateResourceVars(Module &M);
David Neto22f144c2017-06-12 14:26:21 -0400397 void GenerateFuncPrologue(Function &F);
398 void GenerateFuncBody(Function &F);
David Netob6e2e062018-04-25 10:32:06 -0400399 void GenerateEntryPointInitialStores();
David Neto22f144c2017-06-12 14:26:21 -0400400 spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
401 spv::Op GetSPIRVCastOpcode(Instruction &I);
402 spv::Op GetSPIRVBinaryOpcode(Instruction &I);
403 void GenerateInstruction(Instruction &I);
404 void GenerateFuncEpilogue();
405 void HandleDeferredInstruction();
alan-bakerb6b09dc2018-11-08 16:59:28 -0500406 void HandleDeferredDecorations(const DataLayout &DL);
David Neto22f144c2017-06-12 14:26:21 -0400407 bool is4xi8vec(Type *Ty) const;
408 spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
David Neto862b7d82018-06-14 18:48:37 -0400409 spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
David Neto22f144c2017-06-12 14:26:21 -0400410 spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
David Neto3fbb4072017-10-16 11:28:14 -0400411 // Returns the GLSL extended instruction enum that the given function
412 // call maps to. If none, then returns the 0 value, i.e. GLSLstd4580Bad.
David Neto22f144c2017-06-12 14:26:21 -0400413 glsl::ExtInst getExtInstEnum(StringRef Name);
David Neto3fbb4072017-10-16 11:28:14 -0400414 // Returns the GLSL extended instruction enum indirectly used by the given
415 // function. That is, to implement the given function, we use an extended
416 // instruction plus one more instruction. If none, then returns the 0 value,
417 // i.e. GLSLstd4580Bad.
418 glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
419 // Returns the single GLSL extended instruction used directly or
420 // indirectly by the given function call.
421 glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
David Neto22f144c2017-06-12 14:26:21 -0400422 void WriteOneWord(uint32_t Word);
423 void WriteResultID(SPIRVInstruction *Inst);
424 void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
David Netoef5ba2b2019-12-20 08:35:54 -0500425 void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
David Neto22f144c2017-06-12 14:26:21 -0400426 void WriteSPIRVBinary();
427
Alan Baker9bf93fb2018-08-28 16:59:26 -0400428 // Returns true if |type| is compatible with OpConstantNull.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500429 bool IsTypeNullable(const Type *type) const;
Alan Baker9bf93fb2018-08-28 16:59:26 -0400430
Alan Bakerfcda9482018-10-02 17:09:59 -0400431 // Populate UBO remapped type maps.
432 void PopulateUBOTypeMaps(Module &module);
433
alan-baker06cad652019-12-03 17:56:47 -0500434 // Populate the merge and continue block maps.
435 void PopulateStructuredCFGMaps(Module &module);
436
Alan Bakerfcda9482018-10-02 17:09:59 -0400437 // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
438 // uses the internal map, otherwise it falls back on the data layout.
439 uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
440 uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
441 uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
442
alan-baker5b86ed72019-02-15 08:26:50 -0500443 // Returns the base pointer of |v|.
444 Value *GetBasePointer(Value *v);
445
446 // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| base on
447 // |address_space|.
448 void setVariablePointersCapabilities(unsigned address_space);
449
450 // Returns true if |lhs| and |rhs| represent the same resource or workgroup
451 // variable.
452 bool sameResource(Value *lhs, Value *rhs) const;
453
454 // Returns true if |inst| is phi or select that selects from the same
455 // structure (or null).
456 bool selectFromSameObject(Instruction *inst);
457
alan-bakere9308012019-03-15 10:25:13 -0400458 // Returns true if |Arg| is called with a coherent resource.
459 bool CalledWithCoherentResource(Argument &Arg);
460
David Neto22f144c2017-06-12 14:26:21 -0400461private:
462 static char ID;
David Neto44795152017-07-13 15:45:28 -0400463 ArrayRef<std::pair<unsigned, std::string>> samplerMap;
David Neto22f144c2017-06-12 14:26:21 -0400464 raw_pwrite_stream &out;
David Neto0676e6f2017-07-11 18:47:44 -0400465
466 // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
467 // convert to other formats on demand?
468
469 // When emitting a C initialization list, the WriteSPIRVBinary method
470 // will actually write its words to this vector via binaryTempOut.
471 SmallVector<char, 100> binaryTempUnderlyingVector;
472 raw_svector_ostream binaryTempOut;
473
474 // Binary output writes to this stream, which might be |out| or
475 // |binaryTempOut|. It's the latter when we really want to write a C
476 // initializer list.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400477 raw_pwrite_stream *binaryOut;
alan-bakerf5e5f692018-11-27 08:33:24 -0500478 std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
David Neto0676e6f2017-07-11 18:47:44 -0400479 const bool outputCInitList; // If true, output look like {0x7023, ... , 5}
David Neto22f144c2017-06-12 14:26:21 -0400480 uint64_t patchBoundOffset;
481 uint32_t nextID;
482
alan-bakerf67468c2019-11-25 15:51:49 -0500483 // ID for OpTypeInt 32 1.
484 uint32_t int32ID = 0;
485 // ID for OpTypeVector %int 4.
486 uint32_t v4int32ID = 0;
487
David Neto19a1bad2017-08-25 15:01:41 -0400488 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400489 TypeMapType TypeMap;
David Neto19a1bad2017-08-25 15:01:41 -0400490 // Maps an LLVM image type to its SPIR-V ID.
David Neto22f144c2017-06-12 14:26:21 -0400491 TypeMapType ImageTypeMap;
alan-bakerabd82722019-12-03 17:14:51 -0500492 // A unique-vector of LLVM image types. This list is used to provide
493 // deterministic traversal of image types.
494 TypeList ImageTypeList;
David Neto19a1bad2017-08-25 15:01:41 -0400495 // A unique-vector of LLVM types that map to a SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400496 TypeList Types;
497 ValueList Constants;
David Neto19a1bad2017-08-25 15:01:41 -0400498 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400499 ValueMapType ValueMap;
500 ValueMapType AllocatedValueMap;
501 SPIRVInstructionList SPIRVInsts;
David Neto862b7d82018-06-14 18:48:37 -0400502
David Neto22f144c2017-06-12 14:26:21 -0400503 EntryPointVecType EntryPointVec;
504 DeferredInstVecType DeferredInstVec;
505 ValueList EntryPointInterfacesVec;
506 uint32_t OpExtInstImportID;
507 std::vector<uint32_t> BuiltinDimensionVec;
alan-baker5b86ed72019-02-15 08:26:50 -0500508 bool HasVariablePointersStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -0400509 bool HasVariablePointers;
510 Type *SamplerTy;
alan-baker09cb9802019-12-10 13:16:27 -0500511 DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;
David Netoc77d9e22018-03-24 06:30:28 -0700512
513 // If a function F has a pointer-to-__constant parameter, then this variable
David Neto9ed8e2f2018-03-24 06:47:24 -0700514 // will map F's type to (G, index of the parameter), where in a first phase
515 // G is F's type. During FindTypePerFunc, G will be changed to F's type
516 // but replacing the pointer-to-constant parameter with
517 // pointer-to-ModuleScopePrivate.
David Netoc77d9e22018-03-24 06:30:28 -0700518 // TODO(dneto): This doesn't seem general enough? A function might have
519 // more than one such parameter.
David Neto22f144c2017-06-12 14:26:21 -0400520 GlobalConstFuncMapType GlobalConstFuncTypeMap;
521 SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
David Neto1a1a0582017-07-07 12:01:44 -0400522 // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
David Neto85082642018-03-24 06:55:20 -0700523 // or array types, and which point into transparent memory (StorageBuffer
524 // storage class). These will require an ArrayStride decoration.
David Neto1a1a0582017-07-07 12:01:44 -0400525 // See SPV_KHR_variable_pointers rev 13.
David Neto85082642018-03-24 06:55:20 -0700526 TypeList TypesNeedingArrayStride;
David Netoa60b00b2017-09-15 16:34:09 -0400527
528 // This is truly ugly, but works around what look like driver bugs.
529 // For get_local_size, an earlier part of the flow has created a module-scope
530 // variable in Private address space to hold the value for the workgroup
531 // size. Its intializer is a uint3 value marked as builtin WorkgroupSize.
532 // When this is present, save the IDs of the initializer value and variable
533 // in these two variables. We only ever do a vector load from it, and
534 // when we see one of those, substitute just the value of the intializer.
535 // This mimics what Glslang does, and that's what drivers are used to.
David Neto66cfe642018-03-24 06:13:56 -0700536 // TODO(dneto): Remove this once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -0400537 uint32_t WorkgroupSizeValueID;
538 uint32_t WorkgroupSizeVarID;
David Neto26aaf622017-10-23 18:11:53 -0400539
David Neto862b7d82018-06-14 18:48:37 -0400540 // Bookkeeping for mapping kernel arguments to resource variables.
541 struct ResourceVarInfo {
542 ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
alan-bakere9308012019-03-15 10:25:13 -0400543 Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
David Neto862b7d82018-06-14 18:48:37 -0400544 : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
alan-bakere9308012019-03-15 10:25:13 -0400545 var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
David Neto862b7d82018-06-14 18:48:37 -0400546 addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
547 const int index; // Index into ResourceVarInfoList
548 const unsigned descriptor_set;
549 const unsigned binding;
550 Function *const var_fn; // The @clspv.resource.var.* function.
551 const clspv::ArgKind arg_kind;
alan-bakere9308012019-03-15 10:25:13 -0400552 const int coherent;
David Neto862b7d82018-06-14 18:48:37 -0400553 const unsigned addr_space; // The LLVM address space
554 // The SPIR-V ID of the OpVariable. Not populated at construction time.
555 uint32_t var_id = 0;
556 };
557 // A list of resource var info. Each one correponds to a module-scope
558 // resource variable we will have to create. Resource var indices are
559 // indices into this vector.
560 SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
561 // This is a vector of pointers of all the resource vars, but ordered by
562 // kernel function, and then by argument.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500563 UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
David Neto862b7d82018-06-14 18:48:37 -0400564 // Map a function to the ordered list of resource variables it uses, one for
565 // each argument. If an argument does not use a resource variable, it
566 // will have a null pointer entry.
567 using FunctionToResourceVarsMapType =
568 DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
569 FunctionToResourceVarsMapType FunctionToResourceVarsMap;
570
571 // What LLVM types map to SPIR-V types needing layout? These are the
572 // arrays and structures supporting storage buffers and uniform buffers.
573 TypeList TypesNeedingLayout;
574 // What LLVM struct types map to a SPIR-V struct type with Block decoration?
575 UniqueVector<StructType *> StructTypesNeedingBlock;
576 // For a call that represents a load from an opaque type (samplers, images),
577 // map it to the variable id it should load from.
578 DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;
David Neto85082642018-03-24 06:55:20 -0700579
Alan Baker202c8c72018-08-13 13:47:44 -0400580 // One larger than the maximum used SpecId for pointer-to-local arguments.
581 int max_local_spec_id_;
David Netoc6f3ab22018-04-06 18:02:31 -0400582 // An ordered list of the kernel arguments of type pointer-to-local.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500583 using LocalArgList = SmallVector<Argument *, 8>;
David Netoc6f3ab22018-04-06 18:02:31 -0400584 LocalArgList LocalArgs;
585 // Information about a pointer-to-local argument.
586 struct LocalArgInfo {
587 // The SPIR-V ID of the array variable.
588 uint32_t variable_id;
589 // The element type of the
alan-bakerb6b09dc2018-11-08 16:59:28 -0500590 Type *elem_type;
David Netoc6f3ab22018-04-06 18:02:31 -0400591 // The ID of the array type.
592 uint32_t array_size_id;
593 // The ID of the array type.
594 uint32_t array_type_id;
595 // The ID of the pointer to the array type.
596 uint32_t ptr_array_type_id;
David Netoc6f3ab22018-04-06 18:02:31 -0400597 // The specialization constant ID of the array size.
598 int spec_id;
599 };
Alan Baker202c8c72018-08-13 13:47:44 -0400600 // A mapping from Argument to its assigned SpecId.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500601 DenseMap<const Argument *, int> LocalArgSpecIds;
Alan Baker202c8c72018-08-13 13:47:44 -0400602 // A mapping from SpecId to its LocalArgInfo.
603 DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
Alan Bakerfcda9482018-10-02 17:09:59 -0400604 // A mapping from a remapped type to its real offsets.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500605 DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
Alan Bakerfcda9482018-10-02 17:09:59 -0400606 // A mapping from a remapped type to its real sizes.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500607 DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
608 RemappedUBOTypeSizes;
alan-baker06cad652019-12-03 17:56:47 -0500609
610 // Maps basic block to its merge block.
611 DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
612 // Maps basic block to its continue block.
613 DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
David Neto22f144c2017-06-12 14:26:21 -0400614};
615
616char SPIRVProducerPass::ID;
David Netoc6f3ab22018-04-06 18:02:31 -0400617
alan-bakerb6b09dc2018-11-08 16:59:28 -0500618} // namespace
David Neto22f144c2017-06-12 14:26:21 -0400619
620namespace clspv {
alan-bakerf5e5f692018-11-27 08:33:24 -0500621ModulePass *createSPIRVProducerPass(
622 raw_pwrite_stream &out,
623 std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
alan-baker00e7a582019-06-07 12:54:21 -0400624 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
alan-bakerf5e5f692018-11-27 08:33:24 -0500625 bool outputCInitList) {
626 return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
alan-baker00e7a582019-06-07 12:54:21 -0400627 outputCInitList);
David Neto22f144c2017-06-12 14:26:21 -0400628}
David Netoc2c368d2017-06-30 16:50:17 -0400629} // namespace clspv
David Neto22f144c2017-06-12 14:26:21 -0400630
631bool SPIRVProducerPass::runOnModule(Module &module) {
David Neto0676e6f2017-07-11 18:47:44 -0400632 binaryOut = outputCInitList ? &binaryTempOut : &out;
633
Alan Bakerfcda9482018-10-02 17:09:59 -0400634 PopulateUBOTypeMaps(module);
alan-baker06cad652019-12-03 17:56:47 -0500635 PopulateStructuredCFGMaps(module);
Alan Bakerfcda9482018-10-02 17:09:59 -0400636
David Neto22f144c2017-06-12 14:26:21 -0400637 // SPIR-V always begins with its header information
638 outputHeader();
639
David Netoc6f3ab22018-04-06 18:02:31 -0400640 const DataLayout &DL = module.getDataLayout();
641
David Neto22f144c2017-06-12 14:26:21 -0400642 // Gather information from the LLVM IR that we require.
David Netoc6f3ab22018-04-06 18:02:31 -0400643 GenerateLLVMIRInfo(module, DL);
David Neto22f144c2017-06-12 14:26:21 -0400644
David Neto22f144c2017-06-12 14:26:21 -0400645 // Collect information on global variables too.
646 for (GlobalVariable &GV : module.globals()) {
647 // If the GV is one of our special __spirv_* variables, remove the
648 // initializer as it was only placed there to force LLVM to not throw the
649 // value away.
650 if (GV.getName().startswith("__spirv_")) {
651 GV.setInitializer(nullptr);
652 }
653
654 // Collect types' information from global variable.
655 FindTypePerGlobalVar(GV);
656
657 // Collect constant information from global variable.
658 FindConstantPerGlobalVar(GV);
659
660 // If the variable is an input, entry points need to know about it.
661 if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
David Netofb9a7972017-08-25 17:08:24 -0400662 getEntryPointInterfacesVec().insert(&GV);
David Neto22f144c2017-06-12 14:26:21 -0400663 }
664 }
665
666 // If there are extended instructions, generate OpExtInstImport.
667 if (FindExtInst(module)) {
668 GenerateExtInstImport();
669 }
670
671 // Generate SPIRV instructions for types.
Alan Bakerfcda9482018-10-02 17:09:59 -0400672 GenerateSPIRVTypes(module.getContext(), module);
David Neto22f144c2017-06-12 14:26:21 -0400673
674 // Generate SPIRV constants.
675 GenerateSPIRVConstants();
676
alan-baker09cb9802019-12-10 13:16:27 -0500677 // Generate literal samplers if necessary.
678 GenerateSamplers(module);
David Neto22f144c2017-06-12 14:26:21 -0400679
680 // Generate SPIRV variables.
681 for (GlobalVariable &GV : module.globals()) {
682 GenerateGlobalVar(GV);
683 }
David Neto862b7d82018-06-14 18:48:37 -0400684 GenerateResourceVars(module);
David Netoc6f3ab22018-04-06 18:02:31 -0400685 GenerateWorkgroupVars();
David Neto22f144c2017-06-12 14:26:21 -0400686
687 // Generate SPIRV instructions for each function.
688 for (Function &F : module) {
689 if (F.isDeclaration()) {
690 continue;
691 }
692
David Neto862b7d82018-06-14 18:48:37 -0400693 GenerateDescriptorMapInfo(DL, F);
694
David Neto22f144c2017-06-12 14:26:21 -0400695 // Generate Function Prologue.
696 GenerateFuncPrologue(F);
697
698 // Generate SPIRV instructions for function body.
699 GenerateFuncBody(F);
700
701 // Generate Function Epilogue.
702 GenerateFuncEpilogue();
703 }
704
705 HandleDeferredInstruction();
David Neto1a1a0582017-07-07 12:01:44 -0400706 HandleDeferredDecorations(DL);
David Neto22f144c2017-06-12 14:26:21 -0400707
708 // Generate SPIRV module information.
David Neto5c22a252018-03-15 16:07:41 -0400709 GenerateModuleInfo(module);
David Neto22f144c2017-06-12 14:26:21 -0400710
alan-baker00e7a582019-06-07 12:54:21 -0400711 WriteSPIRVBinary();
David Neto22f144c2017-06-12 14:26:21 -0400712
713 // We need to patch the SPIR-V header to set bound correctly.
714 patchHeader();
David Neto0676e6f2017-07-11 18:47:44 -0400715
716 if (outputCInitList) {
717 bool first = true;
David Neto0676e6f2017-07-11 18:47:44 -0400718 std::ostringstream os;
719
David Neto57fb0b92017-08-04 15:35:09 -0400720 auto emit_word = [&os, &first](uint32_t word) {
David Neto0676e6f2017-07-11 18:47:44 -0400721 if (!first)
David Neto57fb0b92017-08-04 15:35:09 -0400722 os << ",\n";
723 os << word;
David Neto0676e6f2017-07-11 18:47:44 -0400724 first = false;
725 };
726
727 os << "{";
David Neto57fb0b92017-08-04 15:35:09 -0400728 const std::string str(binaryTempOut.str());
729 for (unsigned i = 0; i < str.size(); i += 4) {
730 const uint32_t a = static_cast<unsigned char>(str[i]);
731 const uint32_t b = static_cast<unsigned char>(str[i + 1]);
732 const uint32_t c = static_cast<unsigned char>(str[i + 2]);
733 const uint32_t d = static_cast<unsigned char>(str[i + 3]);
734 emit_word(a | (b << 8) | (c << 16) | (d << 24));
David Neto0676e6f2017-07-11 18:47:44 -0400735 }
736 os << "}\n";
737 out << os.str();
738 }
739
David Neto22f144c2017-06-12 14:26:21 -0400740 return false;
741}
742
743void SPIRVProducerPass::outputHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400744 binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
745 sizeof(spv::MagicNumber));
746 binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
747 sizeof(spv::Version));
David Neto22f144c2017-06-12 14:26:21 -0400748
alan-baker0c18ab02019-06-12 10:23:21 -0400749 // use Google's vendor ID
750 const uint32_t vendor = 21 << 16;
alan-baker00e7a582019-06-07 12:54:21 -0400751 binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));
David Neto22f144c2017-06-12 14:26:21 -0400752
alan-baker00e7a582019-06-07 12:54:21 -0400753 // we record where we need to come back to and patch in the bound value
754 patchBoundOffset = binaryOut->tell();
David Neto22f144c2017-06-12 14:26:21 -0400755
alan-baker00e7a582019-06-07 12:54:21 -0400756 // output a bad bound for now
757 binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));
David Neto22f144c2017-06-12 14:26:21 -0400758
alan-baker00e7a582019-06-07 12:54:21 -0400759 // output the schema (reserved for use and must be 0)
760 const uint32_t schema = 0;
761 binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
David Neto22f144c2017-06-12 14:26:21 -0400762}
763
764void SPIRVProducerPass::patchHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400765 // for a binary we just write the value of nextID over bound
766 binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
767 patchBoundOffset);
David Neto22f144c2017-06-12 14:26:21 -0400768}
769
David Netoc6f3ab22018-04-06 18:02:31 -0400770void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
David Neto22f144c2017-06-12 14:26:21 -0400771 // This function generates LLVM IR for function such as global variable for
772 // argument, constant and pointer type for argument access. These information
773 // is artificial one because we need Vulkan SPIR-V output. This function is
774 // executed ahead of FindType and FindConstant.
David Neto22f144c2017-06-12 14:26:21 -0400775 LLVMContext &Context = M.getContext();
776
David Neto862b7d82018-06-14 18:48:37 -0400777 FindGlobalConstVars(M, DL);
David Neto5c22a252018-03-15 16:07:41 -0400778
David Neto862b7d82018-06-14 18:48:37 -0400779 FindResourceVars(M, DL);
David Neto22f144c2017-06-12 14:26:21 -0400780
781 bool HasWorkGroupBuiltin = false;
782 for (GlobalVariable &GV : M.globals()) {
783 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
784 if (spv::BuiltInWorkgroupSize == BuiltinType) {
785 HasWorkGroupBuiltin = true;
786 }
787 }
788
David Neto862b7d82018-06-14 18:48:37 -0400789 FindTypesForSamplerMap(M);
790 FindTypesForResourceVars(M);
Alan Baker202c8c72018-08-13 13:47:44 -0400791 FindWorkgroupVars(M);
David Neto22f144c2017-06-12 14:26:21 -0400792
793 for (Function &F : M) {
Kévin Petitabef4522019-03-27 13:08:01 +0000794 if (F.isDeclaration()) {
David Neto22f144c2017-06-12 14:26:21 -0400795 continue;
796 }
797
798 for (BasicBlock &BB : F) {
799 for (Instruction &I : BB) {
800 if (I.getOpcode() == Instruction::ZExt ||
801 I.getOpcode() == Instruction::SExt ||
802 I.getOpcode() == Instruction::UIToFP) {
803 // If there is zext with i1 type, it will be changed to OpSelect. The
804 // OpSelect needs constant 0 and 1 so the constants are added here.
805
806 auto OpTy = I.getOperand(0)->getType();
807
Kévin Petit24272b62018-10-18 19:16:12 +0000808 if (OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -0400809 if (I.getOpcode() == Instruction::ZExt) {
David Neto22f144c2017-06-12 14:26:21 -0400810 FindConstant(Constant::getNullValue(I.getType()));
Kévin Petit7bfb8992019-02-26 13:45:08 +0000811 FindConstant(ConstantInt::get(I.getType(), 1));
David Neto22f144c2017-06-12 14:26:21 -0400812 } else if (I.getOpcode() == Instruction::SExt) {
David Neto22f144c2017-06-12 14:26:21 -0400813 FindConstant(Constant::getNullValue(I.getType()));
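              // A sign-extended i1 "true" is all ones, i.e. -1.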
Kévin Petit7bfb8992019-02-26 13:45:08 +0000814 FindConstant(ConstantInt::getSigned(I.getType(), -1));
David Neto22f144c2017-06-12 14:26:21 -0400815 } else {
816 FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
817 FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
818 }
819 }
820 } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
David Neto862b7d82018-06-14 18:48:37 -0400821 StringRef callee_name = Call->getCalledFunction()->getName();
David Neto22f144c2017-06-12 14:26:21 -0400822
823 // Handle image type specially.
alan-baker75090e42020-02-20 11:21:04 -0500824 if (clspv::IsImageBuiltin(callee_name)) {
David Neto22f144c2017-06-12 14:26:21 -0400825 TypeMapType &OpImageTypeMap = getImageTypeMap();
826 Type *ImageTy =
827 Call->getArgOperand(0)->getType()->getPointerElementType();
828 OpImageTypeMap[ImageTy] = 0;
alan-bakerabd82722019-12-03 17:14:51 -0500829 getImageTypeList().insert(ImageTy);
alan-baker75090e42020-02-20 11:21:04 -0500830 }
David Neto22f144c2017-06-12 14:26:21 -0400831
alan-baker75090e42020-02-20 11:21:04 -0500832 if (clspv::IsSampledImageRead(callee_name)) {
alan-bakerf67468c2019-11-25 15:51:49 -0500833 // All sampled reads need a floating point 0 for the Lod operand.
David Neto22f144c2017-06-12 14:26:21 -0400834 FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
835 }
David Neto5c22a252018-03-15 16:07:41 -0400836
alan-baker75090e42020-02-20 11:21:04 -0500837 if (clspv::IsUnsampledImageRead(callee_name)) {
838 // All unsampled reads need an integer 0 for the Lod operand.
839 FindConstant(ConstantInt::get(Context, APInt(32, 0)));
840 }
841
alan-bakerce179f12019-12-06 19:02:22 -0500842 if (clspv::IsImageQuery(callee_name)) {
843 Type *ImageTy = Call->getOperand(0)->getType();
844 const uint32_t dim = ImageDimensionality(ImageTy);
alan-baker7150a1d2020-02-25 08:31:06 -0500845 uint32_t components =
846 dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -0500847 if (components > 1) {
848 // OpImageQuerySize* return |components| components.
849 FindType(VectorType::get(Type::getInt32Ty(Context), components));
850 if (dim == 3 && IsGetImageDim(callee_name)) {
851 // get_image_dim for 3D images returns an int4.
852 FindType(
853 VectorType::get(Type::getInt32Ty(Context), components + 1));
854 }
855 }
856
857 if (clspv::IsSampledImageType(ImageTy)) {
858 // All sampled image queries need a integer 0 for the Lod
859 // operand.
860 FindConstant(ConstantInt::get(Context, APInt(32, 0)));
861 }
David Neto5c22a252018-03-15 16:07:41 -0400862 }
David Neto22f144c2017-06-12 14:26:21 -0400863 }
864 }
865 }
866
Kévin Petitabef4522019-03-27 13:08:01 +0000867 // More things to do on kernel functions
868 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
869 if (const MDNode *MD =
870 dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
871 // We generate constants if the WorkgroupSize builtin is being used.
872 if (HasWorkGroupBuiltin) {
873 // Collect constant information for work group size.
874 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
875 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
876 FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -0400877 }
878 }
879 }
880
alan-bakerf67468c2019-11-25 15:51:49 -0500881 // TODO(alan-baker): make this better.
alan-bakerf906d2b2019-12-10 11:26:23 -0500882 if (M.getTypeByName("opencl.image1d_ro_t.float") ||
883 M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
884 M.getTypeByName("opencl.image1d_wo_t.float") ||
885 M.getTypeByName("opencl.image2d_ro_t.float") ||
alan-bakerf67468c2019-11-25 15:51:49 -0500886 M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
887 M.getTypeByName("opencl.image2d_wo_t.float") ||
888 M.getTypeByName("opencl.image3d_ro_t.float") ||
889 M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
alan-baker7150a1d2020-02-25 08:31:06 -0500890 M.getTypeByName("opencl.image3d_wo_t.float") ||
891 M.getTypeByName("opencl.image1d_array_ro_t.float") ||
892 M.getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
893 M.getTypeByName("opencl.image1d_array_wo_t.float") ||
894 M.getTypeByName("opencl.image2d_array_ro_t.float") ||
895 M.getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
896 M.getTypeByName("opencl.image2d_array_wo_t.float")) {
alan-bakerf67468c2019-11-25 15:51:49 -0500897 FindType(Type::getFloatTy(Context));
alan-bakerf906d2b2019-12-10 11:26:23 -0500898 } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
899 M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
900 M.getTypeByName("opencl.image1d_wo_t.uint") ||
901 M.getTypeByName("opencl.image2d_ro_t.uint") ||
alan-bakerf67468c2019-11-25 15:51:49 -0500902 M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
903 M.getTypeByName("opencl.image2d_wo_t.uint") ||
904 M.getTypeByName("opencl.image3d_ro_t.uint") ||
905 M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
alan-baker7150a1d2020-02-25 08:31:06 -0500906 M.getTypeByName("opencl.image3d_wo_t.uint") ||
907 M.getTypeByName("opencl.image1d_array_ro_t.uint") ||
908 M.getTypeByName("opencl.image1d_array_ro_t.uint.sampled") ||
909 M.getTypeByName("opencl.image1d_array_wo_t.uint") ||
910 M.getTypeByName("opencl.image2d_array_ro_t.uint") ||
911 M.getTypeByName("opencl.image2d_array_ro_t.uint.sampled") ||
912 M.getTypeByName("opencl.image2d_array_wo_t.uint")) {
alan-bakerf67468c2019-11-25 15:51:49 -0500913 FindType(Type::getInt32Ty(Context));
alan-bakerf906d2b2019-12-10 11:26:23 -0500914 } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
915 M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
916 M.getTypeByName("opencl.image1d_wo_t.int") ||
917 M.getTypeByName("opencl.image2d_ro_t.int") ||
alan-bakerf67468c2019-11-25 15:51:49 -0500918 M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
919 M.getTypeByName("opencl.image2d_wo_t.int") ||
920 M.getTypeByName("opencl.image3d_ro_t.int") ||
921 M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
alan-baker7150a1d2020-02-25 08:31:06 -0500922 M.getTypeByName("opencl.image3d_wo_t.int") ||
923 M.getTypeByName("opencl.image1d_array_ro_t.int") ||
924 M.getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
925 M.getTypeByName("opencl.image1d_array_wo_t.int") ||
926 M.getTypeByName("opencl.image2d_array_ro_t.int") ||
927 M.getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
928 M.getTypeByName("opencl.image2d_array_wo_t.int")) {
alan-bakerf67468c2019-11-25 15:51:49 -0500929 // Nothing for now...
930 } else {
931 // This was likely an UndefValue.
David Neto22f144c2017-06-12 14:26:21 -0400932 FindType(Type::getFloatTy(Context));
933 }
934
935 // Collect types' information from function.
936 FindTypePerFunc(F);
937
938 // Collect constant information from function.
939 FindConstantPerFunc(F);
940 }
941}
942
David Neto862b7d82018-06-14 18:48:37 -0400943void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
alan-baker56f7aff2019-05-22 08:06:42 -0400944 clspv::NormalizeGlobalVariables(M);
945
David Neto862b7d82018-06-14 18:48:37 -0400946 SmallVector<GlobalVariable *, 8> GVList;
947 SmallVector<GlobalVariable *, 8> DeadGVList;
948 for (GlobalVariable &GV : M.globals()) {
949 if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
950 if (GV.use_empty()) {
951 DeadGVList.push_back(&GV);
952 } else {
953 GVList.push_back(&GV);
954 }
955 }
956 }
957
958 // Remove dead global __constant variables.
959 for (auto GV : DeadGVList) {
960 GV->eraseFromParent();
961 }
962 DeadGVList.clear();
963
964 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
965 // For now, we only support a single storage buffer.
966 if (GVList.size() > 0) {
967 assert(GVList.size() == 1);
968 const auto *GV = GVList[0];
969 const auto constants_byte_size =
Alan Bakerfcda9482018-10-02 17:09:59 -0400970 (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
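      // 64KB matches the minimum CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE that
      // OpenCL guarantees for __constant data.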
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
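          // The constant operands of the call give, in order: descriptor set
          // (0), binding (1), arg kind (2), arg index (3), and the coherent
          // flag (5).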
1071 const auto set = unsigned(
1072 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1073 const auto binding = unsigned(
1074 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1075 const auto arg_kind = clspv::ArgKind(
1076 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1077 const auto arg_index = unsigned(
1078 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001079 const auto coherent = unsigned(
1080 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001081
1082 // Find or make the resource var info for this combination.
1083 ResourceVarInfo *rv = nullptr;
1084 if (always_distinct_sets) {
1085 // Make a new resource var any time we see a different
1086 // (set,binding) pair.
1087 SetAndBinding key{set, binding};
1088 auto where = set_and_binding_map.find(key);
1089 if (where == set_and_binding_map.end()) {
1090 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001091 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001092 ResourceVarInfoList.emplace_back(rv);
1093 set_and_binding_map[key] = rv;
1094 } else {
1095 rv = where->second;
1096 }
1097 } else {
1098 // The default is to make exactly one resource for each
1099 // clspv.resource.var.* function.
1100 if (first_use) {
1101 first_use = false;
1102 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001103 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001104 ResourceVarInfoList.emplace_back(rv);
1105 } else {
1106 rv = ResourceVarInfoList.back().get();
1107 }
1108 }
1109
1110 // Now populate FunctionToResourceVarsMap.
1111 auto &mapping =
1112 FunctionToResourceVarsMap[call->getParent()->getParent()];
1113 while (mapping.size() <= arg_index) {
1114 mapping.push_back(nullptr);
1115 }
1116 mapping[arg_index] = rv;
1117 }
1118 }
1119 }
1120 }
1121
1122 // Populate ModuleOrderedResourceVars.
1123 for (Function &F : M) {
1124 auto where = FunctionToResourceVarsMap.find(&F);
1125 if (where != FunctionToResourceVarsMap.end()) {
1126 for (auto &rv : where->second) {
1127 if (rv != nullptr) {
1128 ModuleOrderedResourceVars.insert(rv);
1129 }
1130 }
1131 }
1132 }
1133 if (ShowResourceVars) {
1134 for (auto *info : ModuleOrderedResourceVars) {
1135 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1136 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1137 << "\n";
1138 }
1139 }
1140}
1141
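// Returns true if any call in the module maps to a GLSL.std.450 extended
// instruction, either directly or indirectly. For the indirect cases, also
// registers the helper constants needed later (31 for clz via FindUMsb, and
// 1/pi for the *pi trigonometric functions).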
David Neto22f144c2017-06-12 14:26:21 -04001142bool SPIRVProducerPass::FindExtInst(Module &M) {
1143 LLVMContext &Context = M.getContext();
1144 bool HasExtInst = false;
1145
1146 for (Function &F : M) {
1147 for (BasicBlock &BB : F) {
1148 for (Instruction &I : BB) {
1149 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1150 Function *Callee = Call->getCalledFunction();
 1151 // Check whether this call maps to a GLSL extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001152 auto callee_name = Callee->getName();
1153 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1154 const glsl::ExtInst IndirectEInst =
1155 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001156
David Neto3fbb4072017-10-16 11:28:14 -04001157 HasExtInst |=
1158 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1159
1160 if (IndirectEInst) {
1161 // Register extra constants if needed.
1162
1163 // Registers a type and constant for computing the result of the
1164 // given instruction. If the result of the instruction is a vector,
1165 // then make a splat vector constant with the same number of
1166 // elements.
1167 auto register_constant = [this, &I](Constant *constant) {
1168 FindType(constant->getType());
1169 FindConstant(constant);
1170 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1171 // Register the splat vector of the value with the same
1172 // width as the result of the instruction.
1173 auto *vec_constant = ConstantVector::getSplat(
1174 static_cast<unsigned>(vectorTy->getNumElements()),
1175 constant);
1176 FindConstant(vec_constant);
1177 FindType(vec_constant->getType());
1178 }
1179 };
1180 switch (IndirectEInst) {
1181 case glsl::ExtInstFindUMsb:
1182 // clz needs OpExtInst and OpISub with constant 31, or splat
1183 // vector of 31. Add it to the constant list here.
1184 register_constant(
1185 ConstantInt::get(Type::getInt32Ty(Context), 31));
1186 break;
1187 case glsl::ExtInstAcos:
1188 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001189 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001190 case glsl::ExtInstAtan2:
1191 // We need 1/pi for acospi, asinpi, atan2pi.
1192 register_constant(
1193 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1194 break;
1195 default:
1196 assert(false && "internally inconsistent");
1197 }
David Neto22f144c2017-06-12 14:26:21 -04001198 }
1199 }
1200 }
1201 }
1202 }
1203
1204 return HasExtInst;
1205}
1206
1207void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1208 // Investigate global variable's type.
1209 FindType(GV.getType());
1210}
1211
1212void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1213 // Investigate function's type.
1214 FunctionType *FTy = F.getFunctionType();
1215
1216 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1217 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001218 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001219 if (GlobalConstFuncTyMap.count(FTy)) {
 1220 uint32_t GVCstArgIdx = GlobalConstFuncTyMap[FTy].second;
1221 SmallVector<Type *, 4> NewFuncParamTys;
1222 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1223 Type *ParamTy = FTy->getParamType(i);
1224 if (i == GVCstArgIdx) {
1225 Type *EleTy = ParamTy->getPointerElementType();
1226 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1227 }
1228
1229 NewFuncParamTys.push_back(ParamTy);
1230 }
1231
1232 FunctionType *NewFTy =
1233 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1234 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1235 FTy = NewFTy;
1236 }
1237
1238 FindType(FTy);
1239 } else {
 1240 // Kernel functions have no parameters in the generated SPIR-V, so create a
 1241 // parameterless function type and add it to the type map.
1242 SmallVector<Type *, 4> NewFuncParamTys;
1243 FunctionType *NewFTy =
1244 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1245 FindType(NewFTy);
1246 }
1247
1248 // Investigate instructions' type in function body.
1249 for (BasicBlock &BB : F) {
1250 for (Instruction &I : BB) {
1251 if (isa<ShuffleVectorInst>(I)) {
1252 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1253 // Ignore type for mask of shuffle vector instruction.
1254 if (i == 2) {
1255 continue;
1256 }
1257
1258 Value *Op = I.getOperand(i);
1259 if (!isa<MetadataAsValue>(Op)) {
1260 FindType(Op->getType());
1261 }
1262 }
1263
1264 FindType(I.getType());
1265 continue;
1266 }
1267
David Neto862b7d82018-06-14 18:48:37 -04001268 CallInst *Call = dyn_cast<CallInst>(&I);
1269
1270 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001271 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001272 // This is a fake call representing access to a resource variable.
1273 // We handle that elsewhere.
1274 continue;
1275 }
1276
Alan Baker202c8c72018-08-13 13:47:44 -04001277 if (Call && Call->getCalledFunction()->getName().startswith(
1278 clspv::WorkgroupAccessorFunction())) {
1279 // This is a fake call representing access to a workgroup variable.
1280 // We handle that elsewhere.
1281 continue;
1282 }
1283
alan-bakerf083bed2020-01-29 08:15:42 -05001284 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1285 // OpCompositeExtract which takes literal values for indices. As a result
1286 // don't map the type of indices.
1287 if (I.getOpcode() == Instruction::ExtractValue) {
1288 FindType(I.getOperand(0)->getType());
1289 continue;
1290 }
1291 if (I.getOpcode() == Instruction::InsertValue) {
1292 FindType(I.getOperand(0)->getType());
1293 FindType(I.getOperand(1)->getType());
1294 continue;
1295 }
1296
1297 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1298 // the index is a constant. In such a case don't map the index type.
1299 if (I.getOpcode() == Instruction::ExtractElement) {
1300 FindType(I.getOperand(0)->getType());
1301 Value *op1 = I.getOperand(1);
1302 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1303 FindType(op1->getType());
1304 }
1305 continue;
1306 }
1307 if (I.getOpcode() == Instruction::InsertElement) {
1308 FindType(I.getOperand(0)->getType());
1309 FindType(I.getOperand(1)->getType());
1310 Value *op2 = I.getOperand(2);
1311 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1312 FindType(op2->getType());
1313 }
1314 continue;
1315 }
1316
David Neto22f144c2017-06-12 14:26:21 -04001317 // Work through the operands of the instruction.
1318 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1319 Value *const Op = I.getOperand(i);
1320 // If any of the operands is a constant, find the type!
1321 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1322 FindType(Op->getType());
1323 }
1324 }
1325
1326 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001327 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001328 // Don't examine the operand types of a call instruction.
1329 break;
1330 }
Alan Baker202c8c72018-08-13 13:47:44 -04001331 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1332 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1333 clspv::WorkgroupAccessorFunction())) {
1334 // This is a fake call representing access to a workgroup variable.
1335 // We handle that elsewhere.
1336 continue;
1337 }
1338 }
David Neto22f144c2017-06-12 14:26:21 -04001339 if (!isa<MetadataAsValue>(&Op)) {
1340 FindType(Op->getType());
1341 continue;
1342 }
1343 }
1344
David Neto22f144c2017-06-12 14:26:21 -04001345 // We don't want to track the type of this call as we are going to replace
1346 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001347 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001348 Call->getCalledFunction()->getName())) {
1349 continue;
1350 }
1351
1352 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1353 // If gep's base operand has ModuleScopePrivate address space, make gep
1354 // return ModuleScopePrivate address space.
1355 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1356 // Add pointer type with private address space for global constant to
1357 // type list.
1358 Type *EleTy = I.getType()->getPointerElementType();
1359 Type *NewPTy =
1360 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1361
1362 FindType(NewPTy);
1363 continue;
1364 }
1365 }
1366
1367 FindType(I.getType());
1368 }
1369 }
1370}
1371
David Neto862b7d82018-06-14 18:48:37 -04001372void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1373 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001374 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001375 0 < getSamplerMap().size()) {
1376 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1377 if (!SamplerStructTy) {
1378 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1379 }
1380
1381 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1382
1383 FindType(SamplerTy);
1384 }
1385}
1386
1387void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1388 // Record types so they are generated.
1389 TypesNeedingLayout.reset();
1390 StructTypesNeedingBlock.reset();
1391
1392 // To match older clspv codegen, generate the float type first if required
1393 // for images.
1394 for (const auto *info : ModuleOrderedResourceVars) {
1395 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1396 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001397 if (IsIntImageType(info->var_fn->getReturnType())) {
1398 // Nothing for now...
1399 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1400 FindType(Type::getInt32Ty(M.getContext()));
1401 }
1402
1403 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001404 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001405 }
1406 }
1407
1408 for (const auto *info : ModuleOrderedResourceVars) {
1409 Type *type = info->var_fn->getReturnType();
1410
1411 switch (info->arg_kind) {
1412 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001413 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001414 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1415 StructTypesNeedingBlock.insert(sty);
1416 } else {
1417 errs() << *type << "\n";
1418 llvm_unreachable("Buffer arguments must map to structures!");
1419 }
1420 break;
1421 case clspv::ArgKind::Pod:
1422 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1423 StructTypesNeedingBlock.insert(sty);
1424 } else {
1425 errs() << *type << "\n";
1426 llvm_unreachable("POD arguments must map to structures!");
1427 }
1428 break;
1429 case clspv::ArgKind::ReadOnlyImage:
1430 case clspv::ArgKind::WriteOnlyImage:
1431 case clspv::ArgKind::Sampler:
1432 // Sampler and image types map to the pointee type but
1433 // in the uniform constant address space.
1434 type = PointerType::get(type->getPointerElementType(),
1435 clspv::AddressSpace::UniformConstant);
1436 break;
1437 default:
1438 break;
1439 }
1440
1441 // The converted type is the type of the OpVariable we will generate.
1442 // If the pointee type is an array of size zero, FindType will convert it
1443 // to a runtime array.
1444 FindType(type);
1445 }
1446
alan-bakerdcd97412019-09-16 15:32:30 -04001447 // If module constants are clustered in a storage buffer then that struct
1448 // needs layout decorations.
1449 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1450 for (GlobalVariable &GV : M.globals()) {
1451 PointerType *PTy = cast<PointerType>(GV.getType());
1452 const auto AS = PTy->getAddressSpace();
1453 const bool module_scope_constant_external_init =
1454 (AS == AddressSpace::Constant) && GV.hasInitializer();
1455 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1456 if (module_scope_constant_external_init &&
1457 spv::BuiltInMax == BuiltinType) {
1458 StructTypesNeedingBlock.insert(
1459 cast<StructType>(PTy->getPointerElementType()));
1460 }
1461 }
1462 }
1463
David Neto862b7d82018-06-14 18:48:37 -04001464 // Traverse the arrays and structures underneath each Block, and
1465 // mark them as needing layout.
1466 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1467 StructTypesNeedingBlock.end());
1468 while (!work_list.empty()) {
1469 Type *type = work_list.back();
1470 work_list.pop_back();
1471 TypesNeedingLayout.insert(type);
1472 switch (type->getTypeID()) {
1473 case Type::ArrayTyID:
1474 work_list.push_back(type->getArrayElementType());
1475 if (!Hack_generate_runtime_array_stride_early) {
1476 // Remember this array type for deferred decoration.
1477 TypesNeedingArrayStride.insert(type);
1478 }
1479 break;
1480 case Type::StructTyID:
1481 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1482 work_list.push_back(elem_ty);
1483 }
1484 default:
1485 // This type and its contained types don't get layout.
1486 break;
1487 }
1488 }
1489}
1490
Alan Baker202c8c72018-08-13 13:47:44 -04001491void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1492 // The SpecId assignment for pointer-to-local arguments is recorded in
1493 // module-level metadata. Translate that information into local argument
1494 // information.
1495 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001496 if (!nmd)
1497 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001498 for (auto operand : nmd->operands()) {
1499 MDTuple *tuple = cast<MDTuple>(operand);
1500 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1501 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001502 ConstantAsMetadata *arg_index_md =
1503 cast<ConstantAsMetadata>(tuple->getOperand(1));
1504 int arg_index = static_cast<int>(
1505 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1506 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001507
1508 ConstantAsMetadata *spec_id_md =
1509 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001510 int spec_id = static_cast<int>(
1511 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001512
1513 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1514 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001515 if (LocalSpecIdInfoMap.count(spec_id))
1516 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001517
1518 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
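 // Reserve four consecutive IDs for this spec id; array_size_id,
 // array_type_id, and ptr_array_type_id are consumed later when the
 // workgroup array types are emitted in GenerateSPIRVTypes.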
1519 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1520 nextID + 1, nextID + 2,
1521 nextID + 3, spec_id};
1522 LocalSpecIdInfoMap[spec_id] = info;
1523 nextID += 4;
1524
1525 // Ensure the types necessary for this argument get generated.
1526 Type *IdxTy = Type::getInt32Ty(M.getContext());
1527 FindConstant(ConstantInt::get(IdxTy, 0));
1528 FindType(IdxTy);
1529 FindType(arg->getType());
1530 }
1531}
1532
David Neto22f144c2017-06-12 14:26:21 -04001533void SPIRVProducerPass::FindType(Type *Ty) {
1534 TypeList &TyList = getTypeList();
1535
1536 if (0 != TyList.idFor(Ty)) {
1537 return;
1538 }
1539
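  // A pointer to an opaque struct (e.g. a sampler or image type) in the
  // constant or global address space is recorded as a UniformConstant
  // pointer instead, matching how those resources are emitted later.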
1540 if (Ty->isPointerTy()) {
1541 auto AddrSpace = Ty->getPointerAddressSpace();
1542 if ((AddressSpace::Constant == AddrSpace) ||
1543 (AddressSpace::Global == AddrSpace)) {
1544 auto PointeeTy = Ty->getPointerElementType();
1545
1546 if (PointeeTy->isStructTy() &&
1547 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1548 FindType(PointeeTy);
1549 auto ActualPointerTy =
1550 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1551 FindType(ActualPointerTy);
1552 return;
1553 }
1554 }
1555 }
1556
David Neto862b7d82018-06-14 18:48:37 -04001557 // By convention, LLVM array type with 0 elements will map to
1558 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
 1559 // has a constant number of elements. We need to register the type of
 1560 // that length constant.
1561 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1562 if (arrayTy->getNumElements() > 0) {
1563 LLVMContext &Context = Ty->getContext();
1564 FindType(Type::getInt32Ty(Context));
1565 }
David Neto22f144c2017-06-12 14:26:21 -04001566 }
1567
1568 for (Type *SubTy : Ty->subtypes()) {
1569 FindType(SubTy);
1570 }
1571
1572 TyList.insert(Ty);
1573}
1574
1575void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1576 // If the global variable has a (non undef) initializer.
1577 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001578 // Generate the constant if it's not the initializer to a module scope
1579 // constant that we will expect in a storage buffer.
1580 const bool module_scope_constant_external_init =
1581 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1582 clspv::Option::ModuleConstantsInStorageBuffer();
1583 if (!module_scope_constant_external_init) {
1584 FindConstant(GV.getInitializer());
1585 }
David Neto22f144c2017-06-12 14:26:21 -04001586 }
1587}
1588
1589void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1590 // Investigate constants in function body.
1591 for (BasicBlock &BB : F) {
1592 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001593 if (auto *call = dyn_cast<CallInst>(&I)) {
1594 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001595 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001596 // We've handled these constants elsewhere, so skip it.
1597 continue;
1598 }
Alan Baker202c8c72018-08-13 13:47:44 -04001599 if (name.startswith(clspv::ResourceAccessorFunction())) {
1600 continue;
1601 }
1602 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001603 continue;
1604 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001605 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1606 // Skip the first operand that has the SPIR-V Opcode
1607 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1608 if (isa<Constant>(I.getOperand(i)) &&
1609 !isa<GlobalValue>(I.getOperand(i))) {
1610 FindConstant(I.getOperand(i));
1611 }
1612 }
1613 continue;
1614 }
David Neto22f144c2017-06-12 14:26:21 -04001615 }
1616
1617 if (isa<AllocaInst>(I)) {
 1618 // An alloca's element-count operand is a constant. Ignore it.
1619 continue;
1620 } else if (isa<ShuffleVectorInst>(I)) {
1621 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1622 // Ignore constant for mask of shuffle vector instruction.
1623 if (i == 2) {
1624 continue;
1625 }
1626
1627 if (isa<Constant>(I.getOperand(i)) &&
1628 !isa<GlobalValue>(I.getOperand(i))) {
1629 FindConstant(I.getOperand(i));
1630 }
1631 }
1632
1633 continue;
1634 } else if (isa<InsertElementInst>(I)) {
1635 // Handle InsertElement with <4 x i8> specially.
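 // A <4 x i8> value may be packed into a single i32 (see the VectorTyID
 // handling in GenerateSPIRVTypes), so the insertion is lowered to
 // mask-and-shift arithmetic. Register the 0xFF mask and the byte shift
 // amount it will need.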
1636 Type *CompositeTy = I.getOperand(0)->getType();
1637 if (is4xi8vec(CompositeTy)) {
1638 LLVMContext &Context = CompositeTy->getContext();
1639 if (isa<Constant>(I.getOperand(0))) {
1640 FindConstant(I.getOperand(0));
1641 }
1642
1643 if (isa<Constant>(I.getOperand(1))) {
1644 FindConstant(I.getOperand(1));
1645 }
1646
1647 // Add mask constant 0xFF.
1648 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1649 FindConstant(CstFF);
1650
1651 // Add shift amount constant.
1652 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1653 uint64_t Idx = CI->getZExtValue();
1654 Constant *CstShiftAmount =
1655 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1656 FindConstant(CstShiftAmount);
1657 }
1658
1659 continue;
1660 }
1661
1662 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1663 // Ignore constant for index of InsertElement instruction.
1664 if (i == 2) {
1665 continue;
1666 }
1667
1668 if (isa<Constant>(I.getOperand(i)) &&
1669 !isa<GlobalValue>(I.getOperand(i))) {
1670 FindConstant(I.getOperand(i));
1671 }
1672 }
1673
1674 continue;
1675 } else if (isa<ExtractElementInst>(I)) {
1676 // Handle ExtractElement with <4 x i8> specially.
1677 Type *CompositeTy = I.getOperand(0)->getType();
1678 if (is4xi8vec(CompositeTy)) {
1679 LLVMContext &Context = CompositeTy->getContext();
1680 if (isa<Constant>(I.getOperand(0))) {
1681 FindConstant(I.getOperand(0));
1682 }
1683
1684 // Add mask constant 0xFF.
1685 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1686 FindConstant(CstFF);
1687
1688 // Add shift amount constant.
1689 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1690 uint64_t Idx = CI->getZExtValue();
1691 Constant *CstShiftAmount =
1692 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1693 FindConstant(CstShiftAmount);
1694 } else {
1695 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1696 FindConstant(Cst8);
1697 }
1698
1699 continue;
1700 }
1701
1702 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1703 // Ignore constant for index of ExtractElement instruction.
1704 if (i == 1) {
1705 continue;
1706 }
1707
1708 if (isa<Constant>(I.getOperand(i)) &&
1709 !isa<GlobalValue>(I.getOperand(i))) {
1710 FindConstant(I.getOperand(i));
1711 }
1712 }
1713
1714 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001715 } else if ((Instruction::Xor == I.getOpcode()) &&
1716 I.getType()->isIntegerTy(1)) {
 1717 // We special-case Xor where the type is i1 and one of the arguments
 1718 // is the constant 1 (true): this maps to OpLogicalNot in SPIR-V, so we
 1719 // don't need that constant.
David Neto22f144c2017-06-12 14:26:21 -04001720 bool foundConstantTrue = false;
1721 for (Use &Op : I.operands()) {
1722 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1723 auto CI = cast<ConstantInt>(Op);
1724
1725 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001726 // If we already found the true constant, we might (probably only
1727 // on -O0) have an OpLogicalNot which is taking a constant
1728 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001729 FindConstant(Op);
1730 } else {
1731 foundConstantTrue = true;
1732 }
1733 }
1734 }
1735
1736 continue;
David Netod2de94a2017-08-28 17:27:47 -04001737 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001738 // Special case if i8 is not generally handled.
1739 if (!clspv::Option::Int8Support()) {
1740 // For truncation to i8 we mask against 255.
1741 Type *ToTy = I.getType();
1742 if (8u == ToTy->getPrimitiveSizeInBits()) {
1743 LLVMContext &Context = ToTy->getContext();
1744 Constant *Cst255 =
1745 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1746 FindConstant(Cst255);
1747 }
David Netod2de94a2017-08-28 17:27:47 -04001748 }
Neil Henning39672102017-09-29 14:33:13 +01001749 } else if (isa<AtomicRMWInst>(I)) {
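 // Atomic operations take scope and memory-semantics operands in SPIR-V, so
 // register the Device scope and UniformMemory|SequentiallyConsistent
 // semantics constants used when they are translated.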
1750 LLVMContext &Context = I.getContext();
1751
1752 FindConstant(
1753 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1754 FindConstant(ConstantInt::get(
1755 Type::getInt32Ty(Context),
1756 spv::MemorySemanticsUniformMemoryMask |
1757 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001758 }
1759
1760 for (Use &Op : I.operands()) {
1761 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1762 FindConstant(Op);
1763 }
1764 }
1765 }
1766 }
1767}
1768
1769void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001770 ValueList &CstList = getConstantList();
1771
David Netofb9a7972017-08-25 17:08:24 -04001772 // If V is already tracked, ignore it.
1773 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001774 return;
1775 }
1776
David Neto862b7d82018-06-14 18:48:37 -04001777 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1778 return;
1779 }
1780
David Neto22f144c2017-06-12 14:26:21 -04001781 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001782 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001783
1784 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001785 if (is4xi8vec(CstTy)) {
1786 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001787 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001788 }
1789 }
1790
1791 if (Cst->getNumOperands()) {
1792 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1793 ++I) {
1794 FindConstant(*I);
1795 }
1796
David Netofb9a7972017-08-25 17:08:24 -04001797 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001798 return;
1799 } else if (const ConstantDataSequential *CDS =
1800 dyn_cast<ConstantDataSequential>(Cst)) {
1801 // Add constants for each element to constant list.
1802 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1803 Constant *EleCst = CDS->getElementAsConstant(i);
1804 FindConstant(EleCst);
1805 }
1806 }
1807
1808 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001809 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001810 }
1811}
1812
1813spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1814 switch (AddrSpace) {
1815 default:
1816 llvm_unreachable("Unsupported OpenCL address space");
1817 case AddressSpace::Private:
1818 return spv::StorageClassFunction;
1819 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001820 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001821 case AddressSpace::Constant:
1822 return clspv::Option::ConstantArgsInUniformBuffer()
1823 ? spv::StorageClassUniform
1824 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001825 case AddressSpace::Input:
1826 return spv::StorageClassInput;
1827 case AddressSpace::Local:
1828 return spv::StorageClassWorkgroup;
1829 case AddressSpace::UniformConstant:
1830 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001831 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001832 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001833 case AddressSpace::ModuleScopePrivate:
1834 return spv::StorageClassPrivate;
1835 }
1836}
1837
David Neto862b7d82018-06-14 18:48:37 -04001838spv::StorageClass
1839SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1840 switch (arg_kind) {
1841 case clspv::ArgKind::Buffer:
1842 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001843 case clspv::ArgKind::BufferUBO:
1844 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001845 case clspv::ArgKind::Pod:
1846 return clspv::Option::PodArgsInUniformBuffer()
1847 ? spv::StorageClassUniform
1848 : spv::StorageClassStorageBuffer;
1849 case clspv::ArgKind::Local:
1850 return spv::StorageClassWorkgroup;
1851 case clspv::ArgKind::ReadOnlyImage:
1852 case clspv::ArgKind::WriteOnlyImage:
1853 case clspv::ArgKind::Sampler:
1854 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001855 default:
1856 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001857 }
1858}
1859
David Neto22f144c2017-06-12 14:26:21 -04001860spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1861 return StringSwitch<spv::BuiltIn>(Name)
1862 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1863 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1864 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1865 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1866 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1867 .Default(spv::BuiltInMax);
1868}
1869
1870void SPIRVProducerPass::GenerateExtInstImport() {
1871 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1872 uint32_t &ExtInstImportID = getOpExtInstImportID();
1873
1874 //
1875 // Generate OpExtInstImport.
1876 //
1877 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001878 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001879 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1880 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001881}
1882
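// Emits OpType* instructions for every type gathered by the Find* passes,
// along with the member offsets, array strides, and Block decorations needed
// for types that participate in buffer layouts.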
alan-bakerb6b09dc2018-11-08 16:59:28 -05001883void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1884 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001885 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1886 ValueMapType &VMap = getValueMap();
1887 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001888 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001889
 1890 // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
 1891 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1892 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1893
1894 for (Type *Ty : getTypeList()) {
1895 // Update TypeMap with nextID for reference later.
1896 TypeMap[Ty] = nextID;
1897
1898 switch (Ty->getTypeID()) {
1899 default: {
1900 Ty->print(errs());
1901 llvm_unreachable("Unsupported type???");
1902 break;
1903 }
1904 case Type::MetadataTyID:
1905 case Type::LabelTyID: {
1906 // Ignore these types.
1907 break;
1908 }
1909 case Type::PointerTyID: {
1910 PointerType *PTy = cast<PointerType>(Ty);
1911 unsigned AddrSpace = PTy->getAddressSpace();
1912
1913 // For the purposes of our Vulkan SPIR-V type system, constant and global
1914 // are conflated.
1915 bool UseExistingOpTypePointer = false;
1916 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001917 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1918 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001919 // Check to see if we already created this type (for instance, if we
1920 // had a constant <type>* and a global <type>*, the type would be
1921 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001922 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1923 if (0 < TypeMap.count(GlobalTy)) {
1924 TypeMap[PTy] = TypeMap[GlobalTy];
1925 UseExistingOpTypePointer = true;
1926 break;
1927 }
David Neto22f144c2017-06-12 14:26:21 -04001928 }
1929 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001930 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1931 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001932
alan-bakerb6b09dc2018-11-08 16:59:28 -05001933 // Check to see if we already created this type (for instance, if we
1934 // had a constant <type>* and a global <type>*, the type would be
1935 // created by one of these types, and shared by both).
1936 auto ConstantTy =
1937 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001938 if (0 < TypeMap.count(ConstantTy)) {
1939 TypeMap[PTy] = TypeMap[ConstantTy];
1940 UseExistingOpTypePointer = true;
1941 }
David Neto22f144c2017-06-12 14:26:21 -04001942 }
1943 }
1944
David Neto862b7d82018-06-14 18:48:37 -04001945 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001946
David Neto862b7d82018-06-14 18:48:37 -04001947 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001948 //
1949 // Generate OpTypePointer.
1950 //
1951
1952 // OpTypePointer
1953 // Ops[0] = Storage Class
1954 // Ops[1] = Element Type ID
1955 SPIRVOperandList Ops;
1956
David Neto257c3892018-04-11 13:19:45 -04001957 Ops << MkNum(GetStorageClass(AddrSpace))
1958 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001959
David Neto87846742018-04-11 17:36:22 -04001960 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001961 SPIRVInstList.push_back(Inst);
1962 }
David Neto22f144c2017-06-12 14:26:21 -04001963 break;
1964 }
1965 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001966 StructType *STy = cast<StructType>(Ty);
1967
1968 // Handle sampler type.
1969 if (STy->isOpaque()) {
1970 if (STy->getName().equals("opencl.sampler_t")) {
1971 //
1972 // Generate OpTypeSampler
1973 //
1974 // Empty Ops.
1975 SPIRVOperandList Ops;
1976
David Neto87846742018-04-11 17:36:22 -04001977 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001978 SPIRVInstList.push_back(Inst);
1979 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001980 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1981 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05001982 STy->getName().startswith("opencl.image1d_array_ro_t") ||
1983 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05001984 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001985 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05001986 STy->getName().startswith("opencl.image2d_array_ro_t") ||
1987 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001988 STy->getName().startswith("opencl.image3d_ro_t") ||
1989 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001990 //
1991 // Generate OpTypeImage
1992 //
1993 // Ops[0] = Sampled Type ID
1994 // Ops[1] = Dim ID
1995 // Ops[2] = Depth (Literal Number)
1996 // Ops[3] = Arrayed (Literal Number)
1997 // Ops[4] = MS (Literal Number)
1998 // Ops[5] = Sampled (Literal Number)
1999 // Ops[6] = Image Format ID
2000 //
2001 SPIRVOperandList Ops;
2002
alan-bakerf67468c2019-11-25 15:51:49 -05002003 uint32_t ImageTyID = nextID++;
2004 uint32_t SampledTyID = 0;
2005 if (STy->getName().contains(".float")) {
2006 SampledTyID = lookupType(Type::getFloatTy(Context));
2007 } else if (STy->getName().contains(".uint")) {
2008 SampledTyID = lookupType(Type::getInt32Ty(Context));
2009 } else if (STy->getName().contains(".int")) {
2010 // Generate a signed 32-bit integer if necessary.
2011 if (int32ID == 0) {
2012 int32ID = nextID++;
2013 SPIRVOperandList intOps;
2014 intOps << MkNum(32);
2015 intOps << MkNum(1);
2016 auto signed_int =
2017 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2018 SPIRVInstList.push_back(signed_int);
2019 }
2020 SampledTyID = int32ID;
2021
2022 // Generate a vec4 of the signed int if necessary.
2023 if (v4int32ID == 0) {
2024 v4int32ID = nextID++;
2025 SPIRVOperandList vecOps;
2026 vecOps << MkId(int32ID);
2027 vecOps << MkNum(4);
2028 auto int_vec =
2029 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2030 SPIRVInstList.push_back(int_vec);
2031 }
2032 } else {
2033 // This was likely an UndefValue.
2034 SampledTyID = lookupType(Type::getFloatTy(Context));
2035 }
David Neto257c3892018-04-11 13:19:45 -04002036 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002037
2038 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002039 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002040 STy->getName().startswith("opencl.image1d_wo_t") ||
2041 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2042 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002043 DimID = spv::Dim1D;
2044 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2045 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002046 DimID = spv::Dim3D;
2047 }
David Neto257c3892018-04-11 13:19:45 -04002048 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002049
2050 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002051 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002052
alan-baker7150a1d2020-02-25 08:31:06 -05002053 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2054 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002055
2056 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002057 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002058
alan-baker7150a1d2020-02-25 08:31:06 -05002059 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002060 //
2061 // From Spec
2062 //
2063 // 0 indicates this is only known at run time, not at compile time
2064 // 1 indicates will be used with sampler
2065 // 2 indicates will be used without a sampler (a storage image)
2066 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002067 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002068 Sampled = 2;
2069 }
David Neto257c3892018-04-11 13:19:45 -04002070 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002071
2072 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002073 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002074
alan-bakerf67468c2019-11-25 15:51:49 -05002075 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002076 SPIRVInstList.push_back(Inst);
2077 break;
2078 }
2079 }
2080
2081 //
2082 // Generate OpTypeStruct
2083 //
2084 // Ops[0] ... Ops[n] = Member IDs
2085 SPIRVOperandList Ops;
2086
2087 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002088 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002089 }
2090
David Neto22f144c2017-06-12 14:26:21 -04002091 uint32_t STyID = nextID;
2092
alan-bakerb6b09dc2018-11-08 16:59:28 -05002093 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002094 SPIRVInstList.push_back(Inst);
2095
2096 // Generate OpMemberDecorate.
2097 auto DecoInsertPoint =
2098 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2099 [](SPIRVInstruction *Inst) -> bool {
2100 return Inst->getOpcode() != spv::OpDecorate &&
2101 Inst->getOpcode() != spv::OpMemberDecorate &&
2102 Inst->getOpcode() != spv::OpExtInstImport;
2103 });
2104
David Netoc463b372017-08-10 15:32:21 -04002105 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04002106 // Search for the correct offsets if this type was remapped.
2107 std::vector<uint32_t> *offsets = nullptr;
2108 auto iter = RemappedUBOTypeOffsets.find(STy);
2109 if (iter != RemappedUBOTypeOffsets.end()) {
2110 offsets = &iter->second;
2111 }
David Netoc463b372017-08-10 15:32:21 -04002112
David Neto862b7d82018-06-14 18:48:37 -04002113 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04002114 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2115 MemberIdx++) {
2116 // Ops[0] = Structure Type ID
2117 // Ops[1] = Member Index(Literal Number)
2118 // Ops[2] = Decoration (Offset)
2119 // Ops[3] = Byte Offset (Literal Number)
2120 Ops.clear();
2121
David Neto257c3892018-04-11 13:19:45 -04002122 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002123
alan-bakerb6b09dc2018-11-08 16:59:28 -05002124 auto ByteOffset =
2125 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04002126 if (offsets) {
2127 ByteOffset = (*offsets)[MemberIdx];
2128 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05002129 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04002130 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04002131 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04002132
David Neto87846742018-04-11 17:36:22 -04002133 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002134 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002135 }
2136
2137 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002138 if (StructTypesNeedingBlock.idFor(STy)) {
2139 Ops.clear();
2140 // Use Block decorations with StorageBuffer storage class.
2141 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002142
David Neto862b7d82018-06-14 18:48:37 -04002143 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2144 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002145 }
2146 break;
2147 }
2148 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002149 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002150
2151 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002152 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002153 SPIRVInstList.push_back(Inst);
2154 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002155 if (!clspv::Option::Int8Support()) {
2156 // i8 is added to TypeMap as i32.
2157 // No matter what LLVM type is requested first, always alias the
2158 // second one's SPIR-V type to be the same as the one we generated
2159 // first.
2160 unsigned aliasToWidth = 0;
2161 if (BitWidth == 8) {
2162 aliasToWidth = 32;
2163 BitWidth = 32;
2164 } else if (BitWidth == 32) {
2165 aliasToWidth = 8;
2166 }
2167 if (aliasToWidth) {
2168 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2169 auto where = TypeMap.find(otherType);
2170 if (where == TypeMap.end()) {
2171 // Go ahead and make it, but also map the other type to it.
2172 TypeMap[otherType] = nextID;
2173 } else {
2174 // Alias this SPIR-V type the existing type.
2175 TypeMap[Ty] = where->second;
2176 break;
2177 }
David Neto391aeb12017-08-26 15:51:58 -04002178 }
David Neto22f144c2017-06-12 14:26:21 -04002179 }
2180
David Neto257c3892018-04-11 13:19:45 -04002181 SPIRVOperandList Ops;
2182 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002183
2184 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002185 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002186 }
2187 break;
2188 }
2189 case Type::HalfTyID:
2190 case Type::FloatTyID:
2191 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002192 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002193 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002194
2195 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002196 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002197 break;
2198 }
2199 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002200 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002201 const uint64_t Length = ArrTy->getArrayNumElements();
2202 if (Length == 0) {
2203 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002204
David Neto862b7d82018-06-14 18:48:37 -04002205 // Only generate the type once.
2206 // TODO(dneto): Can it ever be generated more than once?
2207 // Doesn't LLVM type uniqueness guarantee we'll only see this
2208 // once?
2209 Type *EleTy = ArrTy->getArrayElementType();
2210 if (OpRuntimeTyMap.count(EleTy) == 0) {
2211 uint32_t OpTypeRuntimeArrayID = nextID;
2212 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002213
David Neto862b7d82018-06-14 18:48:37 -04002214 //
2215 // Generate OpTypeRuntimeArray.
2216 //
David Neto22f144c2017-06-12 14:26:21 -04002217
David Neto862b7d82018-06-14 18:48:37 -04002218 // OpTypeRuntimeArray
2219 // Ops[0] = Element Type ID
2220 SPIRVOperandList Ops;
2221 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002222
David Neto862b7d82018-06-14 18:48:37 -04002223 SPIRVInstList.push_back(
2224 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002225
David Neto862b7d82018-06-14 18:48:37 -04002226 if (Hack_generate_runtime_array_stride_early) {
2227 // Generate OpDecorate.
2228 auto DecoInsertPoint = std::find_if(
2229 SPIRVInstList.begin(), SPIRVInstList.end(),
2230 [](SPIRVInstruction *Inst) -> bool {
2231 return Inst->getOpcode() != spv::OpDecorate &&
2232 Inst->getOpcode() != spv::OpMemberDecorate &&
2233 Inst->getOpcode() != spv::OpExtInstImport;
2234 });
David Neto22f144c2017-06-12 14:26:21 -04002235
David Neto862b7d82018-06-14 18:48:37 -04002236 // Ops[0] = Target ID
2237 // Ops[1] = Decoration (ArrayStride)
2238 // Ops[2] = Stride Number(Literal Number)
2239 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002240
David Neto862b7d82018-06-14 18:48:37 -04002241 Ops << MkId(OpTypeRuntimeArrayID)
2242 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002243 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002244
David Neto862b7d82018-06-14 18:48:37 -04002245 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2246 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2247 }
2248 }
David Neto22f144c2017-06-12 14:26:21 -04002249
David Neto862b7d82018-06-14 18:48:37 -04002250 } else {
David Neto22f144c2017-06-12 14:26:21 -04002251
David Neto862b7d82018-06-14 18:48:37 -04002252 //
2253 // Generate OpConstant and OpTypeArray.
2254 //
2255
2256 //
2257 // Generate OpConstant for array length.
2258 //
2259 // Ops[0] = Result Type ID
2260 // Ops[1] .. Ops[n] = Values LiteralNumber
2261 SPIRVOperandList Ops;
2262
2263 Type *LengthTy = Type::getInt32Ty(Context);
2264 uint32_t ResTyID = lookupType(LengthTy);
2265 Ops << MkId(ResTyID);
2266
2267 assert(Length < UINT32_MAX);
2268 Ops << MkNum(static_cast<uint32_t>(Length));
2269
2270 // Add constant for length to constant list.
2271 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2272 AllocatedVMap[CstLength] = nextID;
2273 VMap[CstLength] = nextID;
2274 uint32_t LengthID = nextID;
2275
2276 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2277 SPIRVInstList.push_back(CstInst);
2278
2279 // Remember to generate ArrayStride later
2280 getTypesNeedingArrayStride().insert(Ty);
2281
2282 //
2283 // Generate OpTypeArray.
2284 //
2285 // Ops[0] = Element Type ID
2286 // Ops[1] = Array Length Constant ID
2287 Ops.clear();
2288
2289 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2290 Ops << MkId(EleTyID) << MkId(LengthID);
2291
2292 // Update TypeMap with nextID.
2293 TypeMap[Ty] = nextID;
2294
2295 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2296 SPIRVInstList.push_back(ArrayInst);
2297 }
David Neto22f144c2017-06-12 14:26:21 -04002298 break;
2299 }
2300 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002301 // <4 x i8> is changed to i32 if i8 is not generally supported.
2302 if (!clspv::Option::Int8Support() &&
2303 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002304 if (Ty->getVectorNumElements() == 4) {
2305 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2306 break;
2307 } else {
2308 Ty->print(errs());
2309 llvm_unreachable("Support above i8 vector type");
2310 }
2311 }
2312
2313 // Ops[0] = Component Type ID
2314 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002315 SPIRVOperandList Ops;
2316 Ops << MkId(lookupType(Ty->getVectorElementType()))
2317 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002318
alan-bakerb6b09dc2018-11-08 16:59:28 -05002319 SPIRVInstruction *inst =
2320 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002321 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002322 break;
2323 }
2324 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002325 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002326 SPIRVInstList.push_back(Inst);
2327 break;
2328 }
2329 case Type::FunctionTyID: {
2330 // Generate SPIRV instruction for function type.
2331 FunctionType *FTy = cast<FunctionType>(Ty);
2332
2333 // Ops[0] = Return Type ID
2334 // Ops[1] ... Ops[n] = Parameter Type IDs
2335 SPIRVOperandList Ops;
2336
2337 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002338 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002339
2340 // Find SPIRV instructions for parameter types
2341 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2342 // Find SPIRV instruction for parameter type.
2343 auto ParamTy = FTy->getParamType(k);
2344 if (ParamTy->isPointerTy()) {
2345 auto PointeeTy = ParamTy->getPointerElementType();
2346 if (PointeeTy->isStructTy() &&
2347 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2348 ParamTy = PointeeTy;
2349 }
2350 }
2351
David Netoc6f3ab22018-04-06 18:02:31 -04002352 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002353 }
2354
David Neto87846742018-04-11 17:36:22 -04002355 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002356 SPIRVInstList.push_back(Inst);
2357 break;
2358 }
2359 }
2360 }
2361
2362 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002363 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002364 //
2365 // Generate OpTypeSampledImage.
2366 //
2367 // Ops[0] = Image Type ID
2368 //
2369 SPIRVOperandList Ops;
2370
David Netoc6f3ab22018-04-06 18:02:31 -04002371 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002372
alan-bakerabd82722019-12-03 17:14:51 -05002373 // Update the image type map.
2374 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002375
David Neto87846742018-04-11 17:36:22 -04002376 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002377 SPIRVInstList.push_back(Inst);
2378 }
David Netoc6f3ab22018-04-06 18:02:31 -04002379
2380 // Generate types for pointer-to-local arguments.
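 // Each distinct SpecId gets an OpSpecConstant for the array length, an
 // OpTypeArray sized by that constant, and an OpTypePointer in the Workgroup
 // storage class. Roughly (IDs illustrative):
 //   %len = OpSpecConstant %uint 1
 //   %arr = OpTypeArray %elem %len
 //   %ptr = OpTypePointer Workgroup %arr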
Alan Baker202c8c72018-08-13 13:47:44 -04002381 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2382 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002383 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002384
2385 // Generate the spec constant.
2386 SPIRVOperandList Ops;
2387 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002388 SPIRVInstList.push_back(
2389 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002390
2391 // Generate the array type.
2392 Ops.clear();
2393 // The element type must have been created.
2394 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2395 assert(elem_ty_id);
2396 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2397
2398 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002399 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002400
2401 Ops.clear();
2402 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002403 SPIRVInstList.push_back(new SPIRVInstruction(
2404 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002405 }
David Neto22f144c2017-06-12 14:26:21 -04002406}
2407
2408void SPIRVProducerPass::GenerateSPIRVConstants() {
2409 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2410 ValueMapType &VMap = getValueMap();
2411 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2412 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002413 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002414
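 // Emit an OpConstant* instruction for each constant gathered by the Find*
 // passes, skipping any that were already emitted while generating array
 // types above.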
2415 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002416 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002417 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002418
2419 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002420 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002421 continue;
2422 }
2423
David Netofb9a7972017-08-25 17:08:24 -04002424 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002425 VMap[Cst] = nextID;
2426
2427 //
2428 // Generate OpConstant.
2429 //
2430
2431 // Ops[0] = Result Type ID
2432 // Ops[1] .. Ops[n] = Values LiteralNumber
2433 SPIRVOperandList Ops;
2434
David Neto257c3892018-04-11 13:19:45 -04002435 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002436
2437 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002438 spv::Op Opcode = spv::OpNop;
2439
2440 if (isa<UndefValue>(Cst)) {
2441 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002442 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002443 if (hack_undef && IsTypeNullable(Cst->getType())) {
2444 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002445 }
David Neto22f144c2017-06-12 14:26:21 -04002446 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2447 unsigned BitWidth = CI->getBitWidth();
2448 if (BitWidth == 1) {
2449 // If the bitwidth of constant is 1, generate OpConstantTrue or
2450 // OpConstantFalse.
2451 if (CI->getZExtValue()) {
2452 // Ops[0] = Result Type ID
2453 Opcode = spv::OpConstantTrue;
2454 } else {
2455 // Ops[0] = Result Type ID
2456 Opcode = spv::OpConstantFalse;
2457 }
David Neto22f144c2017-06-12 14:26:21 -04002458 } else {
2459 auto V = CI->getZExtValue();
2460 LiteralNum.push_back(V & 0xFFFFFFFF);
2461
2462 if (BitWidth > 32) {
2463 LiteralNum.push_back(V >> 32);
2464 }
2465
2466 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002467
David Neto257c3892018-04-11 13:19:45 -04002468 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002469 }
2470 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2471 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2472 Type *CFPTy = CFP->getType();
2473 if (CFPTy->isFloatTy()) {
2474 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002475 } else if (CFPTy->isDoubleTy()) {
2476 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2477 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002478 } else if (CFPTy->isHalfTy()) {
2479 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002480 } else {
2481 CFPTy->print(errs());
2482 llvm_unreachable("Implement this ConstantFP Type");
2483 }
2484
2485 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002486
David Neto257c3892018-04-11 13:19:45 -04002487 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002488 } else if (isa<ConstantDataSequential>(Cst) &&
2489 cast<ConstantDataSequential>(Cst)->isString()) {
2490 Cst->print(errs());
2491 llvm_unreachable("Implement this Constant");
2492
2493 } else if (const ConstantDataSequential *CDS =
2494 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002495 // Let's convert <4 x i8> constant to int constant specially.
2496 // This case occurs when all the values are specified as constant
2497 // ints.
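      // For example, with this packing <i8 1, i8 2, i8 3, i8 4> becomes the
      // single 32-bit constant 0x01020304: element 0 ends up in the most
      // significant byte.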
2498 Type *CstTy = Cst->getType();
2499 if (is4xi8vec(CstTy)) {
2500 LLVMContext &Context = CstTy->getContext();
2501
2502 //
2503 // Generate OpConstant with OpTypeInt 32 0.
2504 //
Neil Henning39672102017-09-29 14:33:13 +01002505 uint32_t IntValue = 0;
2506 for (unsigned k = 0; k < 4; k++) {
2507 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002508 IntValue = (IntValue << 8) | (Val & 0xffu);
2509 }
2510
2511 Type *i32 = Type::getInt32Ty(Context);
2512 Constant *CstInt = ConstantInt::get(i32, IntValue);
2513 // If this constant is already registered on VMap, use it.
2514 if (VMap.count(CstInt)) {
2515 uint32_t CstID = VMap[CstInt];
2516 VMap[Cst] = CstID;
2517 continue;
2518 }
2519
David Neto257c3892018-04-11 13:19:45 -04002520 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002521
David Neto87846742018-04-11 17:36:22 -04002522 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002523 SPIRVInstList.push_back(CstInst);
2524
2525 continue;
2526 }
2527
2528 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002529 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2530 Constant *EleCst = CDS->getElementAsConstant(k);
2531 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002532 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002533 }
2534
2535 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002536 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2537 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002538 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002539 Type *CstTy = Cst->getType();
2540 if (is4xi8vec(CstTy)) {
2541 LLVMContext &Context = CstTy->getContext();
2542
2543 //
2544 // Generate OpConstant with OpTypeInt 32 0.
2545 //
Neil Henning39672102017-09-29 14:33:13 +01002546 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002547 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2548 I != E; ++I) {
2549 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002550 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002551 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2552 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002553 }
David Neto49351ac2017-08-26 17:32:20 -04002554 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002555 }
2556
David Neto49351ac2017-08-26 17:32:20 -04002557 Type *i32 = Type::getInt32Ty(Context);
2558 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002559 // If this constant is already registered on VMap, use it.
2560 if (VMap.count(CstInt)) {
2561 uint32_t CstID = VMap[CstInt];
2562 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002563 continue;
David Neto22f144c2017-06-12 14:26:21 -04002564 }
2565
David Neto257c3892018-04-11 13:19:45 -04002566 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002567
David Neto87846742018-04-11 17:36:22 -04002568 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002569 SPIRVInstList.push_back(CstInst);
2570
David Neto19a1bad2017-08-25 15:01:41 -04002571 continue;
David Neto22f144c2017-06-12 14:26:21 -04002572 }
2573
2574 // We use a constant composite in SPIR-V for our constant aggregate in
2575 // LLVM.
2576 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002577
2578 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2579 // Look up the ID of the element of this aggregate (which we will
2580 // previously have created a constant for).
2581 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2582
2583 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002584 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002585 }
2586 } else if (Cst->isNullValue()) {
2587 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002588 } else {
2589 Cst->print(errs());
2590 llvm_unreachable("Unsupported Constant???");
2591 }
2592
alan-baker5b86ed72019-02-15 08:26:50 -05002593 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2594 // Null pointer requires variable pointers.
2595 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2596 }
2597
David Neto87846742018-04-11 17:36:22 -04002598 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002599 SPIRVInstList.push_back(CstInst);
2600 }
2601}
2602
2603void SPIRVProducerPass::GenerateSamplers(Module &M) {
2604 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002605
alan-bakerb6b09dc2018-11-08 16:59:28 -05002606 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002607 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002608 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2609 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002610
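  // Two passes over the users of the literal-sampler helper function: the
  // loop below records the descriptor set and binding requested for each
  // sampler value, and the second loop emits one OpVariable (plus
  // DescriptorSet/Binding decorations) per distinct sampler value.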
David Neto862b7d82018-06-14 18:48:37 -04002611 // We might have samplers in the sampler map that are not used
2612 // in the translation unit. We need to allocate variables
2613 // for them and bindings too.
2614 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002615
Kévin Petitdf71de32019-04-09 14:09:50 +01002616 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002617 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002618 if (!var_fn)
2619 return;
alan-baker09cb9802019-12-10 13:16:27 -05002620
David Neto862b7d82018-06-14 18:48:37 -04002621 for (auto user : var_fn->users()) {
2622 // Populate SamplerLiteralToDescriptorSetMap and
2623 // SamplerLiteralToBindingMap.
2624 //
2625 // Look for calls like
2626 // call %opencl.sampler_t addrspace(2)*
2627 // @clspv.sampler.var.literal(
2628 // i32 descriptor,
2629 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002630 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002631 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002632 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002633 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002634 auto sampler_value = third_param;
2635 if (clspv::Option::UseSamplerMap()) {
2636 if (third_param >= sampler_map.size()) {
2637 errs() << "Out of bounds index to sampler map: " << third_param;
2638 llvm_unreachable("bad sampler init: out of bounds");
2639 }
2640 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002641 }
2642
David Neto862b7d82018-06-14 18:48:37 -04002643 const auto descriptor_set = static_cast<unsigned>(
2644 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2645 const auto binding = static_cast<unsigned>(
2646 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2647
2648 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2649 SamplerLiteralToBindingMap[sampler_value] = binding;
2650 used_bindings.insert(binding);
2651 }
2652 }
2653
alan-baker09cb9802019-12-10 13:16:27 -05002654 DenseSet<size_t> seen;
2655 for (auto user : var_fn->users()) {
2656 if (!isa<CallInst>(user))
2657 continue;
2658
2659 auto call = cast<CallInst>(user);
2660 const unsigned third_param = static_cast<unsigned>(
2661 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2662
2663 // Already allocated a variable for this value.
2664 if (!seen.insert(third_param).second)
2665 continue;
2666
2667 auto sampler_value = third_param;
2668 if (clspv::Option::UseSamplerMap()) {
2669 sampler_value = sampler_map[third_param].first;
2670 }
2671
David Neto22f144c2017-06-12 14:26:21 -04002672 // Generate OpVariable.
2673 //
2674 // GIDOps[0] : Result Type ID
2675 // GIDOps[1] : Storage Class
2676 SPIRVOperandList Ops;
2677
David Neto257c3892018-04-11 13:19:45 -04002678 Ops << MkId(lookupType(SamplerTy))
2679 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002680
David Neto862b7d82018-06-14 18:48:37 -04002681 auto sampler_var_id = nextID++;
2682 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002683 SPIRVInstList.push_back(Inst);
2684
alan-baker09cb9802019-12-10 13:16:27 -05002685 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002686
2687 // Find Insert Point for OpDecorate.
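    // Decorations must stay grouped in the module's annotation section, so
    // locate the first instruction that is not a decoration (or the leading
    // OpExtInstImport) and insert new decorations just before it.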
2688 auto DecoInsertPoint =
2689 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2690 [](SPIRVInstruction *Inst) -> bool {
2691 return Inst->getOpcode() != spv::OpDecorate &&
2692 Inst->getOpcode() != spv::OpMemberDecorate &&
2693 Inst->getOpcode() != spv::OpExtInstImport;
2694 });
2695
2696 // Ops[0] = Target ID
2697 // Ops[1] = Decoration (DescriptorSet)
2698 // Ops[2] = LiteralNumber according to Decoration
2699 Ops.clear();
2700
David Neto862b7d82018-06-14 18:48:37 -04002701 unsigned descriptor_set;
2702 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002703 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002704 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002705 // This sampler is not actually used. Find the next one.
2706 for (binding = 0; used_bindings.count(binding); binding++)
2707 ;
2708 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2709 used_bindings.insert(binding);
2710 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002711 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2712 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002713
alan-baker09cb9802019-12-10 13:16:27 -05002714 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002715 descriptorMapEntries->emplace_back(std::move(sampler_data),
2716 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002717 }
2718
2719 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2720 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002721
David Neto87846742018-04-11 17:36:22 -04002722 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002723 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2724
2725 // Ops[0] = Target ID
2726 // Ops[1] = Decoration (Binding)
2727 // Ops[2] = LiteralNumber according to Decoration
2728 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002729 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2730 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002731
David Neto87846742018-04-11 17:36:22 -04002732 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002733 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2734 }
David Neto862b7d82018-06-14 18:48:37 -04002735}
David Neto22f144c2017-06-12 14:26:21 -04002736
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002737void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002738 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2739 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002740
David Neto862b7d82018-06-14 18:48:37 -04002741 // Generate variables. Make one for each resource var info object.
2742 for (auto *info : ModuleOrderedResourceVars) {
2743 Type *type = info->var_fn->getReturnType();
2744 // Remap the address space for opaque types.
2745 switch (info->arg_kind) {
2746 case clspv::ArgKind::Sampler:
2747 case clspv::ArgKind::ReadOnlyImage:
2748 case clspv::ArgKind::WriteOnlyImage:
2749 type = PointerType::get(type->getPointerElementType(),
2750 clspv::AddressSpace::UniformConstant);
2751 break;
2752 default:
2753 break;
2754 }
David Neto22f144c2017-06-12 14:26:21 -04002755
David Neto862b7d82018-06-14 18:48:37 -04002756 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002757
David Neto862b7d82018-06-14 18:48:37 -04002758 const auto type_id = lookupType(type);
2759 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2760 SPIRVOperandList Ops;
2761 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002762
David Neto862b7d82018-06-14 18:48:37 -04002763 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2764 SPIRVInstList.push_back(Inst);
2765
2766 // Map calls to the variable-builtin-function.
2767 for (auto &U : info->var_fn->uses()) {
2768 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2769 const auto set = unsigned(
2770 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2771 const auto binding = unsigned(
2772 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2773 if (set == info->descriptor_set && binding == info->binding) {
2774 switch (info->arg_kind) {
2775 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002776 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002777 case clspv::ArgKind::Pod:
2778 // The call maps to the variable directly.
2779 VMap[call] = info->var_id;
2780 break;
2781 case clspv::ArgKind::Sampler:
2782 case clspv::ArgKind::ReadOnlyImage:
2783 case clspv::ArgKind::WriteOnlyImage:
2784 // The call maps to a load we generate later.
2785 ResourceVarDeferredLoadCalls[call] = info->var_id;
2786 break;
2787 default:
2788 llvm_unreachable("Unhandled arg kind");
2789 }
2790 }
David Neto22f144c2017-06-12 14:26:21 -04002791 }
David Neto862b7d82018-06-14 18:48:37 -04002792 }
2793 }
David Neto22f144c2017-06-12 14:26:21 -04002794
David Neto862b7d82018-06-14 18:48:37 -04002795 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002796
David Neto862b7d82018-06-14 18:48:37 -04002797 // Find Insert Point for OpDecorate.
2798 auto DecoInsertPoint =
2799 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2800 [](SPIRVInstruction *Inst) -> bool {
2801 return Inst->getOpcode() != spv::OpDecorate &&
2802 Inst->getOpcode() != spv::OpMemberDecorate &&
2803 Inst->getOpcode() != spv::OpExtInstImport;
2804 });
2805
2806 SPIRVOperandList Ops;
2807 for (auto *info : ModuleOrderedResourceVars) {
2808 // Decorate with DescriptorSet and Binding.
2809 Ops.clear();
2810 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2811 << MkNum(info->descriptor_set);
2812 SPIRVInstList.insert(DecoInsertPoint,
2813 new SPIRVInstruction(spv::OpDecorate, Ops));
2814
2815 Ops.clear();
2816 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2817 << MkNum(info->binding);
2818 SPIRVInstList.insert(DecoInsertPoint,
2819 new SPIRVInstruction(spv::OpDecorate, Ops));
2820
alan-bakere9308012019-03-15 10:25:13 -04002821 if (info->coherent) {
2822 // Decorate with Coherent if required for the variable.
2823 Ops.clear();
2824 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2825 SPIRVInstList.insert(DecoInsertPoint,
2826 new SPIRVInstruction(spv::OpDecorate, Ops));
2827 }
2828
David Neto862b7d82018-06-14 18:48:37 -04002829 // Generate NonWritable and NonReadable
2830 switch (info->arg_kind) {
2831 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002832 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002833 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2834 clspv::AddressSpace::Constant) {
2835 Ops.clear();
2836 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2837 SPIRVInstList.insert(DecoInsertPoint,
2838 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002839 }
David Neto862b7d82018-06-14 18:48:37 -04002840 break;
David Neto862b7d82018-06-14 18:48:37 -04002841 case clspv::ArgKind::WriteOnlyImage:
2842 Ops.clear();
2843 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2844 SPIRVInstList.insert(DecoInsertPoint,
2845 new SPIRVInstruction(spv::OpDecorate, Ops));
2846 break;
2847 default:
2848 break;
David Neto22f144c2017-06-12 14:26:21 -04002849 }
2850 }
2851}
2852
2853void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002854 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002855 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2856 ValueMapType &VMap = getValueMap();
2857 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002858 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002859
2860 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2861 Type *Ty = GV.getType();
2862 PointerType *PTy = cast<PointerType>(Ty);
2863
2864 uint32_t InitializerID = 0;
2865
2866 // Workgroup size is handled differently (it goes into a constant)
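  // If every kernel carries reqd_work_group_size metadata (and the values
  // agree), the builtin's value is a plain OpConstantComposite built from
  // those constants. Otherwise three OpSpecConstants are generated (decorated
  // later with SpecIds 0, 1 and 2) and composed with OpSpecConstantComposite
  // so the host can specialize the workgroup size.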
2867 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2868 std::vector<bool> HasMDVec;
2869 uint32_t PrevXDimCst = 0xFFFFFFFF;
2870 uint32_t PrevYDimCst = 0xFFFFFFFF;
2871 uint32_t PrevZDimCst = 0xFFFFFFFF;
2872 for (Function &Func : *GV.getParent()) {
2873 if (Func.isDeclaration()) {
2874 continue;
2875 }
2876
2877 // We only need to check kernels.
2878 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2879 continue;
2880 }
2881
2882 if (const MDNode *MD =
2883 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2884 uint32_t CurXDimCst = static_cast<uint32_t>(
2885 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2886 uint32_t CurYDimCst = static_cast<uint32_t>(
2887 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2888 uint32_t CurZDimCst = static_cast<uint32_t>(
2889 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2890
2891 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2892 PrevZDimCst == 0xFFFFFFFF) {
2893 PrevXDimCst = CurXDimCst;
2894 PrevYDimCst = CurYDimCst;
2895 PrevZDimCst = CurZDimCst;
2896 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2897 CurZDimCst != PrevZDimCst) {
2898 llvm_unreachable(
2899 "reqd_work_group_size must be the same across all kernels");
2900 } else {
2901 continue;
2902 }
2903
2904 //
2905 // Generate OpConstantComposite.
2906 //
2907 // Ops[0] : Result Type ID
2908 // Ops[1] : Constant size for x dimension.
2909 // Ops[2] : Constant size for y dimension.
2910 // Ops[3] : Constant size for z dimension.
2911 SPIRVOperandList Ops;
2912
2913 uint32_t XDimCstID =
2914 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2915 uint32_t YDimCstID =
2916 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2917 uint32_t ZDimCstID =
2918 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2919
2920 InitializerID = nextID;
2921
David Neto257c3892018-04-11 13:19:45 -04002922 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2923 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002924
David Neto87846742018-04-11 17:36:22 -04002925 auto *Inst =
2926 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002927 SPIRVInstList.push_back(Inst);
2928
2929 HasMDVec.push_back(true);
2930 } else {
2931 HasMDVec.push_back(false);
2932 }
2933 }
2934
2935 // Check that all kernels have the same reqd_work_group_size definition.
2936 bool HasMD = false;
2937 if (!HasMDVec.empty()) {
2938 HasMD = HasMDVec[0];
2939 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2940 if (HasMD != HasMDVec[i]) {
2941 llvm_unreachable(
2942 "Kernels should have consistent work group size definition");
2943 }
2944 }
2945 }
2946
2947 // If all kernels do not have metadata for reqd_work_group_size, generate
2948 // OpSpecConstants for x/y/z dimension.
2949 if (!HasMD) {
2950 //
2951 // Generate OpSpecConstants for x/y/z dimension.
2952 //
2953 // Ops[0] : Result Type ID
2954 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2955 uint32_t XDimCstID = 0;
2956 uint32_t YDimCstID = 0;
2957 uint32_t ZDimCstID = 0;
2958
David Neto22f144c2017-06-12 14:26:21 -04002959 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002960 uint32_t result_type_id =
2961 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002962
David Neto257c3892018-04-11 13:19:45 -04002963 // X Dimension
2964 Ops << MkId(result_type_id) << MkNum(1);
2965 XDimCstID = nextID++;
2966 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002967 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002968
2969 // Y Dimension
2970 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002971 Ops << MkId(result_type_id) << MkNum(1);
2972 YDimCstID = nextID++;
2973 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002974 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002975
2976 // Z Dimension
2977 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002978 Ops << MkId(result_type_id) << MkNum(1);
2979 ZDimCstID = nextID++;
2980 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002981 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002982
David Neto257c3892018-04-11 13:19:45 -04002983 BuiltinDimVec.push_back(XDimCstID);
2984 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002985 BuiltinDimVec.push_back(ZDimCstID);
2986
David Neto22f144c2017-06-12 14:26:21 -04002987 //
2988 // Generate OpSpecConstantComposite.
2989 //
2990 // Ops[0] : Result Type ID
2991 // Ops[1] : Constant size for x dimension.
2992 // Ops[2] : Constant size for y dimension.
2993 // Ops[3] : Constant size for z dimension.
2994 InitializerID = nextID;
2995
2996 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002997 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2998 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002999
David Neto87846742018-04-11 17:36:22 -04003000 auto *Inst =
3001 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003002 SPIRVInstList.push_back(Inst);
3003 }
3004 }
3005
David Neto22f144c2017-06-12 14:26:21 -04003006 VMap[&GV] = nextID;
3007
3008 //
3009 // Generate OpVariable.
3010 //
3011 // GIDOps[0] : Result Type ID
3012 // GIDOps[1] : Storage Class
3013 SPIRVOperandList Ops;
3014
David Neto85082642018-03-24 06:55:20 -07003015 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003016 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003017
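  // A module-scope __constant variable with an initializer can instead be
  // supplied by the host through a storage buffer when the corresponding
  // clspv option is enabled; in that case the initializer is not attached to
  // the OpVariable but emitted into the descriptor map below.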
David Neto85082642018-03-24 06:55:20 -07003018 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003019 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003020 clspv::Option::ModuleConstantsInStorageBuffer();
3021
Kévin Petit23d5f182019-08-13 16:21:29 +01003022 if (GV.hasInitializer()) {
3023 auto GVInit = GV.getInitializer();
3024 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3025 assert(VMap.count(GVInit) == 1);
3026 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003027 }
3028 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003029
3030 if (0 != InitializerID) {
3031 // Emit the ID of the initializer as part of the variable definition.
3032 Ops << MkId(InitializerID);
3033 }
David Neto85082642018-03-24 06:55:20 -07003034 const uint32_t var_id = nextID++;
3035
David Neto87846742018-04-11 17:36:22 -04003036 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003037 SPIRVInstList.push_back(Inst);
3038
3039 // If we have a builtin.
3040 if (spv::BuiltInMax != BuiltinType) {
3041 // Find Insert Point for OpDecorate.
3042 auto DecoInsertPoint =
3043 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3044 [](SPIRVInstruction *Inst) -> bool {
3045 return Inst->getOpcode() != spv::OpDecorate &&
3046 Inst->getOpcode() != spv::OpMemberDecorate &&
3047 Inst->getOpcode() != spv::OpExtInstImport;
3048 });
3049 //
3050 // Generate OpDecorate.
3051 //
3052 // DOps[0] = Target ID
3053 // DOps[1] = Decoration (Builtin)
3054 // DOps[2] = BuiltIn ID
3055 uint32_t ResultID;
3056
3057 // WorkgroupSize is different: we decorate the constant composite that has
3058 // its value, rather than the variable that we use to access the value.
3059 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3060 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003061 // Save both the value and variable IDs for later.
3062 WorkgroupSizeValueID = InitializerID;
3063 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003064 } else {
3065 ResultID = VMap[&GV];
3066 }
3067
3068 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003069 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3070 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003071
David Neto87846742018-04-11 17:36:22 -04003072 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003073 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003074 } else if (module_scope_constant_external_init) {
3075 // This module scope constant is initialized from a storage buffer with data
3076 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003077 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003078
David Neto862b7d82018-06-14 18:48:37 -04003079 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003080 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3081 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003082 std::string hexbytes;
3083 llvm::raw_string_ostream str(hexbytes);
3084 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003085 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3086 str.str()};
3087 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3088 0);
David Neto85082642018-03-24 06:55:20 -07003089
3090 // Find Insert Point for OpDecorate.
3091 auto DecoInsertPoint =
3092 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3093 [](SPIRVInstruction *Inst) -> bool {
3094 return Inst->getOpcode() != spv::OpDecorate &&
3095 Inst->getOpcode() != spv::OpMemberDecorate &&
3096 Inst->getOpcode() != spv::OpExtInstImport;
3097 });
3098
David Neto257c3892018-04-11 13:19:45 -04003099 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003100 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003101 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3102 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003103 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003104
3105 // OpDecorate %var DescriptorSet <descriptor_set>
3106 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003107 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3108 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003109 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003110 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003111 }
3112}
3113
David Netoc6f3ab22018-04-06 18:02:31 -04003114void SPIRVProducerPass::GenerateWorkgroupVars() {
3115 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
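  // Emit one Workgroup-storage-class OpVariable per pointer-to-local kernel
  // argument spec id. The pointee is the array type whose length is the
  // OpSpecConstant generated earlier, so the host can size the local buffer
  // at pipeline creation time.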
Alan Baker202c8c72018-08-13 13:47:44 -04003116 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3117 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003118 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003119
3120 // Generate OpVariable.
3121 //
3122 // GIDOps[0] : Result Type ID
3123 // GIDOps[1] : Storage Class
3124 SPIRVOperandList Ops;
3125 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3126
3127 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003128 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003129 }
3130}
3131
David Neto862b7d82018-06-14 18:48:37 -04003132void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3133 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003134 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3135 return;
3136 }
David Neto862b7d82018-06-14 18:48:37 -04003137 // Gather the list of resources that are used by this function's arguments.
3138 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3139
alan-bakerf5e5f692018-11-27 08:33:24 -05003140 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3141 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003142 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003143 std::string kind =
3144 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3145 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003146 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003147 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003148 };
3149
3150 auto *fty = F.getType()->getPointerElementType();
3151 auto *func_ty = dyn_cast<FunctionType>(fty);
3152
alan-baker038e9242019-04-19 22:14:41 -04003153 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003154 // If an argument maps to a resource variable, then get descriptor set and
3155 // binding from the resource variable. Other info comes from the metadata.
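  // Each kernel_arg_map entry is expected to carry seven operands: name, old
  // argument index, remapped index, offset, size, argument kind string, and
  // spec id (see the operand accesses below).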
3156 const auto *arg_map = F.getMetadata("kernel_arg_map");
3157 if (arg_map) {
3158 for (const auto &arg : arg_map->operands()) {
3159 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003160 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003161 const auto name =
3162 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3163 const auto old_index =
3164 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3165 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003166 const size_t new_index = static_cast<size_t>(
3167 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003168 const auto offset =
3169 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003170 const auto arg_size =
3171 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003172 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003173 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003174 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003175 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003176
3177 uint32_t descriptor_set = 0;
3178 uint32_t binding = 0;
3179 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003180 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3181 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003182 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003183 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003184 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003185 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3186 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3187 DL));
David Neto862b7d82018-06-14 18:48:37 -04003188 } else {
3189 auto *info = resource_var_at_index[new_index];
3190 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003191 descriptor_set = info->descriptor_set;
3192 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003193 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003194 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3195 binding);
David Neto862b7d82018-06-14 18:48:37 -04003196 }
3197 } else {
3198 // There is no argument map.
3199 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003200 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003201
3202 SmallVector<Argument *, 4> arguments;
3203 for (auto &arg : F.args()) {
3204 arguments.push_back(&arg);
3205 }
3206
3207 unsigned arg_index = 0;
3208 for (auto *info : resource_var_at_index) {
3209 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003210 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003211 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003212 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003213 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003214 }
3215
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003216 // Local pointer arguments are unused in this case. Offset is always
3217 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003218 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003219 F.getName().str(),
3220 arg->getName().str(),
3221 arg_index,
3222 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3223 0,
3224 0,
3225 0,
3226 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003227 descriptorMapEntries->emplace_back(std::move(kernel_data),
3228 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003229 }
3230 arg_index++;
3231 }
3232 // Generate mappings for pointer-to-local arguments.
3233 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3234 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003235 auto where = LocalArgSpecIds.find(arg);
3236 if (where != LocalArgSpecIds.end()) {
3237 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003238 // Pod argument members are unused in this case.
3239 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003240 F.getName().str(),
3241 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003242 arg_index,
3243 ArgKind::Local,
3244 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003245 static_cast<uint32_t>(
3246 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003247 0,
3248 0};
3249 // Pointer-to-local arguments do not utilize descriptor set and binding.
3250 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003251 }
3252 }
3253 }
3254}
3255
David Neto22f144c2017-06-12 14:26:21 -04003256void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3257 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3258 ValueMapType &VMap = getValueMap();
3259 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003260 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3261 auto &GlobalConstArgSet = getGlobalConstArgSet();
3262
3263 FunctionType *FTy = F.getFunctionType();
3264
3265 //
David Neto22f144c2017-06-12 14:26:21 -04003266 // Generate OPFunction.
3267 //
3268
3269 // FOps[0] : Result Type ID
3270 // FOps[1] : Function Control
3271 // FOps[2] : Function Type ID
3272 SPIRVOperandList FOps;
3273
3274 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003275 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003276
3277 // Check function attributes for SPIRV Function Control.
3278 uint32_t FuncControl = spv::FunctionControlMaskNone;
3279 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3280 FuncControl |= spv::FunctionControlInlineMask;
3281 }
3282 if (F.hasFnAttribute(Attribute::NoInline)) {
3283 FuncControl |= spv::FunctionControlDontInlineMask;
3284 }
3285 // TODO: Check llvm attribute for Function Control Pure.
3286 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3287 FuncControl |= spv::FunctionControlPureMask;
3288 }
3289 // TODO: Check llvm attribute for Function Control Const.
3290 if (F.hasFnAttribute(Attribute::ReadNone)) {
3291 FuncControl |= spv::FunctionControlConstMask;
3292 }
3293
David Neto257c3892018-04-11 13:19:45 -04003294 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003295
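  // Kernel entry points take no OpFunctionParameters: their arguments are
  // reached through module-scope resource variables, so the kernel's function
  // type is rebuilt here with an empty parameter list.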
3296 uint32_t FTyID;
3297 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3298 SmallVector<Type *, 4> NewFuncParamTys;
3299 FunctionType *NewFTy =
3300 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3301 FTyID = lookupType(NewFTy);
3302 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003303 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003304 if (GlobalConstFuncTyMap.count(FTy)) {
3305 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3306 } else {
3307 FTyID = lookupType(FTy);
3308 }
3309 }
3310
David Neto257c3892018-04-11 13:19:45 -04003311 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003312
3313 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3314 EntryPoints.push_back(std::make_pair(&F, nextID));
3315 }
3316
3317 VMap[&F] = nextID;
3318
David Neto482550a2018-03-24 05:21:07 -07003319 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003320 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3321 }
David Neto22f144c2017-06-12 14:26:21 -04003322 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003323 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003324 SPIRVInstList.push_back(FuncInst);
3325
3326 //
3327 // Generate OpFunctionParameter for Normal function.
3328 //
3329
3330 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003331
3332 // Find Insert Point for OpDecorate.
3333 auto DecoInsertPoint =
3334 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3335 [](SPIRVInstruction *Inst) -> bool {
3336 return Inst->getOpcode() != spv::OpDecorate &&
3337 Inst->getOpcode() != spv::OpMemberDecorate &&
3338 Inst->getOpcode() != spv::OpExtInstImport;
3339 });
3340
David Neto22f144c2017-06-12 14:26:21 -04003341 // Iterate Argument for name instead of param type from function type.
3342 unsigned ArgIdx = 0;
3343 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003344 uint32_t param_id = nextID++;
3345 VMap[&Arg] = param_id;
3346
3347 if (CalledWithCoherentResource(Arg)) {
3348 // If the arg is passed a coherent resource ever, then decorate this
3349 // parameter with Coherent too.
3350 SPIRVOperandList decoration_ops;
3351 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003352 SPIRVInstList.insert(
3353 DecoInsertPoint,
3354 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003355 }
David Neto22f144c2017-06-12 14:26:21 -04003356
3357 // ParamOps[0] : Result Type ID
3358 SPIRVOperandList ParamOps;
3359
3360 // Find SPIRV instruction for parameter type.
3361 uint32_t ParamTyID = lookupType(Arg.getType());
3362 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3363 if (GlobalConstFuncTyMap.count(FTy)) {
3364 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3365 Type *EleTy = PTy->getPointerElementType();
3366 Type *ArgTy =
3367 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3368 ParamTyID = lookupType(ArgTy);
3369 GlobalConstArgSet.insert(&Arg);
3370 }
3371 }
3372 }
David Neto257c3892018-04-11 13:19:45 -04003373 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003374
3375 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003376 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003377 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003378 SPIRVInstList.push_back(ParamInst);
3379
3380 ArgIdx++;
3381 }
3382 }
3383}
3384
alan-bakerb6b09dc2018-11-08 16:59:28 -05003385void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003386 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3387 EntryPointVecType &EntryPoints = getEntryPointVec();
3388 ValueMapType &VMap = getValueMap();
3389 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3390 uint32_t &ExtInstImportID = getOpExtInstImportID();
3391 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3392
3393 // Set up insert point.
3394 auto InsertPoint = SPIRVInstList.begin();
3395
3396 //
3397 // Generate OpCapability
3398 //
3399 // TODO: Which llvm information is mapped to SPIRV Capability?
3400
3401 // Ops[0] = Capability
3402 SPIRVOperandList Ops;
3403
David Neto87846742018-04-11 17:36:22 -04003404 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003405 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003406 SPIRVInstList.insert(InsertPoint, CapInst);
3407
alan-bakerf906d2b2019-12-10 11:26:23 -05003408 bool write_without_format = false;
3409 bool sampled_1d = false;
3410 bool image_1d = false;
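  // Scan every type used by the module and add the optional capabilities it
  // implies: Int8/Int16/Int64 and Float16/Float64 for scalar widths, plus the
  // image-related capabilities collected via the flags above.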
David Neto22f144c2017-06-12 14:26:21 -04003411 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003412 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3413 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003414 SPIRVInstList.insert(
3415 InsertPoint,
3416 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003417 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003418 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003419 SPIRVInstList.insert(
3420 InsertPoint,
3421 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003422 } else if (Ty->isIntegerTy(64)) {
3423 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003424 SPIRVInstList.insert(
3425 InsertPoint,
3426 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003427 } else if (Ty->isHalfTy()) {
3428 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003429 SPIRVInstList.insert(InsertPoint,
3430 new SPIRVInstruction(spv::OpCapability,
3431 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003432 } else if (Ty->isDoubleTy()) {
3433 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003434 SPIRVInstList.insert(InsertPoint,
3435 new SPIRVInstruction(spv::OpCapability,
3436 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003437 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3438 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003439 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003440 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003441 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003442 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003443 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003444 write_without_format = true;
3445 }
3446 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003447 STy->getName().startswith("opencl.image1d_wo_t") ||
3448 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3449 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003450 if (STy->getName().contains(".sampled"))
3451 sampled_1d = true;
3452 else
3453 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003454 }
3455 }
3456 }
3457 }
3458
alan-bakerf906d2b2019-12-10 11:26:23 -05003459 if (write_without_format) {
3460 // Generate OpCapability for write only image type.
3461 SPIRVInstList.insert(
3462 InsertPoint,
3463 new SPIRVInstruction(
3464 spv::OpCapability,
3465 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3466 }
3467 if (image_1d) {
3468 // Generate OpCapability for unsampled 1D image type.
3469 SPIRVInstList.insert(InsertPoint,
3470 new SPIRVInstruction(spv::OpCapability,
3471 {MkNum(spv::CapabilityImage1D)}));
3472 } else if (sampled_1d) {
3473 // Generate OpCapability for sampled 1D image type.
3474 SPIRVInstList.insert(
3475 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3476 {MkNum(spv::CapabilitySampled1D)}));
3477 }
3478
David Neto5c22a252018-03-15 16:07:41 -04003479 { // OpCapability ImageQuery
3480 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003481 for (const auto &SymVal : module.getValueSymbolTable()) {
3482 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003483 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003484 hasImageQuery = true;
3485 break;
3486 }
David Neto5c22a252018-03-15 16:07:41 -04003487 }
3488 }
alan-bakerf67468c2019-11-25 15:51:49 -05003489
David Neto5c22a252018-03-15 16:07:41 -04003490 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003491 auto *ImageQueryCapInst = new SPIRVInstruction(
3492 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003493 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3494 }
3495 }
3496
David Neto22f144c2017-06-12 14:26:21 -04003497 if (hasVariablePointers()) {
3498 //
David Neto22f144c2017-06-12 14:26:21 -04003499 // Generate OpCapability.
3500 //
3501 // Ops[0] = Capability
3502 //
3503 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003504 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003505
David Neto87846742018-04-11 17:36:22 -04003506 SPIRVInstList.insert(InsertPoint,
3507 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003508 } else if (hasVariablePointersStorageBuffer()) {
3509 //
3510 // Generate OpCapability.
3511 //
3512 // Ops[0] = Capability
3513 //
3514 Ops.clear();
3515 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003516
alan-baker5b86ed72019-02-15 08:26:50 -05003517 SPIRVInstList.insert(InsertPoint,
3518 new SPIRVInstruction(spv::OpCapability, Ops));
3519 }
3520
3521 // Always add the storage buffer extension
3522 {
David Neto22f144c2017-06-12 14:26:21 -04003523 //
3524 // Generate OpExtension.
3525 //
3526 // Ops[0] = Name (Literal String)
3527 //
alan-baker5b86ed72019-02-15 08:26:50 -05003528 auto *ExtensionInst = new SPIRVInstruction(
3529 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3530 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3531 }
David Neto22f144c2017-06-12 14:26:21 -04003532
alan-baker5b86ed72019-02-15 08:26:50 -05003533 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3534 //
3535 // Generate OpExtension.
3536 //
3537 // Ops[0] = Name (Literal String)
3538 //
3539 auto *ExtensionInst = new SPIRVInstruction(
3540 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3541 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003542 }
3543
3544 if (ExtInstImportID) {
3545 ++InsertPoint;
3546 }
3547
3548 //
3549 // Generate OpMemoryModel
3550 //
3551 // Memory model for Vulkan will always be GLSL450.
3552
3553 // Ops[0] = Addressing Model
3554 // Ops[1] = Memory Model
3555 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003556 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003557
David Neto87846742018-04-11 17:36:22 -04003558 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003559 SPIRVInstList.insert(InsertPoint, MemModelInst);
3560
3561 //
3562 // Generate OpEntryPoint
3563 //
3564 for (auto EntryPoint : EntryPoints) {
3565 // Ops[0] = Execution Model
3566 // Ops[1] = EntryPoint ID
3567 // Ops[2] = Name (Literal String)
3568 // ...
3569 //
3570 // TODO: Do we need to consider Interface ID for forward references???
3571 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003572 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003573 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3574 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003575
David Neto22f144c2017-06-12 14:26:21 -04003576 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003577 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003578 }
3579
David Neto87846742018-04-11 17:36:22 -04003580 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003581 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3582 }
3583
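  // A kernel's reqd_work_group_size metadata is emitted as an OpExecutionMode
  // LocalSize on its entry point. This is mutually exclusive with the
  // spec-constant workgroup size path, hence the BuiltinDimVec check below.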
3584 for (auto EntryPoint : EntryPoints) {
3585 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3586 ->getMetadata("reqd_work_group_size")) {
3587
3588 if (!BuiltinDimVec.empty()) {
3589 llvm_unreachable(
3590 "Kernels should have consistent work group size definition");
3591 }
3592
3593 //
3594 // Generate OpExecutionMode
3595 //
3596
3597 // Ops[0] = Entry Point ID
3598 // Ops[1] = Execution Mode
3599 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3600 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003601 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003602
3603 uint32_t XDim = static_cast<uint32_t>(
3604 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3605 uint32_t YDim = static_cast<uint32_t>(
3606 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3607 uint32_t ZDim = static_cast<uint32_t>(
3608 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3609
David Neto257c3892018-04-11 13:19:45 -04003610 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003611
David Neto87846742018-04-11 17:36:22 -04003612 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003613 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3614 }
3615 }
3616
3617 //
3618 // Generate OpSource.
3619 //
3620 // Ops[0] = SourceLanguage ID
3621 // Ops[1] = Version (LiteralNum)
3622 //
3623 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003624 switch (clspv::Option::Language()) {
3625 case clspv::Option::SourceLanguage::OpenCL_C_10:
3626 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3627 break;
3628 case clspv::Option::SourceLanguage::OpenCL_C_11:
3629 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3630 break;
3631 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003632 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003633 break;
3634 case clspv::Option::SourceLanguage::OpenCL_C_20:
3635 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3636 break;
3637 case clspv::Option::SourceLanguage::OpenCL_CPP:
3638 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3639 break;
3640 default:
3641 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3642 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003643 }
David Neto22f144c2017-06-12 14:26:21 -04003644
David Neto87846742018-04-11 17:36:22 -04003645 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003646 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3647
3648 if (!BuiltinDimVec.empty()) {
3649 //
3650 // Generate OpDecorates for x/y/z dimension.
3651 //
3652 // Ops[0] = Target ID
3653 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003654 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003655
3656 // X Dimension
3657 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003658 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003659 SPIRVInstList.insert(InsertPoint,
3660 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003661
3662 // Y Dimension
3663 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003664 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003665 SPIRVInstList.insert(InsertPoint,
3666 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003667
3668 // Z Dimension
3669 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003670 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003671 SPIRVInstList.insert(InsertPoint,
3672 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003673 }
3674}
3675
David Netob6e2e062018-04-25 10:32:06 -04003676void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3677 // Work around a driver bug. Initializers on Private variables might not
3678 // work. So the start of the kernel should store the initializer value to the
3679 // variables. Yes, *every* entry point pays this cost if *any* entry point
3680 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3681 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003682 // TODO(dneto): Remove this at some point once fixed drivers are widely
3683 // available.
David Netob6e2e062018-04-25 10:32:06 -04003684 if (WorkgroupSizeVarID) {
3685 assert(WorkgroupSizeValueID);
3686
3687 SPIRVOperandList Ops;
3688 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3689
3690 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3691 getSPIRVInstList().push_back(Inst);
3692 }
3693}
3694
David Neto22f144c2017-06-12 14:26:21 -04003695void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3696 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3697 ValueMapType &VMap = getValueMap();
3698
David Netob6e2e062018-04-25 10:32:06 -04003699 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003700
3701 for (BasicBlock &BB : F) {
3702 // Register BasicBlock to ValueMap.
3703 VMap[&BB] = nextID;
3704
3705 //
3706 // Generate OpLabel for Basic Block.
3707 //
3708 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003709 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003710 SPIRVInstList.push_back(Inst);
3711
David Neto6dcd4712017-06-23 11:06:47 -04003712 // OpVariable instructions must come first.
3713 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003714 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3715 // Allocating a pointer requires variable pointers.
3716 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003717 setVariablePointersCapabilities(
3718 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003719 }
David Neto6dcd4712017-06-23 11:06:47 -04003720 GenerateInstruction(I);
3721 }
3722 }
3723
David Neto22f144c2017-06-12 14:26:21 -04003724 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003725 if (clspv::Option::HackInitializers()) {
3726 GenerateEntryPointInitialStores();
3727 }
David Neto22f144c2017-06-12 14:26:21 -04003728 }
3729
3730 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003731 if (!isa<AllocaInst>(I)) {
3732 GenerateInstruction(I);
3733 }
David Neto22f144c2017-06-12 14:26:21 -04003734 }
3735 }
3736}
3737
3738spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3739 const std::map<CmpInst::Predicate, spv::Op> Map = {
3740 {CmpInst::ICMP_EQ, spv::OpIEqual},
3741 {CmpInst::ICMP_NE, spv::OpINotEqual},
3742 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3743 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3744 {CmpInst::ICMP_ULT, spv::OpULessThan},
3745 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3746 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3747 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3748 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3749 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3750 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3751 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3752 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3753 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3754 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3755 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3756 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3757 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3758 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3759 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3760 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3761 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3762
3763 assert(0 != Map.count(I->getPredicate()));
3764
3765 return Map.at(I->getPredicate());
3766}
3767
3768spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3769 const std::map<unsigned, spv::Op> Map{
3770 {Instruction::Trunc, spv::OpUConvert},
3771 {Instruction::ZExt, spv::OpUConvert},
3772 {Instruction::SExt, spv::OpSConvert},
3773 {Instruction::FPToUI, spv::OpConvertFToU},
3774 {Instruction::FPToSI, spv::OpConvertFToS},
3775 {Instruction::UIToFP, spv::OpConvertUToF},
3776 {Instruction::SIToFP, spv::OpConvertSToF},
3777 {Instruction::FPTrunc, spv::OpFConvert},
3778 {Instruction::FPExt, spv::OpFConvert},
3779 {Instruction::BitCast, spv::OpBitcast}};
3780
3781 assert(0 != Map.count(I.getOpcode()));
3782
3783 return Map.at(I.getOpcode());
3784}
3785
3786spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003787 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003788 switch (I.getOpcode()) {
3789 default:
3790 break;
3791 case Instruction::Or:
3792 return spv::OpLogicalOr;
3793 case Instruction::And:
3794 return spv::OpLogicalAnd;
3795 case Instruction::Xor:
3796 return spv::OpLogicalNotEqual;
3797 }
3798 }
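  // For example (illustrative): an i1 'or' maps to OpLogicalOr and an i1 'xor'
  // to OpLogicalNotEqual, since SPIR-V booleans are abstract and have no
  // bitwise representation.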
3799
alan-bakerb6b09dc2018-11-08 16:59:28 -05003800 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003801 {Instruction::Add, spv::OpIAdd},
3802 {Instruction::FAdd, spv::OpFAdd},
3803 {Instruction::Sub, spv::OpISub},
3804 {Instruction::FSub, spv::OpFSub},
3805 {Instruction::Mul, spv::OpIMul},
3806 {Instruction::FMul, spv::OpFMul},
3807 {Instruction::UDiv, spv::OpUDiv},
3808 {Instruction::SDiv, spv::OpSDiv},
3809 {Instruction::FDiv, spv::OpFDiv},
3810 {Instruction::URem, spv::OpUMod},
3811 {Instruction::SRem, spv::OpSRem},
3812 {Instruction::FRem, spv::OpFRem},
3813 {Instruction::Or, spv::OpBitwiseOr},
3814 {Instruction::Xor, spv::OpBitwiseXor},
3815 {Instruction::And, spv::OpBitwiseAnd},
3816 {Instruction::Shl, spv::OpShiftLeftLogical},
3817 {Instruction::LShr, spv::OpShiftRightLogical},
3818 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3819
3820 assert(0 != Map.count(I.getOpcode()));
3821
3822 return Map.at(I.getOpcode());
3823}
3824
3825void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3826 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3827 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003828 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3829 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3830
3831 // Register Instruction to ValueMap.
3832 if (0 == VMap[&I]) {
3833 VMap[&I] = nextID;
3834 }
3835
3836 switch (I.getOpcode()) {
3837 default: {
3838 if (Instruction::isCast(I.getOpcode())) {
3839 //
3840 // Generate SPIRV instructions for cast operators.
3841 //
3842
David Netod2de94a2017-08-28 17:27:47 -04003843 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003844 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003845 auto toI8 = Ty == Type::getInt8Ty(Context);
3846 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003847 // Handle zext, sext and uitofp with i1 type specially.
3848 if ((I.getOpcode() == Instruction::ZExt ||
3849 I.getOpcode() == Instruction::SExt ||
3850 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003851 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003852 //
3853 // Generate OpSelect.
3854 //
3855
3856 // Ops[0] = Result Type ID
3857 // Ops[1] = Condition ID
3858 // Ops[2] = True Constant ID
3859 // Ops[3] = False Constant ID
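        // Hedged illustration of the mapping (value names invented):
        //   %r = zext i1 %c to i32      ==>  %r = OpSelect %uint  %c %uint_1  %uint_0
        //   %r = sext i1 %c to i32      ==>  %r = OpSelect %uint  %c %uint_n1 %uint_0
        //   %r = uitofp i1 %c to float  ==>  %r = OpSelect %float %c %float_1 %float_0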
3860 SPIRVOperandList Ops;
3861
David Neto257c3892018-04-11 13:19:45 -04003862 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003863
David Neto22f144c2017-06-12 14:26:21 -04003864 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003865 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003866
3867 uint32_t TrueID = 0;
3868 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003869 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003870 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003871 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003872 } else {
3873 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3874 }
David Neto257c3892018-04-11 13:19:45 -04003875 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003876
3877 uint32_t FalseID = 0;
3878 if (I.getOpcode() == Instruction::ZExt) {
3879 FalseID = VMap[Constant::getNullValue(I.getType())];
3880 } else if (I.getOpcode() == Instruction::SExt) {
3881 FalseID = VMap[Constant::getNullValue(I.getType())];
3882 } else {
3883 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3884 }
David Neto257c3892018-04-11 13:19:45 -04003885 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003886
David Neto87846742018-04-11 17:36:22 -04003887 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003888 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003889 } else if (!clspv::Option::Int8Support() &&
3890 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003891 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3892 // 8 bits.
3893 // Before:
3894 // %result = trunc i32 %a to i8
3895 // After
3896 // %result = OpBitwiseAnd %uint %a %uint_255
3897
3898 SPIRVOperandList Ops;
3899
David Neto257c3892018-04-11 13:19:45 -04003900 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003901
3902 Type *UintTy = Type::getInt32Ty(Context);
3903 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003904 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003905
David Neto87846742018-04-11 17:36:22 -04003906 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003907 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003908 } else {
3909 // Ops[0] = Result Type ID
3910 // Ops[1] = Source Value ID
3911 SPIRVOperandList Ops;
3912
David Neto257c3892018-04-11 13:19:45 -04003913 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003914
David Neto87846742018-04-11 17:36:22 -04003915 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003916 SPIRVInstList.push_back(Inst);
3917 }
3918 } else if (isa<BinaryOperator>(I)) {
3919 //
3920 // Generate SPIRV instructions for binary operators.
3921 //
3922
3923 // Handle xor with i1 type specially.
3924 if (I.getOpcode() == Instruction::Xor &&
3925 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003926 ((isa<ConstantInt>(I.getOperand(0)) &&
3927 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3928 (isa<ConstantInt>(I.getOperand(1)) &&
3929 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003930 //
3931 // Generate OpLogicalNot.
3932 //
3933 // Ops[0] = Result Type ID
3934 // Ops[1] = Operand
3935 SPIRVOperandList Ops;
3936
David Neto257c3892018-04-11 13:19:45 -04003937 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003938
3939 Value *CondV = I.getOperand(0);
3940 if (isa<Constant>(I.getOperand(0))) {
3941 CondV = I.getOperand(1);
3942 }
David Neto257c3892018-04-11 13:19:45 -04003943 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003944
David Neto87846742018-04-11 17:36:22 -04003945 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003946 SPIRVInstList.push_back(Inst);
3947 } else {
3948 // Ops[0] = Result Type ID
3949 // Ops[1] = Operand 0
3950 // Ops[2] = Operand 1
3951 SPIRVOperandList Ops;
3952
David Neto257c3892018-04-11 13:19:45 -04003953 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3954 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003955
David Neto87846742018-04-11 17:36:22 -04003956 auto *Inst =
3957 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003958 SPIRVInstList.push_back(Inst);
3959 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003960 } else if (I.getOpcode() == Instruction::FNeg) {
3961 // The only unary operator.
3962 //
3963 // Ops[0] = Result Type ID
3964 // Ops[1] = Operand 0
3965 SPIRVOperandList ops;
3966
3967 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3968 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3969 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003970 } else {
3971 I.print(errs());
3972 llvm_unreachable("Unsupported instruction???");
3973 }
3974 break;
3975 }
3976 case Instruction::GetElementPtr: {
3977 auto &GlobalConstArgSet = getGlobalConstArgSet();
3978
3979 //
3980 // Generate OpAccessChain.
3981 //
3982 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3983
3987
3988 // Ops[0] = Result Type ID
3989 // Ops[1] = Base ID
3990 // Ops[2] ... Ops[n] = Indexes ID
3991 SPIRVOperandList Ops;
3992
alan-bakerb6b09dc2018-11-08 16:59:28 -05003993 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003994 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3995 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3996 // Use pointer type with private address space for global constant.
3997 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003998 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003999 }
David Neto257c3892018-04-11 13:19:45 -04004000
4001 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004002
David Neto862b7d82018-06-14 18:48:37 -04004003 // Generate the base pointer.
4004 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004005
David Neto862b7d82018-06-14 18:48:37 -04004006 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004007
4008 //
4009 // A GEP is lowered according to the following rules (example below).
4010 //
David Neto862b7d82018-06-14 18:48:37 -04004011 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
4012 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004013 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4014 // first index.
4015 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4016 // use gep's first index.
4017 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
4018 // gep's first index.
4019 //
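    // Illustrative examples of the rules above (types and IDs invented):
    //   getelementptr %S, %S* %base, i32 0, i32 2
    //     ==> OpAccessChain %ptr %base %uint_2      ; rule 1: leading 0 dropped
    //   getelementptr i32, i32* %base, i32 %n
    //     ==> OpPtrAccessChain %ptr %base %n        ; rule 3: first index kept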
4020 spv::Op Opcode = spv::OpAccessChain;
4021 unsigned offset = 0;
4022 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004023 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004024 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004025 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004026 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004027 }
David Neto862b7d82018-06-14 18:48:37 -04004028 } else {
David Neto22f144c2017-06-12 14:26:21 -04004029 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004030 }
4031
4032 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004033 // Do we need to generate ArrayStride? Check against the GEP result type
4034 // rather than the pointer type of the base because when indexing into
4035 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4036 // for something else in the SPIR-V.
4037 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004038 auto address_space = ResultType->getAddressSpace();
4039 setVariablePointersCapabilities(address_space);
4040 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004041 case spv::StorageClassStorageBuffer:
4042 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004043 // Save the need to generate an ArrayStride decoration. But defer
4044 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004045 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004046 break;
4047 default:
4048 break;
David Neto1a1a0582017-07-07 12:01:44 -04004049 }
David Neto22f144c2017-06-12 14:26:21 -04004050 }
4051
4052 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004053 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004054 }
4055
David Neto87846742018-04-11 17:36:22 -04004056 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004057 SPIRVInstList.push_back(Inst);
4058 break;
4059 }
4060 case Instruction::ExtractValue: {
4061 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4062 // Ops[0] = Result Type ID
4063 // Ops[1] = Composite ID
4064 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4065 SPIRVOperandList Ops;
4066
David Neto257c3892018-04-11 13:19:45 -04004067 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004068
4069 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004070 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004071
4072 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004073 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004074 }
4075
David Neto87846742018-04-11 17:36:22 -04004076 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004077 SPIRVInstList.push_back(Inst);
4078 break;
4079 }
4080 case Instruction::InsertValue: {
4081 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4082 // Ops[0] = Result Type ID
4083 // Ops[1] = Object ID
4084 // Ops[2] = Composite ID
4085 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4086 SPIRVOperandList Ops;
4087
4088 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004089 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004090
4091 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004092 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004093
4094 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004095 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004096
4097 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004098 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004099 }
4100
David Neto87846742018-04-11 17:36:22 -04004101 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004102 SPIRVInstList.push_back(Inst);
4103 break;
4104 }
4105 case Instruction::Select: {
4106 //
4107 // Generate OpSelect.
4108 //
4109
4110 // Ops[0] = Result Type ID
4111 // Ops[1] = Condition ID
4112 // Ops[2] = True Constant ID
4113 // Ops[3] = False Constant ID
4114 SPIRVOperandList Ops;
4115
4116 // Find SPIRV instruction for parameter type.
4117 auto Ty = I.getType();
4118 if (Ty->isPointerTy()) {
4119 auto PointeeTy = Ty->getPointerElementType();
4120 if (PointeeTy->isStructTy() &&
4121 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4122 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004123 } else {
4124 // Selecting between pointers requires variable pointers.
4125 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4126 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4127 setVariablePointers(true);
4128 }
David Neto22f144c2017-06-12 14:26:21 -04004129 }
4130 }
4131
David Neto257c3892018-04-11 13:19:45 -04004132 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4133 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004134
David Neto87846742018-04-11 17:36:22 -04004135 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004136 SPIRVInstList.push_back(Inst);
4137 break;
4138 }
4139 case Instruction::ExtractElement: {
4140 // Handle <4 x i8> type manually.
4141 Type *CompositeTy = I.getOperand(0)->getType();
4142 if (is4xi8vec(CompositeTy)) {
4143 //
4144 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4145 // <4 x i8>.
4146 //
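      // Sketch of the sequence built below (IDs invented). The vector is packed
      // in one 32-bit word, so extracting lane i is a shift by 8*i plus a mask:
      //   %r = extractelement <4 x i8> %v, i32 2
      //     ==> %shifted = OpShiftRightLogical %uint %v %uint_16
      //         %r       = OpBitwiseAnd        %uint %shifted %uint_255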
4147
4148 //
4149 // Generate OpShiftRightLogical
4150 //
4151 // Ops[0] = Result Type ID
4152 // Ops[1] = Operand 0
4153 // Ops[2] = Operand 1
4154 //
4155 SPIRVOperandList Ops;
4156
David Neto257c3892018-04-11 13:19:45 -04004157 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004158
4159 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004160 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004161
4162 uint32_t Op1ID = 0;
4163 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4164 // Handle constant index.
4165 uint64_t Idx = CI->getZExtValue();
4166 Value *ShiftAmount =
4167 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4168 Op1ID = VMap[ShiftAmount];
4169 } else {
4170 // Handle variable index.
4171 SPIRVOperandList TmpOps;
4172
David Neto257c3892018-04-11 13:19:45 -04004173 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4174 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004175
4176 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004177 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004178
4179 Op1ID = nextID;
4180
David Neto87846742018-04-11 17:36:22 -04004181 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004182 SPIRVInstList.push_back(TmpInst);
4183 }
David Neto257c3892018-04-11 13:19:45 -04004184 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004185
4186 uint32_t ShiftID = nextID;
4187
David Neto87846742018-04-11 17:36:22 -04004188 auto *Inst =
4189 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004190 SPIRVInstList.push_back(Inst);
4191
4192 //
4193 // Generate OpBitwiseAnd
4194 //
4195 // Ops[0] = Result Type ID
4196 // Ops[1] = Operand 0
4197 // Ops[2] = Operand 1
4198 //
4199 Ops.clear();
4200
David Neto257c3892018-04-11 13:19:45 -04004201 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004202
4203 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004204 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004205
David Neto9b2d6252017-09-06 15:47:37 -04004206 // Reset mapping for this value to the result of the bitwise and.
4207 VMap[&I] = nextID;
4208
David Neto87846742018-04-11 17:36:22 -04004209 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004210 SPIRVInstList.push_back(Inst);
4211 break;
4212 }
4213
4214 // Ops[0] = Result Type ID
4215 // Ops[1] = Composite ID
4216 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4217 SPIRVOperandList Ops;
4218
David Neto257c3892018-04-11 13:19:45 -04004219 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004220
4221 spv::Op Opcode = spv::OpCompositeExtract;
4222 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004223 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004224 } else {
David Neto257c3892018-04-11 13:19:45 -04004225 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004226 Opcode = spv::OpVectorExtractDynamic;
4227 }
4228
David Neto87846742018-04-11 17:36:22 -04004229 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004230 SPIRVInstList.push_back(Inst);
4231 break;
4232 }
4233 case Instruction::InsertElement: {
4234 // Handle <4 x i8> type manually.
4235 Type *CompositeTy = I.getOperand(0)->getType();
4236 if (is4xi8vec(CompositeTy)) {
4237 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4238 uint32_t CstFFID = VMap[CstFF];
4239
4240 uint32_t ShiftAmountID = 0;
4241 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4242 // Handle constant index.
4243 uint64_t Idx = CI->getZExtValue();
4244 Value *ShiftAmount =
4245 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4246 ShiftAmountID = VMap[ShiftAmount];
4247 } else {
4248 // Handle variable index.
4249 SPIRVOperandList TmpOps;
4250
David Neto257c3892018-04-11 13:19:45 -04004251 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4252 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004253
4254 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004255 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004256
4257 ShiftAmountID = nextID;
4258
David Neto87846742018-04-11 17:36:22 -04004259 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004260 SPIRVInstList.push_back(TmpInst);
4261 }
4262
4263 //
4264 // Generate mask operations.
4265 //
4266
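      // Rough sketch of the full sequence below (IDs invented) for
      //   %r = insertelement <4 x i8> %v, i8 %x, i32 1:
      //     %mask    = OpShiftLeftLogical %uint %uint_255 %uint_8
      //     %invmask = OpNot              %uint %mask
      //     %cleared = OpBitwiseAnd       %uint %v %invmask
      //     %newval  = OpShiftLeftLogical %uint %x %uint_8
      //     %r       = OpBitwiseOr        %uint %cleared %newval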
4267 // ShiftLeft mask according to index of insertelement.
4268 SPIRVOperandList Ops;
4269
David Neto257c3892018-04-11 13:19:45 -04004270 const uint32_t ResTyID = lookupType(CompositeTy);
4271 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004272
4273 uint32_t MaskID = nextID;
4274
David Neto87846742018-04-11 17:36:22 -04004275 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004276 SPIRVInstList.push_back(Inst);
4277
4278 // Inverse mask.
4279 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004280 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004281
4282 uint32_t InvMaskID = nextID;
4283
David Neto87846742018-04-11 17:36:22 -04004284 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004285 SPIRVInstList.push_back(Inst);
4286
4287 // Apply mask.
4288 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004289 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004290
4291 uint32_t OrgValID = nextID;
4292
David Neto87846742018-04-11 17:36:22 -04004293 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004294 SPIRVInstList.push_back(Inst);
4295
4296 // Create correct value according to index of insertelement.
4297 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004298 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4299 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004300
4301 uint32_t InsertValID = nextID;
4302
David Neto87846742018-04-11 17:36:22 -04004303 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004304 SPIRVInstList.push_back(Inst);
4305
4306 // Insert value to original value.
4307 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004308 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004309
David Netoa394f392017-08-26 20:45:29 -04004310 VMap[&I] = nextID;
4311
David Neto87846742018-04-11 17:36:22 -04004312 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004313 SPIRVInstList.push_back(Inst);
4314
4315 break;
4316 }
4317
David Neto22f144c2017-06-12 14:26:21 -04004318 SPIRVOperandList Ops;
4319
James Priced26efea2018-06-09 23:28:32 +01004320 // Ops[0] = Result Type ID
4321 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004322
4323 spv::Op Opcode = spv::OpCompositeInsert;
4324 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004325 const auto value = CI->getZExtValue();
4326 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004327 // Ops[1] = Object ID
4328 // Ops[2] = Composite ID
4329 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004330 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004331 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004332 } else {
James Priced26efea2018-06-09 23:28:32 +01004333 // Ops[1] = Composite ID
4334 // Ops[2] = Object ID
4335 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004336 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004337 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004338 Opcode = spv::OpVectorInsertDynamic;
4339 }
4340
David Neto87846742018-04-11 17:36:22 -04004341 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004342 SPIRVInstList.push_back(Inst);
4343 break;
4344 }
4345 case Instruction::ShuffleVector: {
4346 // Ops[0] = Result Type ID
4347 // Ops[1] = Vector 1 ID
4348 // Ops[2] = Vector 2 ID
4349 // Ops[3] ... Ops[n] = Components (Literal Number)
4350 SPIRVOperandList Ops;
4351
David Neto257c3892018-04-11 13:19:45 -04004352 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4353 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004354
4355 uint64_t NumElements = 0;
4356 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4357 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4358
4359 if (Cst->isNullValue()) {
4360 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004361 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004362 }
4363 } else if (const ConstantDataSequential *CDS =
4364 dyn_cast<ConstantDataSequential>(Cst)) {
4365 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4366 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004367 const auto value = CDS->getElementAsInteger(i);
4368 assert(value <= UINT32_MAX);
4369 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004370 }
4371 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4372 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4373 auto Op = CV->getOperand(i);
4374
4375 uint32_t literal = 0;
4376
4377 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4378 literal = static_cast<uint32_t>(CI->getZExtValue());
4379 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4380 literal = 0xFFFFFFFFu;
4381 } else {
4382 Op->print(errs());
4383 llvm_unreachable("Unsupported element in ConstantVector!");
4384 }
4385
David Neto257c3892018-04-11 13:19:45 -04004386 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004387 }
4388 } else {
4389 Cst->print(errs());
4390 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4391 }
4392 }
4393
David Neto87846742018-04-11 17:36:22 -04004394 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004395 SPIRVInstList.push_back(Inst);
4396 break;
4397 }
4398 case Instruction::ICmp:
4399 case Instruction::FCmp: {
4400 CmpInst *CmpI = cast<CmpInst>(&I);
4401
David Netod4ca2e62017-07-06 18:47:35 -04004402 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004403 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004404 if (isa<PointerType>(ArgTy)) {
4405 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004406 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004407 errs()
4408 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4409 << "in function " << name << "\n";
4410 llvm_unreachable("Pointer equality check is invalid");
4411 break;
4412 }
4413
David Neto257c3892018-04-11 13:19:45 -04004414 // Ops[0] = Result Type ID
4415 // Ops[1] = Operand 1 ID
4416 // Ops[2] = Operand 2 ID
4417 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004418
David Neto257c3892018-04-11 13:19:45 -04004419 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4420 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004421
4422 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004423 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004424 SPIRVInstList.push_back(Inst);
4425 break;
4426 }
4427 case Instruction::Br: {
4428 // Branch instruction is deferred because it needs the label's ID. Record the slot's
4429 // location on SPIRVInstructionList.
4430 DeferredInsts.push_back(
4431 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4432 break;
4433 }
4434 case Instruction::Switch: {
4435 I.print(errs());
4436 llvm_unreachable("Unsupported instruction???");
4437 break;
4438 }
4439 case Instruction::IndirectBr: {
4440 I.print(errs());
4441 llvm_unreachable("Unsupported instruction???");
4442 break;
4443 }
4444 case Instruction::PHI: {
4445 // PHI instruction is deferred because it needs its incoming values' IDs. Record the slot's
4446 // location on SPIRVInstructionList.
4447 DeferredInsts.push_back(
4448 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4449 break;
4450 }
4451 case Instruction::Alloca: {
4452 //
4453 // Generate OpVariable.
4454 //
4455 // Ops[0] : Result Type ID
4456 // Ops[1] : Storage Class
4457 SPIRVOperandList Ops;
4458
David Neto257c3892018-04-11 13:19:45 -04004459 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004460
David Neto87846742018-04-11 17:36:22 -04004461 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004462 SPIRVInstList.push_back(Inst);
4463 break;
4464 }
4465 case Instruction::Load: {
4466 LoadInst *LD = cast<LoadInst>(&I);
4467 //
4468 // Generate OpLoad.
4469 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004470
alan-baker5b86ed72019-02-15 08:26:50 -05004471 if (LD->getType()->isPointerTy()) {
4472 // Loading a pointer requires variable pointers.
4473 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4474 }
David Neto22f144c2017-06-12 14:26:21 -04004475
David Neto0a2f98d2017-09-15 19:38:40 -04004476 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004477 uint32_t PointerID = VMap[LD->getPointerOperand()];
4478
4479 // This is a hack to work around what looks like a driver bug.
4480 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004481 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4482 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004483 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004484 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004485 // Generate a bitwise-and of the original value with itself.
4486 // We should have been able to get away with just an OpCopyObject,
4487 // but we need something more complex to get past certain driver bugs.
4488 // This is ridiculous, but necessary.
4489 // TODO(dneto): Revisit this once drivers fix their bugs.
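    // Sketch (IDs invented): instead of
    //   %r = OpLoad %v3uint %workgroup_size_var
    // we emit
    //   %r = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value
    // which is just a roundabout copy of the already-known value.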
4490
4491 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004492 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4493 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004494
David Neto87846742018-04-11 17:36:22 -04004495 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004496 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004497 break;
4498 }
4499
4500 // This is the normal path. Generate a load.
4501
David Neto22f144c2017-06-12 14:26:21 -04004502 // Ops[0] = Result Type ID
4503 // Ops[1] = Pointer ID
4504 // Ops[2] ... Ops[n] = Optional Memory Access
4505 //
4506 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004507
David Neto22f144c2017-06-12 14:26:21 -04004508 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004509 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004510
David Neto87846742018-04-11 17:36:22 -04004511 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004512 SPIRVInstList.push_back(Inst);
4513 break;
4514 }
4515 case Instruction::Store: {
4516 StoreInst *ST = cast<StoreInst>(&I);
4517 //
4518 // Generate OpStore.
4519 //
4520
alan-baker5b86ed72019-02-15 08:26:50 -05004521 if (ST->getValueOperand()->getType()->isPointerTy()) {
4522 // Storing a pointer requires variable pointers.
4523 setVariablePointersCapabilities(
4524 ST->getValueOperand()->getType()->getPointerAddressSpace());
4525 }
4526
David Neto22f144c2017-06-12 14:26:21 -04004527 // Ops[0] = Pointer ID
4528 // Ops[1] = Object ID
4529 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4530 //
4531 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004532 SPIRVOperandList Ops;
4533 Ops << MkId(VMap[ST->getPointerOperand()])
4534 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004535
David Neto87846742018-04-11 17:36:22 -04004536 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004537 SPIRVInstList.push_back(Inst);
4538 break;
4539 }
4540 case Instruction::AtomicCmpXchg: {
4541 I.print(errs());
4542 llvm_unreachable("Unsupported instruction???");
4543 break;
4544 }
4545 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004546 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4547
4548 spv::Op opcode;
4549
4550 switch (AtomicRMW->getOperation()) {
4551 default:
4552 I.print(errs());
4553 llvm_unreachable("Unsupported instruction???");
4554 case llvm::AtomicRMWInst::Add:
4555 opcode = spv::OpAtomicIAdd;
4556 break;
4557 case llvm::AtomicRMWInst::Sub:
4558 opcode = spv::OpAtomicISub;
4559 break;
4560 case llvm::AtomicRMWInst::Xchg:
4561 opcode = spv::OpAtomicExchange;
4562 break;
4563 case llvm::AtomicRMWInst::Min:
4564 opcode = spv::OpAtomicSMin;
4565 break;
4566 case llvm::AtomicRMWInst::Max:
4567 opcode = spv::OpAtomicSMax;
4568 break;
4569 case llvm::AtomicRMWInst::UMin:
4570 opcode = spv::OpAtomicUMin;
4571 break;
4572 case llvm::AtomicRMWInst::UMax:
4573 opcode = spv::OpAtomicUMax;
4574 break;
4575 case llvm::AtomicRMWInst::And:
4576 opcode = spv::OpAtomicAnd;
4577 break;
4578 case llvm::AtomicRMWInst::Or:
4579 opcode = spv::OpAtomicOr;
4580 break;
4581 case llvm::AtomicRMWInst::Xor:
4582 opcode = spv::OpAtomicXor;
4583 break;
4584 }
4585
4586 //
4587 // Generate OpAtomic*.
4588 //
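    // Hedged example of the result (IDs invented):
    //   atomicrmw add i32* %p, i32 %v seq_cst
    //     ==> %r = OpAtomicIAdd %uint %p %scope_device %semantics %v
    // where %scope_device is the Device scope constant and %semantics combines
    // UniformMemory with SequentiallyConsistent.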
4589 SPIRVOperandList Ops;
4590
David Neto257c3892018-04-11 13:19:45 -04004591 Ops << MkId(lookupType(I.getType()))
4592 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004593
4594 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004595 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004596 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004597
4598 const auto ConstantMemorySemantics = ConstantInt::get(
4599 IntTy, spv::MemorySemanticsUniformMemoryMask |
4600 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004601 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004602
David Neto257c3892018-04-11 13:19:45 -04004603 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004604
4605 VMap[&I] = nextID;
4606
David Neto87846742018-04-11 17:36:22 -04004607 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004608 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004609 break;
4610 }
4611 case Instruction::Fence: {
4612 I.print(errs());
4613 llvm_unreachable("Unsupported instruction???");
4614 break;
4615 }
4616 case Instruction::Call: {
4617 CallInst *Call = dyn_cast<CallInst>(&I);
4618 Function *Callee = Call->getCalledFunction();
4619
Alan Baker202c8c72018-08-13 13:47:44 -04004620 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004621 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4622 // Generate an OpLoad
4623 SPIRVOperandList Ops;
4624 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004625
David Neto862b7d82018-06-14 18:48:37 -04004626 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4627 << MkId(ResourceVarDeferredLoadCalls[Call]);
4628
4629 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4630 SPIRVInstList.push_back(Inst);
4631 VMap[Call] = load_id;
4632 break;
4633
4634 } else {
4635 // This maps to an OpVariable we've already generated.
4636 // No code is generated for the call.
4637 }
4638 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004639 } else if (Callee->getName().startswith(
4640 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004641 // Don't codegen an instruction here, but instead map this call directly
4642 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004643 int spec_id = static_cast<int>(
4644 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004645 const auto &info = LocalSpecIdInfoMap[spec_id];
4646 VMap[Call] = info.variable_id;
4647 break;
David Neto862b7d82018-06-14 18:48:37 -04004648 }
4649
4650 // Sampler initializers become a load of the corresponding sampler.
4651
Kévin Petitdf71de32019-04-09 14:09:50 +01004652 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004653 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004654 const auto third_param = static_cast<unsigned>(
4655 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4656 auto sampler_value = third_param;
4657 if (clspv::Option::UseSamplerMap()) {
4658 sampler_value = getSamplerMap()[third_param].first;
4659 }
David Neto862b7d82018-06-14 18:48:37 -04004660
4661 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004662 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004663 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004664
David Neto257c3892018-04-11 13:19:45 -04004665 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004666 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004667
David Neto862b7d82018-06-14 18:48:37 -04004668 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004669 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004670 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004671 break;
4672 }
4673
Kévin Petit349c9502019-03-28 17:24:14 +00004674 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004675 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4676 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4677 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004678
Kévin Petit617a76d2019-04-04 13:54:16 +01004679 // If the switch above didn't have an entry, maybe the intrinsic
4680 // is using the name mangling logic.
4681 bool usesMangler = false;
4682 if (opcode == spv::OpNop) {
4683 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4684 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4685 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4686 usesMangler = true;
4687 }
4688 }
4689
Kévin Petit349c9502019-03-28 17:24:14 +00004690 if (opcode != spv::OpNop) {
4691
David Neto22f144c2017-06-12 14:26:21 -04004692 SPIRVOperandList Ops;
4693
Kévin Petit349c9502019-03-28 17:24:14 +00004694 if (!I.getType()->isVoidTy()) {
4695 Ops << MkId(lookupType(I.getType()));
4696 }
David Neto22f144c2017-06-12 14:26:21 -04004697
Kévin Petit617a76d2019-04-04 13:54:16 +01004698 unsigned firstOperand = usesMangler ? 1 : 0;
4699 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004700 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004701 }
4702
Kévin Petit349c9502019-03-28 17:24:14 +00004703 if (!I.getType()->isVoidTy()) {
4704 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004705 }
4706
Kévin Petit349c9502019-03-28 17:24:14 +00004707 SPIRVInstruction *Inst;
4708 if (!I.getType()->isVoidTy()) {
4709 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4710 } else {
4711 Inst = new SPIRVInstruction(opcode, Ops);
4712 }
Kévin Petit8a560882019-03-21 15:24:34 +00004713 SPIRVInstList.push_back(Inst);
4714 break;
4715 }
4716
David Neto22f144c2017-06-12 14:26:21 -04004717 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4718 if (Callee->getName().startswith("spirv.copy_memory")) {
4719 //
4720 // Generate OpCopyMemory.
4721 //
4722
4723 // Ops[0] = Dst ID
4724 // Ops[1] = Src ID
4725 // Ops[2] = Memory Access
4726 // Ops[3] = Alignment
4727
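      // Illustrative result (operand IDs invented): a non-volatile
      // 4-byte-aligned copy assembles roughly as
      //   OpCopyMemory %dst %src Aligned 4
      // and a volatile one as
      //   OpCopyMemory %dst %src Volatile|Aligned 4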
4728 auto IsVolatile =
4729 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4730
4731 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4732 : spv::MemoryAccessMaskNone;
4733
4734 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4735
4736 auto Alignment =
4737 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4738
David Neto257c3892018-04-11 13:19:45 -04004739 SPIRVOperandList Ops;
4740 Ops << MkId(VMap[Call->getArgOperand(0)])
4741 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4742 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004743
David Neto87846742018-04-11 17:36:22 -04004744 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004745
4746 SPIRVInstList.push_back(Inst);
4747
4748 break;
4749 }
4750
alan-baker75090e42020-02-20 11:21:04 -05004751 // read_image (with a sampler) is converted to OpSampledImage and
4752 // OpImageSampleExplicitLod. Additionally, OpTypeSampledImage is
4753 // generated.
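    // A sketch of the pair of instructions produced here (IDs invented):
    //   %si    = OpSampledImage           %sampled_image_ty %image %sampler
    //   %texel = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
    // For integer images the sampled result is additionally bitcast to the
    // call's return type.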
alan-bakerf67468c2019-11-25 15:51:49 -05004754 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004755 //
4756 // Generate OpSampledImage.
4757 //
4758 // Ops[0] = Result Type ID
4759 // Ops[1] = Image ID
4760 // Ops[2] = Sampler ID
4761 //
4762 SPIRVOperandList Ops;
4763
4764 Value *Image = Call->getArgOperand(0);
4765 Value *Sampler = Call->getArgOperand(1);
4766 Value *Coordinate = Call->getArgOperand(2);
4767
4768 TypeMapType &OpImageTypeMap = getImageTypeMap();
4769 Type *ImageTy = Image->getType()->getPointerElementType();
4770 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004771 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004772 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004773
4774 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004775
4776 uint32_t SampledImageID = nextID;
4777
David Neto87846742018-04-11 17:36:22 -04004778 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004779 SPIRVInstList.push_back(Inst);
4780
4781 //
4782 // Generate OpImageSampleExplicitLod.
4783 //
4784 // Ops[0] = Result Type ID
4785 // Ops[1] = Sampled Image ID
4786 // Ops[2] = Coordinate ID
4787 // Ops[3] = Image Operands Type ID
4788 // Ops[4] ... Ops[n] = Operands ID
4789 //
4790 Ops.clear();
4791
alan-bakerf67468c2019-11-25 15:51:49 -05004792 const bool is_int_image = IsIntImageType(Image->getType());
4793 uint32_t result_type = 0;
4794 if (is_int_image) {
4795 result_type = v4int32ID;
4796 } else {
4797 result_type = lookupType(Call->getType());
4798 }
4799
4800 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4801 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004802
4803 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004804 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004805
alan-bakerf67468c2019-11-25 15:51:49 -05004806 uint32_t final_id = nextID++;
4807 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004808
alan-bakerf67468c2019-11-25 15:51:49 -05004809 uint32_t image_id = final_id;
4810 if (is_int_image) {
4811 // Int image requires a bitcast from v4int to v4uint.
4812 image_id = nextID++;
4813 }
4814
4815 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004816 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004817
4818 if (is_int_image) {
4819 // Generate the bitcast.
4820 Ops.clear();
4821 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4822 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4823 SPIRVInstList.push_back(Inst);
4824 }
David Neto22f144c2017-06-12 14:26:21 -04004825 break;
4826 }
4827
alan-baker75090e42020-02-20 11:21:04 -05004828 // read_image (without a sampler) is mapped to OpImageFetch.
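    // Minimal sketch of the fetch emitted below (IDs invented; the result type
    // follows the image's sampled type):
    //   %texel = OpImageFetch %v4float %image %coord Lod %uint_0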
4829 if (clspv::IsUnsampledImageRead(Callee)) {
4830 Value *Image = Call->getArgOperand(0);
4831 Value *Coordinate = Call->getArgOperand(1);
4832
4833 //
4834 // Generate OpImageFetch
4835 //
4836 // Ops[0] = Result Type ID
4837 // Ops[1] = Image ID
4838 // Ops[2] = Coordinate ID
4839 // Ops[3] = Lod
4840 // Ops[4] = 0
4841 //
4842 SPIRVOperandList Ops;
4843
4844 const bool is_int_image = IsIntImageType(Image->getType());
4845 uint32_t result_type = 0;
4846 if (is_int_image) {
4847 result_type = v4int32ID;
4848 } else {
4849 result_type = lookupType(Call->getType());
4850 }
4851
4852 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
4853 << MkNum(spv::ImageOperandsLodMask);
4854
4855 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4856 Ops << MkId(VMap[CstInt0]);
4857
4858 uint32_t final_id = nextID++;
4859 VMap[&I] = final_id;
4860
4861 uint32_t image_id = final_id;
4862 if (is_int_image) {
4863 // Int image requires a bitcast from v4int to v4uint.
4864 image_id = nextID++;
4865 }
4866
4867 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
4868 SPIRVInstList.push_back(Inst);
4869
4870 if (is_int_image) {
4871 // Generate the bitcast.
4872 Ops.clear();
4873 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4874 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4875 SPIRVInstList.push_back(Inst);
4876 }
4877 break;
4878 }
4879
alan-bakerf67468c2019-11-25 15:51:49 -05004880 // write_image is mapped to OpImageWrite.
4881 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004882 //
4883 // Generate OpImageWrite.
4884 //
4885 // Ops[0] = Image ID
4886 // Ops[1] = Coordinate ID
4887 // Ops[2] = Texel ID
4888 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4889 // Ops[4] ... Ops[n] = (Optional) Operands ID
4890 //
4891 SPIRVOperandList Ops;
4892
4893 Value *Image = Call->getArgOperand(0);
4894 Value *Coordinate = Call->getArgOperand(1);
4895 Value *Texel = Call->getArgOperand(2);
4896
4897 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004898 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004899 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004900
4901 const bool is_int_image = IsIntImageType(Image->getType());
4902 if (is_int_image) {
4903 // Generate a bitcast to v4int and use it as the texel value.
4904 uint32_t castID = nextID++;
4905 Ops << MkId(v4int32ID) << MkId(TexelID);
4906 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4907 SPIRVInstList.push_back(cast);
4908 Ops.clear();
4909 TexelID = castID;
4910 }
David Neto257c3892018-04-11 13:19:45 -04004911 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004912
David Neto87846742018-04-11 17:36:22 -04004913 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004914 SPIRVInstList.push_back(Inst);
4915 break;
4916 }
4917
alan-bakerce179f12019-12-06 19:02:22 -05004918 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4919 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004920 //
alan-bakerce179f12019-12-06 19:02:22 -05004921 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004922 //
4923 // Ops[0] = Image ID
4924 //
alan-bakerce179f12019-12-06 19:02:22 -05004925 // Result type has components equal to the dimensionality of the image,
4926 // plus 1 if the image is arrayed.
4927 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004928 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
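      // End-to-end illustration (IDs invented): get_image_dim on a sampled 2D
      // arrayed image becomes
      //   %sizes = OpImageQuerySizeLod %v3uint %im %uint_0
      //   %dim   = OpVectorShuffle     %v2uint %sizes %sizes 0 1
      // i.e. the array-size component is dropped from the query result.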
David Neto5c22a252018-03-15 16:07:41 -04004929 SPIRVOperandList Ops;
4930
4931 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004932 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4933 uint32_t SizesTypeID = 0;
4934
David Neto5c22a252018-03-15 16:07:41 -04004935 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004936 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05004937 const uint32_t components =
4938 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05004939 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004940 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4941 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05004942 SizesTypeID =
4943 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05004944 }
David Neto5c22a252018-03-15 16:07:41 -04004945 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004946 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004947 spv::Op query_opcode = spv::OpImageQuerySize;
4948 if (clspv::IsSampledImageType(Image->getType())) {
4949 query_opcode = spv::OpImageQuerySizeLod;
4950 // Need explicit 0 for Lod operand.
4951 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4952 Ops << MkId(VMap[CstInt0]);
4953 }
David Neto5c22a252018-03-15 16:07:41 -04004954
4955 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004956 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004957 SPIRVInstList.push_back(QueryInst);
4958
alan-bakerce179f12019-12-06 19:02:22 -05004959 // May require an extra instruction to create the appropriate result of
4960 // the builtin function.
4961 if (clspv::IsGetImageDim(Callee)) {
4962 if (dim == 3) {
4963 // get_image_dim returns an int4 for 3D images.
4964 //
4965 // Reset value map entry since we generated an intermediate
4966 // instruction.
4967 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004968
alan-bakerce179f12019-12-06 19:02:22 -05004969 // Implement:
4970 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4971 Ops.clear();
4972 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4973 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004974
alan-bakerce179f12019-12-06 19:02:22 -05004975 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4976 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004977
alan-bakerce179f12019-12-06 19:02:22 -05004978 auto *Inst =
4979 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4980 SPIRVInstList.push_back(Inst);
4981 } else if (dim != components) {
4982 // get_image_dim returns an int2 regardless of the arrayedness of the
4983 // image. If the image is arrayed an element must be dropped from the
4984 // query result.
4985 //
4986 // Reset value map entry since we generated an intermediate
4987 // instruction.
4988 VMap[&I] = nextID;
4989
4990 // Implement:
4991 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4992 Ops.clear();
4993 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4994 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4995
4996 auto *Inst =
4997 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4998 SPIRVInstList.push_back(Inst);
4999 }
5000 } else if (components > 1) {
5001 // Reset value map entry since we generated an intermediate instruction.
5002 VMap[&I] = nextID;
5003
5004 // Implement:
5005 // %result = OpCompositeExtract %uint %sizes <component number>
5006 Ops.clear();
5007 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5008
5009 uint32_t component = 0;
5010 if (IsGetImageHeight(Callee))
5011 component = 1;
5012 else if (IsGetImageDepth(Callee))
5013 component = 2;
5014 Ops << MkNum(component);
5015
5016 auto *Inst =
5017 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5018 SPIRVInstList.push_back(Inst);
5019 }
David Neto5c22a252018-03-15 16:07:41 -04005020 break;
5021 }
5022
David Neto22f144c2017-06-12 14:26:21 -04005023 // The call instruction is deferred because it needs the function's ID.
5024 // Record the slot's location in the SPIRVInstructionList.
5025 DeferredInsts.push_back(
5026 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5027
David Neto3fbb4072017-10-16 11:28:14 -04005028 // Check whether the implementation of this call uses an extended
5029 // instruction plus one more value-producing instruction. If so, then
5030 // reserve the id for the extra value-producing slot.
5031 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5032 if (EInst != kGlslExtInstBad) {
5033 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005034 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005035 VMap[&I] = nextID;
5036 nextID++;
5037 }
5038 break;
5039 }
5040 case Instruction::Ret: {
5041 unsigned NumOps = I.getNumOperands();
5042 if (NumOps == 0) {
5043 //
5044 // Generate OpReturn.
5045 //
David Netoef5ba2b2019-12-20 08:35:54 -05005046 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005047 } else {
5048 //
5049 // Generate OpReturnValue.
5050 //
5051
5052 // Ops[0] = Return Value ID
5053 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005054
5055 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005056
David Neto87846742018-04-11 17:36:22 -04005057 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005058 SPIRVInstList.push_back(Inst);
5059 break;
5060 }
5061 break;
5062 }
5063 }
5064}
5065
5066void SPIRVProducerPass::GenerateFuncEpilogue() {
5067 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5068
5069 //
5070 // Generate OpFunctionEnd
5071 //
5072
David Netoef5ba2b2019-12-20 08:35:54 -05005073 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005074 SPIRVInstList.push_back(Inst);
5075}
5076
5077bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005078 // Don't specialize <4 x i8> if i8 is generally supported.
5079 if (clspv::Option::Int8Support())
5080 return false;
5081
David Neto22f144c2017-06-12 14:26:21 -04005082 LLVMContext &Context = Ty->getContext();
5083 if (Ty->isVectorTy()) {
5084 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
5085 Ty->getVectorNumElements() == 4) {
5086 return true;
5087 }
5088 }
5089
5090 return false;
5091}
5092
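// Emits the instructions whose generation was deferred during the main
// code-generation pass: structured-control-flow merges and branches, OpPhi,
// and calls (including their extended-instruction expansions). Deferred
// entries are processed in reverse order, and each one is inserted at its
// recorded slot, after any OpPhi instructions already at that point.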
5093void SPIRVProducerPass::HandleDeferredInstruction() {
5094 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5095 ValueMapType &VMap = getValueMap();
5096 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5097
5098 for (auto DeferredInst = DeferredInsts.rbegin();
5099 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5100 Value *Inst = std::get<0>(*DeferredInst);
5101 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5102 if (InsertPoint != SPIRVInstList.end()) {
5103 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5104 ++InsertPoint;
5105 }
5106 }
5107
5108 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005109 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005110 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005111 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005112 //
5113 // Generate OpLoopMerge.
5114 //
5115 // Ops[0] = Merge Block ID
5116 // Ops[1] = Continue Target ID
5117 // Ops[2] = Selection Control
5118 SPIRVOperandList Ops;
5119
alan-baker06cad652019-12-03 17:56:47 -05005120 auto MergeBB = MergeBlocks[BrBB];
5121 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005122 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005123 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005124 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005125 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005126
David Neto87846742018-04-11 17:36:22 -04005127 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005128 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005129 } else if (MergeBlocks.count(BrBB)) {
5130 //
5131 // Generate OpSelectionMerge.
5132 //
5133 // Ops[0] = Merge Block ID
5134 // Ops[1] = Selection Control
5135 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005136
alan-baker06cad652019-12-03 17:56:47 -05005137 auto MergeBB = MergeBlocks[BrBB];
5138 uint32_t MergeBBID = VMap[MergeBB];
5139 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005140
alan-baker06cad652019-12-03 17:56:47 -05005141 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5142 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005143 }
5144
5145 if (Br->isConditional()) {
5146 //
5147 // Generate OpBranchConditional.
5148 //
5149 // Ops[0] = Condition ID
5150 // Ops[1] = True Label ID
5151 // Ops[2] = False Label ID
5152 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5153 SPIRVOperandList Ops;
5154
5155 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005156 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005157 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005158
5159 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005160
David Neto87846742018-04-11 17:36:22 -04005161 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005162 SPIRVInstList.insert(InsertPoint, BrInst);
5163 } else {
5164 //
5165 // Generate OpBranch.
5166 //
5167 // Ops[0] = Target Label ID
5168 SPIRVOperandList Ops;
5169
5170 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005171 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005172
David Neto87846742018-04-11 17:36:22 -04005173 SPIRVInstList.insert(InsertPoint,
5174 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005175 }
5176 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005177 if (PHI->getType()->isPointerTy()) {
5178 // OpPhi on pointers requires variable pointers.
5179 setVariablePointersCapabilities(
5180 PHI->getType()->getPointerAddressSpace());
5181 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5182 setVariablePointers(true);
5183 }
5184 }
5185
David Neto22f144c2017-06-12 14:26:21 -04005186 //
5187 // Generate OpPhi.
5188 //
5189 // Ops[0] = Result Type ID
5190 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5191 SPIRVOperandList Ops;
5192
David Neto257c3892018-04-11 13:19:45 -04005193 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005194
David Neto22f144c2017-06-12 14:26:21 -04005195 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5196 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005197 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005198 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005199 }
5200
5201 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005202 InsertPoint,
5203 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005204 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5205 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005206 auto callee_name = Callee->getName();
5207 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005208
5209 if (EInst) {
5210 uint32_t &ExtInstImportID = getOpExtInstImportID();
5211
5212 //
5213 // Generate OpExtInst.
5214 //
5215
5216 // Ops[0] = Result Type ID
5217 // Ops[1] = Set ID (OpExtInstImport ID)
5218 // Ops[2] = Instruction Number (Literal Number)
5219 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5220 SPIRVOperandList Ops;
5221
David Neto862b7d82018-06-14 18:48:37 -04005222 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5223 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005224
David Neto22f144c2017-06-12 14:26:21 -04005225 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5226 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005227 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005228 }
5229
David Neto87846742018-04-11 17:36:22 -04005230 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5231 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005232 SPIRVInstList.insert(InsertPoint, ExtInst);
5233
David Neto3fbb4072017-10-16 11:28:14 -04005234 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5235 if (IndirectExtInst != kGlslExtInstBad) {
5236 // Generate one more instruction that uses the result of the extended
5237 // instruction. Its result id is one more than the id of the
5238 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005239 LLVMContext &Context =
5240 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005241
David Neto3fbb4072017-10-16 11:28:14 -04005242 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5243 &VMap, &SPIRVInstList, &InsertPoint](
5244 spv::Op opcode, Constant *constant) {
5245 //
5246 // Generate instruction like:
5247 // result = opcode constant <extinst-result>
5248 //
5249 // Ops[0] = Result Type ID
5250 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5251 // Ops[2] = Operand 1 ;; the result of the extended instruction
5252 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005253
David Neto3fbb4072017-10-16 11:28:14 -04005254 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005255 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005256
5257 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5258 constant = ConstantVector::getSplat(
5259 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5260 }
David Neto257c3892018-04-11 13:19:45 -04005261 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005262
5263 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005264 InsertPoint, new SPIRVInstruction(
5265 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005266 };
5267
5268 switch (IndirectExtInst) {
5269 case glsl::ExtInstFindUMsb: // Implementing clz
5270 generate_extra_inst(
5271 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5272 break;
5273 case glsl::ExtInstAcos: // Implementing acospi
5274 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005275 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005276 case glsl::ExtInstAtan2: // Implementing atan2pi
5277 generate_extra_inst(
5278 spv::OpFMul,
5279 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5280 break;
5281
5282 default:
5283 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005284 }
David Neto22f144c2017-06-12 14:26:21 -04005285 }
David Neto3fbb4072017-10-16 11:28:14 -04005286
alan-bakerb39c8262019-03-08 14:03:37 -05005287 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005288 //
5289 // Generate OpBitCount
5290 //
5291 // Ops[0] = Result Type ID
5292 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005293 SPIRVOperandList Ops;
5294 Ops << MkId(lookupType(Call->getType()))
5295 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005296
5297 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005298 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005299 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005300
David Neto862b7d82018-06-14 18:48:37 -04005301 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005302
5303 // Generate an OpCompositeConstruct
5304 SPIRVOperandList Ops;
5305
5306 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005307 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005308
5309 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005310 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005311 }
5312
5313 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005314 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5315 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005316
Alan Baker202c8c72018-08-13 13:47:44 -04005317 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5318
5319 // We have already mapped the call's result value to an ID.
5320 // Don't generate any code now.
5321
5322 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005323
5324 // We have already mapped the call's result value to an ID.
5325 // Don't generate any code now.
5326
David Neto22f144c2017-06-12 14:26:21 -04005327 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005328 if (Call->getType()->isPointerTy()) {
5329 // Functions returning pointers require variable pointers.
5330 setVariablePointersCapabilities(
5331 Call->getType()->getPointerAddressSpace());
5332 }
5333
David Neto22f144c2017-06-12 14:26:21 -04005334 //
5335 // Generate OpFunctionCall.
5336 //
5337
5338 // Ops[0] = Result Type ID
5339 // Ops[1] = Callee Function ID
5340 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5341 SPIRVOperandList Ops;
5342
David Neto862b7d82018-06-14 18:48:37 -04005343 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005344
5345 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005346 if (CalleeID == 0) {
5347 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005348 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005349 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5350 // causes an infinite loop. Instead, go ahead and generate
5351 // the bad function call. A validator will catch the 0-Id.
5352 // llvm_unreachable("Can't translate function call");
5353 }
David Neto22f144c2017-06-12 14:26:21 -04005354
David Neto257c3892018-04-11 13:19:45 -04005355 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005356
David Neto22f144c2017-06-12 14:26:21 -04005357 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5358 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005359 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005360 auto *operand_type = operand->getType();
5361 // Images and samplers can be passed as function parameters without
5362 // variable pointers.
5363 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5364 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005365 auto sc =
5366 GetStorageClass(operand->getType()->getPointerAddressSpace());
5367 if (sc == spv::StorageClassStorageBuffer) {
5368 // Passing SSBO by reference requires variable pointers storage
5369 // buffer.
5370 setVariablePointersStorageBuffer(true);
5371 } else if (sc == spv::StorageClassWorkgroup) {
5372 // Workgroup references require variable pointers if they are not
5373 // memory object declarations.
5374 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5375 // Workgroup accessor represents a variable reference.
5376 if (!operand_call->getCalledFunction()->getName().startswith(
5377 clspv::WorkgroupAccessorFunction()))
5378 setVariablePointers(true);
5379 } else {
5380 // Arguments are function parameters.
5381 if (!isa<Argument>(operand))
5382 setVariablePointers(true);
5383 }
5384 }
5385 }
5386 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005387 }
5388
David Neto87846742018-04-11 17:36:22 -04005389 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5390 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005391 SPIRVInstList.insert(InsertPoint, CallInst);
5392 }
5393 }
5394 }
5395}
5396
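// Emits decorations that could not be produced until code generation was
// complete: ArrayStride decorations for the pointer and array types used by
// OpPtrAccessChain, and SpecId decorations for workgroup (local) argument
// array sizes. They are inserted just after the last existing decoration.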
David Neto1a1a0582017-07-07 12:01:44 -04005397void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005398 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005399 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005400 }
David Neto1a1a0582017-07-07 12:01:44 -04005401
5402 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005403
5404 // Find an iterator pointing just past the last decoration.
5405 bool seen_decorations = false;
5406 auto DecoInsertPoint =
5407 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5408 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5409 const bool is_decoration =
5410 Inst->getOpcode() == spv::OpDecorate ||
5411 Inst->getOpcode() == spv::OpMemberDecorate;
5412 if (is_decoration) {
5413 seen_decorations = true;
5414 return false;
5415 } else {
5416 return seen_decorations;
5417 }
5418 });
5419
David Netoc6f3ab22018-04-06 18:02:31 -04005420 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5421 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005422 for (auto *type : getTypesNeedingArrayStride()) {
5423 Type *elemTy = nullptr;
5424 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5425 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005426 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005427 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005428 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005429 elemTy = seqTy->getSequentialElementType();
5430 } else {
5431 errs() << "Unhandled strided type " << *type << "\n";
5432 llvm_unreachable("Unhandled strided type");
5433 }
David Neto1a1a0582017-07-07 12:01:44 -04005434
5435 // Ops[0] = Target ID
5436 // Ops[1] = Decoration (ArrayStride)
5437 // Ops[2] = Stride number (Literal Number)
5438 SPIRVOperandList Ops;
5439
David Neto85082642018-03-24 06:55:20 -07005440 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005441 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005442
5443 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5444 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005445
David Neto87846742018-04-11 17:36:22 -04005446 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005447 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5448 }
David Netoc6f3ab22018-04-06 18:02:31 -04005449
5450 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005451 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5452 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005453 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005454 SPIRVOperandList Ops;
5455 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5456 << MkNum(arg_info.spec_id);
5457 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005458 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005459 }
David Neto1a1a0582017-07-07 12:01:44 -04005460}
5461
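// Maps the Itanium-mangled name of an OpenCL builtin to the GLSL.std.450
// extended instruction that implements it directly, e.g. _Z3maxii (max on
// two ints) maps to SMax. Returns kGlslExtInstBad when there is no direct
// single-instruction mapping.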
David Neto22f144c2017-06-12 14:26:21 -04005462glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5463 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005464 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5465 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5466 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5467 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005468 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5469 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5470 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5471 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005472 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5473 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5474 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5475 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005476 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5477 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5478 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5479 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005480 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5481 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5482 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5483 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5484 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5485 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5486 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5487 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005488 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5489 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5490 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5491 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5492 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5493 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5494 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5495 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005496 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5497 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5498 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5499 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5500 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5501 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5502 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5503 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005504 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5505 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5506 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5507 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5508 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5509 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5510 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5511 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005512 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5513 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5514 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5515 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-baker49bb5fb2020-01-15 08:22:13 -05005516 .Case("_Z5clampDhDhDh", glsl::ExtInst::ExtInstFClamp)
5517 .Case("_Z5clampDv2_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5518 .Case("_Z5clampDv3_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5519 .Case("_Z5clampDv4_DhS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005520 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5521 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5522 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5523 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5524 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5525 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5526 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5527 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005528 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5529 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5530 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5531 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5532 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5533 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5534 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5535 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005536 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5537 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5538 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5539 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5540 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5541 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5542 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5543 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005544 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5545 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5546 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5547 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5548 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5549 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5550 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5551 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005552 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5553 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5554 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5555 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
alan-baker49bb5fb2020-01-15 08:22:13 -05005556 .Case("_Z3maxDhDh", glsl::ExtInst::ExtInstFMax)
5557 .Case("_Z3maxDv2_DhS_", glsl::ExtInst::ExtInstFMax)
5558 .Case("_Z3maxDv3_DhS_", glsl::ExtInst::ExtInstFMax)
5559 .Case("_Z3maxDv4_DhS_", glsl::ExtInst::ExtInstFMax)
David Neto22f144c2017-06-12 14:26:21 -04005560 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005561 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5562 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5563 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5564 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5565 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5566 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5567 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5568 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005569 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5570 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5571 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5572 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5573 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5574 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5575 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5576 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005577 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5578 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5579 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5580 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5581 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5582 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5583 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5584 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005585 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5586 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5587 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5588 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5589 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5590 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5591 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5592 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005593 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5594 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5595 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5596 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
alan-baker49bb5fb2020-01-15 08:22:13 -05005597 .Case("_Z3minDhDh", glsl::ExtInst::ExtInstFMin)
5598 .Case("_Z3minDv2_DhS_", glsl::ExtInst::ExtInstFMin)
5599 .Case("_Z3minDv3_DhS_", glsl::ExtInst::ExtInstFMin)
5600 .Case("_Z3minDv4_DhS_", glsl::ExtInst::ExtInstFMin)
David Neto22f144c2017-06-12 14:26:21 -04005601 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5602 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5603 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5604 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5605 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5606 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5607 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5608 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5609 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5610 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5611 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5612 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5613 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5614 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5615 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5616 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5617 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5618 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5619 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5620 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5621 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5622 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5623 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5624 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5625 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5626 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5627 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5628 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5629 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5630 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5631 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5632 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5633 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5634 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5635 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5636 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5637 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005638 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005639 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5640 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5641 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5642 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5643 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5644 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5645 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5646 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5647 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5648 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5649 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5650 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5651 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5652 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5653 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5654 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5655 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005656 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005657 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005658 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005659 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005660 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005661 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5662 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005663 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005664 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5665 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5666 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005667 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5668 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5669 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5670 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005671 .Default(kGlslExtInstBad);
5672}
5673
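// Maps builtins that are implemented as a GLSL.std.450 instruction followed
// by one extra fix-up instruction. For example, clz is FindUMsb followed by
// a subtraction from 31, roughly (with illustrative ids):
//   %msb = OpExtInst %uint %glsl_ext FindUMsb %x
//   %result = OpISub %uint %uint_31 %msb
// and the *pi trigonometric variants multiply the result by 1/pi.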
5674glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5675 // Check indirect cases.
5676 return StringSwitch<glsl::ExtInst>(Name)
5677 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5678 // Use exact match on float arg because these need a multiply
5679 // of a constant of the right floating point type.
5680 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5681 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5682 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5683 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5684 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5685 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5686 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5687 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005688 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5689 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5690 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5691 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005692 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5693 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5694 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5695 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5696 .Default(kGlslExtInstBad);
5697}
5698
alan-bakerb6b09dc2018-11-08 16:59:28 -05005699glsl::ExtInst
5700SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005701 auto direct = getExtInstEnum(Name);
5702 if (direct != kGlslExtInstBad)
5703 return direct;
5704 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005705}
5706
David Neto22f144c2017-06-12 14:26:21 -04005707void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005708 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005709}
5710
5711void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5712 WriteOneWord(Inst->getResultID());
5713}
5714
5715void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5716 // High 16 bits: Word Count
5717 // Low 16 bits : Opcode
5718 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005719 const uint32_t count = Inst->getWordCount();
5720 if (count > 65535) {
5721 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5722 llvm_unreachable("Word count too high");
5723 }
David Neto22f144c2017-06-12 14:26:21 -04005724 Word |= Inst->getWordCount() << 16;
5725 WriteOneWord(Word);
5726}
5727
David Netoef5ba2b2019-12-20 08:35:54 -05005728void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005729 SPIRVOperandType OpTy = Op->getType();
5730 switch (OpTy) {
5731 default: {
5732 llvm_unreachable("Unsupported SPIRV Operand Type???");
5733 break;
5734 }
5735 case SPIRVOperandType::NUMBERID: {
5736 WriteOneWord(Op->getNumID());
5737 break;
5738 }
5739 case SPIRVOperandType::LITERAL_STRING: {
5740 std::string Str = Op->getLiteralStr();
5741 const char *Data = Str.c_str();
5742 size_t WordSize = Str.size() / 4;
5743 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5744 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5745 }
5746
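    // The final word carries any leftover characters padded with zero bytes;
    // when the string length is a multiple of four this writes a full word of
    // zeros, which supplies the required nul terminator.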
5747 uint32_t Remainder = Str.size() % 4;
5748 uint32_t LastWord = 0;
5749 if (Remainder) {
5750 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5751 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5752 }
5753 }
5754
5755 WriteOneWord(LastWord);
5756 break;
5757 }
5758 case SPIRVOperandType::LITERAL_INTEGER:
5759 case SPIRVOperandType::LITERAL_FLOAT: {
5760 auto LiteralNum = Op->getLiteralNum();
5761 // TODO: Handle LiteralNum carefully.
5762 for (auto Word : LiteralNum) {
5763 WriteOneWord(Word);
5764 }
5765 break;
5766 }
5767 }
5768}
5769
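// Serializes the accumulated SPIR-V instructions. The opcode groups below
// differ only in where the result id sits in the encoding: no result id;
// a result id immediately after the opcode (type declarations, labels,
// OpExtInstImport); or a result type followed by the result id (most
// value-producing instructions, constants, and variables).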
5770void SPIRVProducerPass::WriteSPIRVBinary() {
5771 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5772
5773 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005774 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005775 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5776
5777 switch (Opcode) {
5778 default: {
David Neto5c22a252018-03-15 16:07:41 -04005779 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005780 llvm_unreachable("Unsupported SPIRV instruction");
5781 break;
5782 }
5783 case spv::OpCapability:
5784 case spv::OpExtension:
5785 case spv::OpMemoryModel:
5786 case spv::OpEntryPoint:
5787 case spv::OpExecutionMode:
5788 case spv::OpSource:
5789 case spv::OpDecorate:
5790 case spv::OpMemberDecorate:
5791 case spv::OpBranch:
5792 case spv::OpBranchConditional:
5793 case spv::OpSelectionMerge:
5794 case spv::OpLoopMerge:
5795 case spv::OpStore:
5796 case spv::OpImageWrite:
5797 case spv::OpReturnValue:
5798 case spv::OpControlBarrier:
5799 case spv::OpMemoryBarrier:
5800 case spv::OpReturn:
5801 case spv::OpFunctionEnd:
5802 case spv::OpCopyMemory: {
5803 WriteWordCountAndOpcode(Inst);
5804 for (uint32_t i = 0; i < Ops.size(); i++) {
5805 WriteOperand(Ops[i]);
5806 }
5807 break;
5808 }
5809 case spv::OpTypeBool:
5810 case spv::OpTypeVoid:
5811 case spv::OpTypeSampler:
5812 case spv::OpLabel:
5813 case spv::OpExtInstImport:
5814 case spv::OpTypePointer:
5815 case spv::OpTypeRuntimeArray:
5816 case spv::OpTypeStruct:
5817 case spv::OpTypeImage:
5818 case spv::OpTypeSampledImage:
5819 case spv::OpTypeInt:
5820 case spv::OpTypeFloat:
5821 case spv::OpTypeArray:
5822 case spv::OpTypeVector:
5823 case spv::OpTypeFunction: {
5824 WriteWordCountAndOpcode(Inst);
5825 WriteResultID(Inst);
5826 for (uint32_t i = 0; i < Ops.size(); i++) {
5827 WriteOperand(Ops[i]);
5828 }
5829 break;
5830 }
5831 case spv::OpFunction:
5832 case spv::OpFunctionParameter:
5833 case spv::OpAccessChain:
5834 case spv::OpPtrAccessChain:
5835 case spv::OpInBoundsAccessChain:
5836 case spv::OpUConvert:
5837 case spv::OpSConvert:
5838 case spv::OpConvertFToU:
5839 case spv::OpConvertFToS:
5840 case spv::OpConvertUToF:
5841 case spv::OpConvertSToF:
5842 case spv::OpFConvert:
5843 case spv::OpConvertPtrToU:
5844 case spv::OpConvertUToPtr:
5845 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005846 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005847 case spv::OpIAdd:
5848 case spv::OpFAdd:
5849 case spv::OpISub:
5850 case spv::OpFSub:
5851 case spv::OpIMul:
5852 case spv::OpFMul:
5853 case spv::OpUDiv:
5854 case spv::OpSDiv:
5855 case spv::OpFDiv:
5856 case spv::OpUMod:
5857 case spv::OpSRem:
5858 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005859 case spv::OpUMulExtended:
5860 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005861 case spv::OpBitwiseOr:
5862 case spv::OpBitwiseXor:
5863 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005864 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005865 case spv::OpShiftLeftLogical:
5866 case spv::OpShiftRightLogical:
5867 case spv::OpShiftRightArithmetic:
5868 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005869 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005870 case spv::OpCompositeExtract:
5871 case spv::OpVectorExtractDynamic:
5872 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005873 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005874 case spv::OpVectorInsertDynamic:
5875 case spv::OpVectorShuffle:
5876 case spv::OpIEqual:
5877 case spv::OpINotEqual:
5878 case spv::OpUGreaterThan:
5879 case spv::OpUGreaterThanEqual:
5880 case spv::OpULessThan:
5881 case spv::OpULessThanEqual:
5882 case spv::OpSGreaterThan:
5883 case spv::OpSGreaterThanEqual:
5884 case spv::OpSLessThan:
5885 case spv::OpSLessThanEqual:
5886 case spv::OpFOrdEqual:
5887 case spv::OpFOrdGreaterThan:
5888 case spv::OpFOrdGreaterThanEqual:
5889 case spv::OpFOrdLessThan:
5890 case spv::OpFOrdLessThanEqual:
5891 case spv::OpFOrdNotEqual:
5892 case spv::OpFUnordEqual:
5893 case spv::OpFUnordGreaterThan:
5894 case spv::OpFUnordGreaterThanEqual:
5895 case spv::OpFUnordLessThan:
5896 case spv::OpFUnordLessThanEqual:
5897 case spv::OpFUnordNotEqual:
5898 case spv::OpExtInst:
5899 case spv::OpIsInf:
5900 case spv::OpIsNan:
5901 case spv::OpAny:
5902 case spv::OpAll:
5903 case spv::OpUndef:
5904 case spv::OpConstantNull:
5905 case spv::OpLogicalOr:
5906 case spv::OpLogicalAnd:
5907 case spv::OpLogicalNot:
5908 case spv::OpLogicalNotEqual:
5909 case spv::OpConstantComposite:
5910 case spv::OpSpecConstantComposite:
5911 case spv::OpConstantTrue:
5912 case spv::OpConstantFalse:
5913 case spv::OpConstant:
5914 case spv::OpSpecConstant:
5915 case spv::OpVariable:
5916 case spv::OpFunctionCall:
5917 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005918 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04005919 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005920 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005921 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005922 case spv::OpSelect:
5923 case spv::OpPhi:
5924 case spv::OpLoad:
5925 case spv::OpAtomicIAdd:
5926 case spv::OpAtomicISub:
5927 case spv::OpAtomicExchange:
5928 case spv::OpAtomicIIncrement:
5929 case spv::OpAtomicIDecrement:
5930 case spv::OpAtomicCompareExchange:
5931 case spv::OpAtomicUMin:
5932 case spv::OpAtomicSMin:
5933 case spv::OpAtomicUMax:
5934 case spv::OpAtomicSMax:
5935 case spv::OpAtomicAnd:
5936 case spv::OpAtomicOr:
5937 case spv::OpAtomicXor:
5938 case spv::OpDot: {
5939 WriteWordCountAndOpcode(Inst);
5940 WriteOperand(Ops[0]);
5941 WriteResultID(Inst);
5942 for (uint32_t i = 1; i < Ops.size(); i++) {
5943 WriteOperand(Ops[i]);
5944 }
5945 break;
5946 }
5947 }
5948 }
5949}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005950
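// Returns true if a null value of |type| can be represented with
// OpConstantNull. Scalars, vectors, and pointers qualify, as do arrays and
// structs made only of nullable types; image and sampler types do not.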
alan-bakerb6b09dc2018-11-08 16:59:28 -05005951bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005952 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005953 case Type::HalfTyID:
5954 case Type::FloatTyID:
5955 case Type::DoubleTyID:
5956 case Type::IntegerTyID:
5957 case Type::VectorTyID:
5958 return true;
5959 case Type::PointerTyID: {
5960 const PointerType *pointer_type = cast<PointerType>(type);
5961 if (pointer_type->getPointerAddressSpace() !=
5962 AddressSpace::UniformConstant) {
5963 auto pointee_type = pointer_type->getPointerElementType();
5964 if (pointee_type->isStructTy() &&
5965 cast<StructType>(pointee_type)->isOpaque()) {
5966 // Images and samplers are not nullable.
5967 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005968 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005969 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005970 return true;
5971 }
5972 case Type::ArrayTyID:
5973 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5974 case Type::StructTyID: {
5975 const StructType *struct_type = cast<StructType>(type);
5976 // Images and samplers are not nullable.
5977 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005978 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005979 for (const auto element : struct_type->elements()) {
5980 if (!IsTypeNullable(element))
5981 return false;
5982 }
5983 return true;
5984 }
5985 default:
5986 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005987 }
5988}
Alan Bakerfcda9482018-10-02 17:09:59 -04005989
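// Loads the remapped type layout metadata (member offsets and size triples)
// into RemappedUBOTypeOffsets and RemappedUBOTypeSizes so later layout
// queries can prefer the remapped layout over the default DataLayout.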
5990void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5991 if (auto *offsets_md =
5992 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5993 // Metdata is stored as key-value pair operands. The first element of each
5994 // operand is the type and the second is a vector of offsets.
5995 for (const auto *operand : offsets_md->operands()) {
5996 const auto *pair = cast<MDTuple>(operand);
5997 auto *type =
5998 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5999 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6000 std::vector<uint32_t> offsets;
6001 for (const Metadata *offset_md : offset_vector->operands()) {
6002 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006003 offsets.push_back(static_cast<uint32_t>(
6004 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006005 }
6006 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6007 }
6008 }
6009
6010 if (auto *sizes_md =
6011 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
6012 // Metadata is stored as key-value pair operands. The first element of each
6013 // operand is the type and the second is a triple of sizes: type size in
6014 // bits, store size and alloc size.
6015 for (const auto *operand : sizes_md->operands()) {
6016 const auto *pair = cast<MDTuple>(operand);
6017 auto *type =
6018 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6019 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6020 uint64_t type_size_in_bits =
6021 cast<ConstantInt>(
6022 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6023 ->getZExtValue();
6024 uint64_t type_store_size =
6025 cast<ConstantInt>(
6026 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6027 ->getZExtValue();
6028 uint64_t type_alloc_size =
6029 cast<ConstantInt>(
6030 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6031 ->getZExtValue();
6032 RemappedUBOTypeSizes.insert(std::make_pair(
6033 type, std::make_tuple(type_size_in_bits, type_store_size,
6034 type_alloc_size)));
6035 }
6036 }
6037}
6038
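// The three size queries below prefer the sizes recorded for remapped UBO
// types and fall back to the module's DataLayout otherwise.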
6039uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6040 const DataLayout &DL) {
6041 auto iter = RemappedUBOTypeSizes.find(type);
6042 if (iter != RemappedUBOTypeSizes.end()) {
6043 return std::get<0>(iter->second);
6044 }
6045
6046 return DL.getTypeSizeInBits(type);
6047}
6048
6049uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6050 auto iter = RemappedUBOTypeSizes.find(type);
6051 if (iter != RemappedUBOTypeSizes.end()) {
6052 return std::get<1>(iter->second);
6053 }
6054
6055 return DL.getTypeStoreSize(type);
6056}
6057
6058uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6059 auto iter = RemappedUBOTypeSizes.find(type);
6060 if (iter != RemappedUBOTypeSizes.end()) {
6061 return std::get<2>(iter->second);
6062 }
6063
6064 return DL.getTypeAllocSize(type);
6065}
alan-baker5b86ed72019-02-15 08:26:50 -05006066
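// Storage buffer pointers only need the VariablePointersStorageBuffer
// capability; pointers in any other address space require full
// VariablePointers.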
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006067void SPIRVProducerPass::setVariablePointersCapabilities(
6068 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006069 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6070 setVariablePointersStorageBuffer(true);
6071 } else {
6072 setVariablePointers(true);
6073 }
6074}
6075
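// Walks back through GEP chains to the underlying base pointer; returns |v|
// unchanged when it is not a GEP.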
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006076Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006077 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6078 return GetBasePointer(gep->getPointerOperand());
6079 }
6080
6081 // Conservatively return |v|.
6082 return v;
6083}
6084
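// Two pointers refer to the same resource when both come from resource
// accessor calls with matching descriptor set and binding, or from workgroup
// accessor calls with matching spec id.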
6085bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6086 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6087 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6088 if (lhs_call->getCalledFunction()->getName().startswith(
6089 clspv::ResourceAccessorFunction()) &&
6090 rhs_call->getCalledFunction()->getName().startswith(
6091 clspv::ResourceAccessorFunction())) {
6092 // For resource accessors, match descriptor set and binding.
6093 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6094 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6095 return true;
6096 } else if (lhs_call->getCalledFunction()->getName().startswith(
6097 clspv::WorkgroupAccessorFunction()) &&
6098 rhs_call->getCalledFunction()->getName().startswith(
6099 clspv::WorkgroupAccessorFunction())) {
6100 // For workgroup resources, match spec id.
6101 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6102 return true;
6103 }
6104 }
6105 }
6106
6107 return false;
6108}
6109
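// Checks whether a pointer-producing select or phi chooses only between
// pointers into the same underlying object (or null/undef values), which
// allows the code to avoid requiring the full VariablePointers capability.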
6110bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6111 assert(inst->getType()->isPointerTy());
6112 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6113 spv::StorageClassStorageBuffer);
6114 const bool hack_undef = clspv::Option::HackUndef();
6115 if (auto *select = dyn_cast<SelectInst>(inst)) {
6116 auto *true_base = GetBasePointer(select->getTrueValue());
6117 auto *false_base = GetBasePointer(select->getFalseValue());
6118
6119 if (true_base == false_base)
6120 return true;
6121
6122 // If either the true or false operand is a null, then we satisfy the same
6123 // object constraint.
6124 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6125 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6126 return true;
6127 }
6128
6129 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6130 if (false_cst->isNullValue() ||
6131 (hack_undef && isa<UndefValue>(false_base)))
6132 return true;
6133 }
6134
6135 if (sameResource(true_base, false_base))
6136 return true;
6137 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6138 Value *value = nullptr;
6139 bool ok = true;
6140 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6141 auto *base = GetBasePointer(phi->getIncomingValue(i));
6142 // Null values satisfy the constraint of selecting from the same
6143 // object.
6144 if (!value) {
6145 if (auto *cst = dyn_cast<Constant>(base)) {
6146 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6147 value = base;
6148 } else {
6149 value = base;
6150 }
6151 } else if (base != value) {
6152 if (auto *base_cst = dyn_cast<Constant>(base)) {
6153 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6154 continue;
6155 }
6156
6157 if (sameResource(value, base))
6158 continue;
6159
6160 // Values don't represent the same base.
6161 ok = false;
6162 }
6163 }
6164
6165 return ok;
6166 }
6167
6168 // Conservatively return false.
6169 return false;
6170}
alan-bakere9308012019-03-15 10:25:13 -04006171
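// Determines whether a kernel argument ever aliases a coherent storage
// buffer by tracing the values passed for |Arg| at each call site back to
// resource accessor calls and checking their coherent operand.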
6172bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6173 if (!Arg.getType()->isPointerTy() ||
6174 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6175 // Only SSBOs need to be annotated as coherent.
6176 return false;
6177 }
6178
6179 DenseSet<Value *> visited;
6180 std::vector<Value *> stack;
6181 for (auto *U : Arg.getParent()->users()) {
6182 if (auto *call = dyn_cast<CallInst>(U)) {
6183 stack.push_back(call->getOperand(Arg.getArgNo()));
6184 }
6185 }
6186
6187 while (!stack.empty()) {
6188 Value *v = stack.back();
6189 stack.pop_back();
6190
6191 if (!visited.insert(v).second)
6192 continue;
6193
6194 auto *resource_call = dyn_cast<CallInst>(v);
6195 if (resource_call &&
6196 resource_call->getCalledFunction()->getName().startswith(
6197 clspv::ResourceAccessorFunction())) {
6198 // If this is a resource accessor function, check if the coherent operand
6199 // is set.
6200 const auto coherent =
6201 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6202 ->getZExtValue());
6203 if (coherent == 1)
6204 return true;
6205 } else if (auto *arg = dyn_cast<Argument>(v)) {
6206 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006207 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006208 if (auto *call = dyn_cast<CallInst>(U)) {
6209 stack.push_back(call->getOperand(arg->getArgNo()));
6210 }
6211 }
6212 } else if (auto *user = dyn_cast<User>(v)) {
6213 // If this is a user, traverse all operands that could lead to resource
6214 // variables.
6215 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6216 Value *operand = user->getOperand(i);
6217 if (operand->getType()->isPointerTy() &&
6218 operand->getType()->getPointerAddressSpace() ==
6219 clspv::AddressSpace::Global) {
6220 stack.push_back(operand);
6221 }
6222 }
6223 }
6224 }
6225
6226 // No coherent resource variables encountered.
6227 return false;
6228}
alan-baker06cad652019-12-03 17:56:47 -05006229
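// Records, for every function, the merge and continue blocks for each loop
// header and the merge block for each conditional branch that is not a loop
// back edge. These maps drive OpLoopMerge and OpSelectionMerge emission
// during deferred instruction handling.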
6230void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6231 // First, track loop merges and continues.
6232 DenseSet<BasicBlock *> LoopMergesAndContinues;
6233 for (auto &F : module) {
6234 if (F.isDeclaration())
6235 continue;
6236
6237 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6238 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6239 std::deque<BasicBlock *> order;
6240 DenseSet<BasicBlock *> visited;
6241 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6242
6243 for (auto BB : order) {
6244 auto terminator = BB->getTerminator();
6245 auto branch = dyn_cast<BranchInst>(terminator);
6246 if (LI.isLoopHeader(BB)) {
6247 auto L = LI.getLoopFor(BB);
6248 BasicBlock *ContinueBB = nullptr;
6249 BasicBlock *MergeBB = nullptr;
6250
6251 MergeBB = L->getExitBlock();
6252 if (!MergeBB) {
6253 // The StructurizeCFG pass converts the CFG into a triangle shape with
6254 // single-entry/single-exit regions. As a result, a loop should not
6255 // have multiple exits.
6256 llvm_unreachable("Loop has multiple exits???");
6257 }
6258
6259 if (L->isLoopLatch(BB)) {
6260 ContinueBB = BB;
6261 } else {
6262 // From SPIR-V spec 2.11: the Continue Target must dominate the
6263 // back-edge block.
6264 BasicBlock *Header = L->getHeader();
6265 BasicBlock *Latch = L->getLoopLatch();
6266 for (auto *loop_block : L->blocks()) {
6267 if (loop_block == Header) {
6268 continue;
6269 }
6270
6271 // Check whether this block dominates the block with the back-edge.
6272 // The loop latch is the single block with a back-edge. If it was
6273 // possible, StructurizeCFG made the loop conform to this
6274 // requirement, otherwise |Latch| is a nullptr.
6275 if (DT.dominates(loop_block, Latch)) {
6276 ContinueBB = loop_block;
6277 }
6278 }
6279
6280 if (!ContinueBB) {
6281 llvm_unreachable("Wrong continue block from loop");
6282 }
6283 }
6284
6285 // Record the continue and merge blocks.
6286 MergeBlocks[BB] = MergeBB;
6287 ContinueBlocks[BB] = ContinueBB;
6288 LoopMergesAndContinues.insert(MergeBB);
6289 LoopMergesAndContinues.insert(ContinueBB);
6290 } else if (branch && branch->isConditional()) {
6291 auto L = LI.getLoopFor(BB);
6292 bool HasBackedge = false;
6293 while (L && !HasBackedge) {
6294 if (L->isLoopLatch(BB)) {
6295 HasBackedge = true;
6296 }
6297 L = L->getParentLoop();
6298 }
6299
6300 if (!HasBackedge) {
6301 // Only need a merge if the branch doesn't include a loop break or
6302 // continue.
6303 auto true_bb = branch->getSuccessor(0);
6304 auto false_bb = branch->getSuccessor(1);
6305 if (!LoopMergesAndContinues.count(true_bb) &&
6306 !LoopMergesAndContinues.count(false_bb)) {
6307 // StructurizeCFG pass already manipulated CFG. Just use false block
6308 // of branch instruction as merge block.
6309 MergeBlocks[BB] = false_bb;
6310 }
6311 }
6312 }
6313 }
6314 }
6315}