// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};
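
// Worked example for GetNumWords() (illustrative, assuming the LITERAL_STRING
// case above): the string "main" has LiteralStr.size() == 4, so the result is
// (4 + 4) / 4 == 2 words, matching the SPIR-V rule that a string literal plus
// its terminating null is padded out to a 32-bit word boundary.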

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
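
// Illustrative use of the helpers above (the IDs here are hypothetical, not
// taken from this file): operands are streamed into a list and later wrapped
// in a SPIRVInstruction, e.g.
//   SPIRVOperandList Ops;
//   Ops << MkId(float_type_id) << MkNum(4);
// which would describe the operands of "OpTypeVector %float 4".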

struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the
  // single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};
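
// Encoding note (for reference): a binary SPIR-V instruction begins with one
// word packing (WordCount << 16) | Opcode, optionally followed by a result-ID
// word and then the operand words. That is why the constructors above start
// WordCount at 1 when there is no result ID and at 2 when there is one,
// before adding each operand's own word count.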

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv
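
// Typical registration with the legacy pass manager (illustrative only; the
// driver code that does this is not part of this file):
//   legacy::PassManager PM;
//   PM.add(clspv::createSPIRVProducerPass(out, &descriptor_map_entries,
//                                         sampler_map,
//                                         /*outputCInitList=*/false));
//   PM.run(module);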

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}
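
// Note on the C-initializer path above (for reference): when outputCInitList
// is true the module is first written to binaryTempOut, and each group of
// four bytes is then repacked little-endian into one 32-bit literal. For
// example, the leading bytes 0x03 0x02 0x23 0x07 become the word 0x07230203,
// the SPIR-V magic number.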

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
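
// For reference, the five header words emitted above are, in order: the magic
// number, the SPIR-V version, the generator word (Google's vendor ID in the
// upper 16 bits), the ID bound (a placeholder here, overwritten later by
// patchHeader), and the reserved schema word, which must be zero.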

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates LLVM IR that the rest of the flow needs, such as
  // global variables for arguments, constants, and pointer types for argument
  // access. This information is artificial because we need Vulkan SPIR-V
  // output. This function is executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constant 0 and 1, so the constants are added
          // here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsSampledImageRead(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);

            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components = dim;
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
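
// Sketch of the rewrite performed above when module-scope constants are not
// placed in a storage buffer (illustrative IR, not taken from a real test):
// a variable such as
//   @lut = addrspace(2) constant [4 x float] ...
// is re-created with the same initializer in AddressSpace::ModuleScopePrivate,
// the function types of its callers are recorded in GlobalConstFuncTypeMap,
// non-constant users are rewritten to the new variable, and the original
// __constant global is erased.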

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}
1115
David Neto22f144c2017-06-12 14:26:21 -04001116bool SPIRVProducerPass::FindExtInst(Module &M) {
1117 LLVMContext &Context = M.getContext();
1118 bool HasExtInst = false;
1119
1120 for (Function &F : M) {
1121 for (BasicBlock &BB : F) {
1122 for (Instruction &I : BB) {
1123 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1124 Function *Callee = Call->getCalledFunction();
1125 // Check whether this call is for extended instructions.
David Neto3fbb4072017-10-16 11:28:14 -04001126 auto callee_name = Callee->getName();
1127 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1128 const glsl::ExtInst IndirectEInst =
1129 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001130
David Neto3fbb4072017-10-16 11:28:14 -04001131 HasExtInst |=
1132 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1133
1134 if (IndirectEInst) {
1135 // Register extra constants if needed.
1136
1137 // Registers a type and constant for computing the result of the
1138 // given instruction. If the result of the instruction is a vector,
1139 // then make a splat vector constant with the same number of
1140 // elements.
1141 auto register_constant = [this, &I](Constant *constant) {
1142 FindType(constant->getType());
1143 FindConstant(constant);
1144 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1145 // Register the splat vector of the value with the same
1146 // width as the result of the instruction.
1147 auto *vec_constant = ConstantVector::getSplat(
1148 static_cast<unsigned>(vectorTy->getNumElements()),
1149 constant);
1150 FindConstant(vec_constant);
1151 FindType(vec_constant->getType());
1152 }
1153 };
1154 switch (IndirectEInst) {
1155 case glsl::ExtInstFindUMsb:
1156 // clz needs OpExtInst and OpISub with constant 31, or splat
1157 // vector of 31. Add it to the constant list here.
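// Illustrative: for 32-bit x, clz(x) is lowered roughly as
// 31 - FindUMsb(x), which is why the constant 31 (or its splat) is needed.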
1158 register_constant(
1159 ConstantInt::get(Type::getInt32Ty(Context), 31));
1160 break;
1161 case glsl::ExtInstAcos:
1162 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001163 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001164 case glsl::ExtInstAtan2:
1165 // We need 1/pi for acospi, asinpi, atanpi, and atan2pi.
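// Illustrative: acospi(x), for example, lowers roughly to
// acos(x) * (1/pi), so the 1/pi constant must be registered.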
1166 register_constant(
1167 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1168 break;
1169 default:
1170 assert(false && "internally inconsistent");
1171 }
David Neto22f144c2017-06-12 14:26:21 -04001172 }
1173 }
1174 }
1175 }
1176 }
1177
1178 return HasExtInst;
1179}
1180
1181void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1182 // Investigate global variable's type.
1183 FindType(GV.getType());
1184}
1185
1186void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1187 // Investigate function's type.
1188 FunctionType *FTy = F.getFunctionType();
1189
1190 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1191 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001192 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001193 if (GlobalConstFuncTyMap.count(FTy)) {
1194 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1195 SmallVector<Type *, 4> NewFuncParamTys;
1196 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1197 Type *ParamTy = FTy->getParamType(i);
1198 if (i == GVCstArgIdx) {
1199 Type *EleTy = ParamTy->getPointerElementType();
1200 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1201 }
1202
1203 NewFuncParamTys.push_back(ParamTy);
1204 }
1205
1206 FunctionType *NewFTy =
1207 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1208 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1209 FTy = NewFTy;
1210 }
1211
1212 FindType(FTy);
1213 } else {
1214 // Kernel functions take no parameters in the generated SPIR-V, so create
1215 // a new parameterless function type and add it to the type map.
1216 SmallVector<Type *, 4> NewFuncParamTys;
1217 FunctionType *NewFTy =
1218 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1219 FindType(NewFTy);
1220 }
1221
1222 // Investigate instructions' type in function body.
1223 for (BasicBlock &BB : F) {
1224 for (Instruction &I : BB) {
1225 if (isa<ShuffleVectorInst>(I)) {
1226 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1227 // Ignore type for mask of shuffle vector instruction.
1228 if (i == 2) {
1229 continue;
1230 }
1231
1232 Value *Op = I.getOperand(i);
1233 if (!isa<MetadataAsValue>(Op)) {
1234 FindType(Op->getType());
1235 }
1236 }
1237
1238 FindType(I.getType());
1239 continue;
1240 }
1241
David Neto862b7d82018-06-14 18:48:37 -04001242 CallInst *Call = dyn_cast<CallInst>(&I);
1243
1244 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001245 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001246 // This is a fake call representing access to a resource variable.
1247 // We handle that elsewhere.
1248 continue;
1249 }
1250
Alan Baker202c8c72018-08-13 13:47:44 -04001251 if (Call && Call->getCalledFunction()->getName().startswith(
1252 clspv::WorkgroupAccessorFunction())) {
1253 // This is a fake call representing access to a workgroup variable.
1254 // We handle that elsewhere.
1255 continue;
1256 }
1257
alan-bakerf083bed2020-01-29 08:15:42 -05001258 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1259 // OpCompositeExtract, which take literal values for indices. As a
1260 // result, don't map the types of the indices.
1261 if (I.getOpcode() == Instruction::ExtractValue) {
1262 FindType(I.getOperand(0)->getType());
1263 continue;
1264 }
1265 if (I.getOpcode() == Instruction::InsertValue) {
1266 FindType(I.getOperand(0)->getType());
1267 FindType(I.getOperand(1)->getType());
1268 continue;
1269 }
1270
1271 // #497: InsertElement maps to OpCompositeInsert and ExtractElement to
1272 // OpCompositeExtract when the index is a constant; don't map the index type.
1273 if (I.getOpcode() == Instruction::ExtractElement) {
1274 FindType(I.getOperand(0)->getType());
1275 Value *op1 = I.getOperand(1);
1276 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1277 FindType(op1->getType());
1278 }
1279 continue;
1280 }
1281 if (I.getOpcode() == Instruction::InsertElement) {
1282 FindType(I.getOperand(0)->getType());
1283 FindType(I.getOperand(1)->getType());
1284 Value *op2 = I.getOperand(2);
1285 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1286 FindType(op2->getType());
1287 }
1288 continue;
1289 }
1290
David Neto22f144c2017-06-12 14:26:21 -04001291 // Work through the operands of the instruction.
1292 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1293 Value *const Op = I.getOperand(i);
1294 // If any of the operands is a constant, find the type!
1295 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1296 FindType(Op->getType());
1297 }
1298 }
1299
1300 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001301 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001302 // Avoid checking the operand types of a call instruction.
1303 break;
1304 }
Alan Baker202c8c72018-08-13 13:47:44 -04001305 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1306 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1307 clspv::WorkgroupAccessorFunction())) {
1308 // This is a fake call representing access to a workgroup variable.
1309 // We handle that elsewhere.
1310 continue;
1311 }
1312 }
David Neto22f144c2017-06-12 14:26:21 -04001313 if (!isa<MetadataAsValue>(&Op)) {
1314 FindType(Op->getType());
1315 continue;
1316 }
1317 }
1318
David Neto22f144c2017-06-12 14:26:21 -04001319 // We don't want to track the type of this call as we are going to replace
1320 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001321 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001322 Call->getCalledFunction()->getName())) {
1323 continue;
1324 }
1325
1326 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1327 // If gep's base operand has ModuleScopePrivate address space, make gep
1328 // return ModuleScopePrivate address space.
1329 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1330 // Add pointer type with private address space for global constant to
1331 // type list.
1332 Type *EleTy = I.getType()->getPointerElementType();
1333 Type *NewPTy =
1334 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1335
1336 FindType(NewPTy);
1337 continue;
1338 }
1339 }
1340
1341 FindType(I.getType());
1342 }
1343 }
1344}
1345
David Neto862b7d82018-06-14 18:48:37 -04001346void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1347 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001348 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001349 0 < getSamplerMap().size()) {
1350 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1351 if (!SamplerStructTy) {
1352 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1353 }
1354
1355 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1356
1357 FindType(SamplerTy);
1358 }
1359}
1360
1361void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1362 // Record types so they are generated.
1363 TypesNeedingLayout.reset();
1364 StructTypesNeedingBlock.reset();
1365
1366 // To match older clspv codegen, generate the float type first if required
1367 // for images.
1368 for (const auto *info : ModuleOrderedResourceVars) {
1369 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1370 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001371 if (IsIntImageType(info->var_fn->getReturnType())) {
1372 // Nothing for now...
1373 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1374 FindType(Type::getInt32Ty(M.getContext()));
1375 }
1376
1377 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001378 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001379 }
1380 }
1381
1382 for (const auto *info : ModuleOrderedResourceVars) {
1383 Type *type = info->var_fn->getReturnType();
1384
1385 switch (info->arg_kind) {
1386 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001387 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001388 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1389 StructTypesNeedingBlock.insert(sty);
1390 } else {
1391 errs() << *type << "\n";
1392 llvm_unreachable("Buffer arguments must map to structures!");
1393 }
1394 break;
1395 case clspv::ArgKind::Pod:
1396 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1397 StructTypesNeedingBlock.insert(sty);
1398 } else {
1399 errs() << *type << "\n";
1400 llvm_unreachable("POD arguments must map to structures!");
1401 }
1402 break;
1403 case clspv::ArgKind::ReadOnlyImage:
1404 case clspv::ArgKind::WriteOnlyImage:
1405 case clspv::ArgKind::Sampler:
1406 // Sampler and image types map to the pointee type but
1407 // in the uniform constant address space.
1408 type = PointerType::get(type->getPointerElementType(),
1409 clspv::AddressSpace::UniformConstant);
1410 break;
1411 default:
1412 break;
1413 }
1414
1415 // The converted type is the type of the OpVariable we will generate.
1416 // If the pointee type is an array of size zero, FindType will convert it
1417 // to a runtime array.
1418 FindType(type);
1419 }
1420
alan-bakerdcd97412019-09-16 15:32:30 -04001421 // If module constants are clustered in a storage buffer then that struct
1422 // needs layout decorations.
1423 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1424 for (GlobalVariable &GV : M.globals()) {
1425 PointerType *PTy = cast<PointerType>(GV.getType());
1426 const auto AS = PTy->getAddressSpace();
1427 const bool module_scope_constant_external_init =
1428 (AS == AddressSpace::Constant) && GV.hasInitializer();
1429 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1430 if (module_scope_constant_external_init &&
1431 spv::BuiltInMax == BuiltinType) {
1432 StructTypesNeedingBlock.insert(
1433 cast<StructType>(PTy->getPointerElementType()));
1434 }
1435 }
1436 }
1437
David Neto862b7d82018-06-14 18:48:37 -04001438 // Traverse the arrays and structures underneath each Block, and
1439 // mark them as needing layout.
1440 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1441 StructTypesNeedingBlock.end());
1442 while (!work_list.empty()) {
1443 Type *type = work_list.back();
1444 work_list.pop_back();
1445 TypesNeedingLayout.insert(type);
1446 switch (type->getTypeID()) {
1447 case Type::ArrayTyID:
1448 work_list.push_back(type->getArrayElementType());
1449 if (!Hack_generate_runtime_array_stride_early) {
1450 // Remember this array type for deferred decoration.
1451 TypesNeedingArrayStride.insert(type);
1452 }
1453 break;
1454 case Type::StructTyID:
1455 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1456 work_list.push_back(elem_ty);
1457 }
1458 default:
1459 // This type and its contained types don't get layout.
1460 break;
1461 }
1462 }
1463}
1464
Alan Baker202c8c72018-08-13 13:47:44 -04001465void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1466 // The SpecId assignment for pointer-to-local arguments is recorded in
1467 // module-level metadata. Translate that information into local argument
1468 // information.
1469 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001470 if (!nmd)
1471 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001472 for (auto operand : nmd->operands()) {
1473 MDTuple *tuple = cast<MDTuple>(operand);
1474 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1475 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001476 ConstantAsMetadata *arg_index_md =
1477 cast<ConstantAsMetadata>(tuple->getOperand(1));
1478 int arg_index = static_cast<int>(
1479 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1480 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001481
1482 ConstantAsMetadata *spec_id_md =
1483 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001484 int spec_id = static_cast<int>(
1485 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001486
1487 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1488 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001489 if (LocalSpecIdInfoMap.count(spec_id))
1490 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001491
1492 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1493 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1494 nextID + 1, nextID + 2,
1495 nextID + 3, spec_id};
1496 LocalSpecIdInfoMap[spec_id] = info;
1497 nextID += 4;
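// Illustrative sketch (assumed shape of the later generation) of what the
// four IDs reserved above become:
//   %size = OpSpecConstant %uint 1      ; decorated with SpecId = spec_id
//   %arr  = OpTypeArray %elem_type %size
//   %ptr  = OpTypePointer Workgroup %arr
//   %var  = OpVariable %ptr Workgroup   ; the pointer-to-local argument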
1498
1499 // Ensure the types necessary for this argument get generated.
1500 Type *IdxTy = Type::getInt32Ty(M.getContext());
1501 FindConstant(ConstantInt::get(IdxTy, 0));
1502 FindType(IdxTy);
1503 FindType(arg->getType());
1504 }
1505}
1506
David Neto22f144c2017-06-12 14:26:21 -04001507void SPIRVProducerPass::FindType(Type *Ty) {
1508 TypeList &TyList = getTypeList();
1509
1510 if (0 != TyList.idFor(Ty)) {
1511 return;
1512 }
1513
1514 if (Ty->isPointerTy()) {
1515 auto AddrSpace = Ty->getPointerAddressSpace();
1516 if ((AddressSpace::Constant == AddrSpace) ||
1517 (AddressSpace::Global == AddrSpace)) {
1518 auto PointeeTy = Ty->getPointerElementType();
1519
1520 if (PointeeTy->isStructTy() &&
1521 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1522 FindType(PointeeTy);
1523 auto ActualPointerTy =
1524 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1525 FindType(ActualPointerTy);
1526 return;
1527 }
1528 }
1529 }
1530
David Neto862b7d82018-06-14 18:48:37 -04001531 // By convention, LLVM array type with 0 elements will map to
1532 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1533 // has a constant number of elements. We also need the i32 type for that
1534 // length constant.
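// Illustrative: [0 x float] maps to OpTypeRuntimeArray %float, while
// [4 x float] maps to OpTypeArray %float %uint_4 and so needs the i32
// length constant.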
1535 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1536 if (arrayTy->getNumElements() > 0) {
1537 LLVMContext &Context = Ty->getContext();
1538 FindType(Type::getInt32Ty(Context));
1539 }
David Neto22f144c2017-06-12 14:26:21 -04001540 }
1541
1542 for (Type *SubTy : Ty->subtypes()) {
1543 FindType(SubTy);
1544 }
1545
1546 TyList.insert(Ty);
1547}
1548
1549void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1550 // If the global variable has a (non-undef) initializer.
1551 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001552 // Generate the constant if it's not the initializer to a module scope
1553 // constant that we will expect in a storage buffer.
1554 const bool module_scope_constant_external_init =
1555 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1556 clspv::Option::ModuleConstantsInStorageBuffer();
1557 if (!module_scope_constant_external_init) {
1558 FindConstant(GV.getInitializer());
1559 }
David Neto22f144c2017-06-12 14:26:21 -04001560 }
1561}
1562
1563void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1564 // Investigate constants in function body.
1565 for (BasicBlock &BB : F) {
1566 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001567 if (auto *call = dyn_cast<CallInst>(&I)) {
1568 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001569 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001570 // We've handled these constants elsewhere, so skip it.
1571 continue;
1572 }
Alan Baker202c8c72018-08-13 13:47:44 -04001573 if (name.startswith(clspv::ResourceAccessorFunction())) {
1574 continue;
1575 }
1576 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001577 continue;
1578 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001579 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1580 // Skip the first operand that has the SPIR-V Opcode
1581 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1582 if (isa<Constant>(I.getOperand(i)) &&
1583 !isa<GlobalValue>(I.getOperand(i))) {
1584 FindConstant(I.getOperand(i));
1585 }
1586 }
1587 continue;
1588 }
David Neto22f144c2017-06-12 14:26:21 -04001589 }
1590
1591 if (isa<AllocaInst>(I)) {
1592 // An alloca instruction has a constant for the number of elements. Ignore it.
1593 continue;
1594 } else if (isa<ShuffleVectorInst>(I)) {
1595 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1596 // Ignore constant for mask of shuffle vector instruction.
1597 if (i == 2) {
1598 continue;
1599 }
1600
1601 if (isa<Constant>(I.getOperand(i)) &&
1602 !isa<GlobalValue>(I.getOperand(i))) {
1603 FindConstant(I.getOperand(i));
1604 }
1605 }
1606
1607 continue;
1608 } else if (isa<InsertElementInst>(I)) {
1609 // Handle InsertElement with <4 x i8> specially.
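// Illustrative: a <4 x i8> is modeled as a packed i32, so inserting byte k
// is roughly (word & ~(0xFF << (k*8))) | (value << (k*8)); the 0xFF mask
// and k*8 shift constants registered below feed that expansion.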
1610 Type *CompositeTy = I.getOperand(0)->getType();
1611 if (is4xi8vec(CompositeTy)) {
1612 LLVMContext &Context = CompositeTy->getContext();
1613 if (isa<Constant>(I.getOperand(0))) {
1614 FindConstant(I.getOperand(0));
1615 }
1616
1617 if (isa<Constant>(I.getOperand(1))) {
1618 FindConstant(I.getOperand(1));
1619 }
1620
1621 // Add mask constant 0xFF.
1622 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1623 FindConstant(CstFF);
1624
1625 // Add shift amount constant.
1626 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1627 uint64_t Idx = CI->getZExtValue();
1628 Constant *CstShiftAmount =
1629 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1630 FindConstant(CstShiftAmount);
1631 }
1632
1633 continue;
1634 }
1635
1636 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1637 // Ignore constant for index of InsertElement instruction.
1638 if (i == 2) {
1639 continue;
1640 }
1641
1642 if (isa<Constant>(I.getOperand(i)) &&
1643 !isa<GlobalValue>(I.getOperand(i))) {
1644 FindConstant(I.getOperand(i));
1645 }
1646 }
1647
1648 continue;
1649 } else if (isa<ExtractElementInst>(I)) {
1650 // Handle ExtractElement with <4 x i8> specially.
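// Illustrative: extracting byte k from the packed i32 form is roughly
// (word >> (k*8)) & 0xFF, hence the 0xFF mask and shift constants below.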
1651 Type *CompositeTy = I.getOperand(0)->getType();
1652 if (is4xi8vec(CompositeTy)) {
1653 LLVMContext &Context = CompositeTy->getContext();
1654 if (isa<Constant>(I.getOperand(0))) {
1655 FindConstant(I.getOperand(0));
1656 }
1657
1658 // Add mask constant 0xFF.
1659 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1660 FindConstant(CstFF);
1661
1662 // Add shift amount constant.
1663 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1664 uint64_t Idx = CI->getZExtValue();
1665 Constant *CstShiftAmount =
1666 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1667 FindConstant(CstShiftAmount);
1668 } else {
1669 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1670 FindConstant(Cst8);
1671 }
1672
1673 continue;
1674 }
1675
1676 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1677 // Ignore constant for index of ExtractElement instruction.
1678 if (i == 1) {
1679 continue;
1680 }
1681
1682 if (isa<Constant>(I.getOperand(i)) &&
1683 !isa<GlobalValue>(I.getOperand(i))) {
1684 FindConstant(I.getOperand(i));
1685 }
1686 }
1687
1688 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001689 } else if ((Instruction::Xor == I.getOpcode()) &&
1690 I.getType()->isIntegerTy(1)) {
1691 // We special case Xor where the type is i1 and one of the arguments is
1692 // a constant 1 (true): this becomes an OpLogicalNot in SPIR-V, so we
1693 // don't need the constant.
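// Illustrative: "%r = xor i1 %x, true" becomes "OpLogicalNot %bool %x".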
David Neto22f144c2017-06-12 14:26:21 -04001694 bool foundConstantTrue = false;
1695 for (Use &Op : I.operands()) {
1696 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1697 auto CI = cast<ConstantInt>(Op);
1698
1699 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001700 // If we already found the true constant, we might (probably only
1701 // on -O0) have an OpLogicalNot which is taking a constant
1702 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001703 FindConstant(Op);
1704 } else {
1705 foundConstantTrue = true;
1706 }
1707 }
1708 }
1709
1710 continue;
David Netod2de94a2017-08-28 17:27:47 -04001711 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001712 // Special case if i8 is not generally handled.
1713 if (!clspv::Option::Int8Support()) {
1714 // For truncation to i8 we mask against 255.
1715 Type *ToTy = I.getType();
1716 if (8u == ToTy->getPrimitiveSizeInBits()) {
1717 LLVMContext &Context = ToTy->getContext();
1718 Constant *Cst255 =
1719 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1720 FindConstant(Cst255);
1721 }
David Netod2de94a2017-08-28 17:27:47 -04001722 }
Neil Henning39672102017-09-29 14:33:13 +01001723 } else if (isa<AtomicRMWInst>(I)) {
1724 LLVMContext &Context = I.getContext();
1725
1726 FindConstant(
1727 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1728 FindConstant(ConstantInt::get(
1729 Type::getInt32Ty(Context),
1730 spv::MemorySemanticsUniformMemoryMask |
1731 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001732 }
1733
1734 for (Use &Op : I.operands()) {
1735 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1736 FindConstant(Op);
1737 }
1738 }
1739 }
1740 }
1741}
1742
1743void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001744 ValueList &CstList = getConstantList();
1745
David Netofb9a7972017-08-25 17:08:24 -04001746 // If V is already tracked, ignore it.
1747 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001748 return;
1749 }
1750
David Neto862b7d82018-06-14 18:48:37 -04001751 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1752 return;
1753 }
1754
David Neto22f144c2017-06-12 14:26:21 -04001755 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001756 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001757
1758 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001759 if (is4xi8vec(CstTy)) {
1760 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001761 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001762 }
1763 }
1764
1765 if (Cst->getNumOperands()) {
1766 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1767 ++I) {
1768 FindConstant(*I);
1769 }
1770
David Netofb9a7972017-08-25 17:08:24 -04001771 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001772 return;
1773 } else if (const ConstantDataSequential *CDS =
1774 dyn_cast<ConstantDataSequential>(Cst)) {
1775 // Add constants for each element to constant list.
1776 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1777 Constant *EleCst = CDS->getElementAsConstant(i);
1778 FindConstant(EleCst);
1779 }
1780 }
1781
1782 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001783 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001784 }
1785}
1786
1787spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1788 switch (AddrSpace) {
1789 default:
1790 llvm_unreachable("Unsupported OpenCL address space");
1791 case AddressSpace::Private:
1792 return spv::StorageClassFunction;
1793 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001794 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001795 case AddressSpace::Constant:
1796 return clspv::Option::ConstantArgsInUniformBuffer()
1797 ? spv::StorageClassUniform
1798 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001799 case AddressSpace::Input:
1800 return spv::StorageClassInput;
1801 case AddressSpace::Local:
1802 return spv::StorageClassWorkgroup;
1803 case AddressSpace::UniformConstant:
1804 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001805 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001806 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001807 case AddressSpace::ModuleScopePrivate:
1808 return spv::StorageClassPrivate;
1809 }
1810}
1811
David Neto862b7d82018-06-14 18:48:37 -04001812spv::StorageClass
1813SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1814 switch (arg_kind) {
1815 case clspv::ArgKind::Buffer:
1816 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001817 case clspv::ArgKind::BufferUBO:
1818 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001819 case clspv::ArgKind::Pod:
1820 return clspv::Option::PodArgsInUniformBuffer()
1821 ? spv::StorageClassUniform
1822 : spv::StorageClassStorageBuffer;
1823 case clspv::ArgKind::Local:
1824 return spv::StorageClassWorkgroup;
1825 case clspv::ArgKind::ReadOnlyImage:
1826 case clspv::ArgKind::WriteOnlyImage:
1827 case clspv::ArgKind::Sampler:
1828 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001829 default:
1830 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001831 }
1832}
1833
David Neto22f144c2017-06-12 14:26:21 -04001834spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1835 return StringSwitch<spv::BuiltIn>(Name)
1836 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1837 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1838 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1839 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1840 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1841 .Default(spv::BuiltInMax);
1842}
1843
1844void SPIRVProducerPass::GenerateExtInstImport() {
1845 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1846 uint32_t &ExtInstImportID = getOpExtInstImportID();
1847
1848 //
1849 // Generate OpExtInstImport.
1850 //
1851 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001852 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001853 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1854 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001855}
1856
alan-bakerb6b09dc2018-11-08 16:59:28 -05001857void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1858 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001859 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1860 ValueMapType &VMap = getValueMap();
1861 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001862 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001863
1864 // Map for OpTypeRuntimeArray. If an argument has pointer type, two SPIR-V
1865 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1866 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1867
1868 for (Type *Ty : getTypeList()) {
1869 // Update TypeMap with nextID for reference later.
1870 TypeMap[Ty] = nextID;
1871
1872 switch (Ty->getTypeID()) {
1873 default: {
1874 Ty->print(errs());
1875 llvm_unreachable("Unsupported type???");
1876 break;
1877 }
1878 case Type::MetadataTyID:
1879 case Type::LabelTyID: {
1880 // Ignore these types.
1881 break;
1882 }
1883 case Type::PointerTyID: {
1884 PointerType *PTy = cast<PointerType>(Ty);
1885 unsigned AddrSpace = PTy->getAddressSpace();
1886
1887 // For the purposes of our Vulkan SPIR-V type system, constant and global
1888 // are conflated.
1889 bool UseExistingOpTypePointer = false;
1890 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001891 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1892 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001893 // Check to see if we already created this type (for instance, if we
1894 // had a constant <type>* and a global <type>*, the type would be
1895 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001896 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1897 if (0 < TypeMap.count(GlobalTy)) {
1898 TypeMap[PTy] = TypeMap[GlobalTy];
1899 UseExistingOpTypePointer = true;
1900 break;
1901 }
David Neto22f144c2017-06-12 14:26:21 -04001902 }
1903 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001904 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1905 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001906
alan-bakerb6b09dc2018-11-08 16:59:28 -05001907 // Check to see if we already created this type (for instance, if we
1908 // had a constant <type>* and a global <type>*, the type would be
1909 // created by one of these types, and shared by both).
1910 auto ConstantTy =
1911 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001912 if (0 < TypeMap.count(ConstantTy)) {
1913 TypeMap[PTy] = TypeMap[ConstantTy];
1914 UseExistingOpTypePointer = true;
1915 }
David Neto22f144c2017-06-12 14:26:21 -04001916 }
1917 }
1918
David Neto862b7d82018-06-14 18:48:37 -04001919 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001920
David Neto862b7d82018-06-14 18:48:37 -04001921 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001922 //
1923 // Generate OpTypePointer.
1924 //
1925
1926 // OpTypePointer
1927 // Ops[0] = Storage Class
1928 // Ops[1] = Element Type ID
1929 SPIRVOperandList Ops;
1930
David Neto257c3892018-04-11 13:19:45 -04001931 Ops << MkNum(GetStorageClass(AddrSpace))
1932 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001933
David Neto87846742018-04-11 17:36:22 -04001934 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001935 SPIRVInstList.push_back(Inst);
1936 }
David Neto22f144c2017-06-12 14:26:21 -04001937 break;
1938 }
1939 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001940 StructType *STy = cast<StructType>(Ty);
1941
1942 // Handle sampler type.
1943 if (STy->isOpaque()) {
1944 if (STy->getName().equals("opencl.sampler_t")) {
1945 //
1946 // Generate OpTypeSampler
1947 //
1948 // Empty Ops.
1949 SPIRVOperandList Ops;
1950
David Neto87846742018-04-11 17:36:22 -04001951 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001952 SPIRVInstList.push_back(Inst);
1953 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001954 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1955 STy->getName().startswith("opencl.image1d_wo_t") ||
1956 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001957 STy->getName().startswith("opencl.image2d_wo_t") ||
1958 STy->getName().startswith("opencl.image3d_ro_t") ||
1959 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001960 //
1961 // Generate OpTypeImage
1962 //
1963 // Ops[0] = Sampled Type ID
1964 // Ops[1] = Dim ID
1965 // Ops[2] = Depth (Literal Number)
1966 // Ops[3] = Arrayed (Literal Number)
1967 // Ops[4] = MS (Literal Number)
1968 // Ops[5] = Sampled (Literal Number)
1969 // Ops[6] = Image Format ID
1970 //
1971 SPIRVOperandList Ops;
1972
alan-bakerf67468c2019-11-25 15:51:49 -05001973 uint32_t ImageTyID = nextID++;
1974 uint32_t SampledTyID = 0;
1975 if (STy->getName().contains(".float")) {
1976 SampledTyID = lookupType(Type::getFloatTy(Context));
1977 } else if (STy->getName().contains(".uint")) {
1978 SampledTyID = lookupType(Type::getInt32Ty(Context));
1979 } else if (STy->getName().contains(".int")) {
1980 // Generate a signed 32-bit integer if necessary.
1981 if (int32ID == 0) {
1982 int32ID = nextID++;
1983 SPIRVOperandList intOps;
1984 intOps << MkNum(32);
1985 intOps << MkNum(1);
1986 auto signed_int =
1987 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1988 SPIRVInstList.push_back(signed_int);
1989 }
1990 SampledTyID = int32ID;
1991
1992 // Generate a vec4 of the signed int if necessary.
1993 if (v4int32ID == 0) {
1994 v4int32ID = nextID++;
1995 SPIRVOperandList vecOps;
1996 vecOps << MkId(int32ID);
1997 vecOps << MkNum(4);
1998 auto int_vec =
1999 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2000 SPIRVInstList.push_back(int_vec);
2001 }
2002 } else {
2003 // This was likely an UndefValue.
2004 SampledTyID = lookupType(Type::getFloatTy(Context));
2005 }
David Neto257c3892018-04-11 13:19:45 -04002006 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002007
2008 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002009 if (STy->getName().startswith("opencl.image1d_ro_t") ||
2010 STy->getName().startswith("opencl.image1d_wo_t")) {
2011 DimID = spv::Dim1D;
2012 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2013 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002014 DimID = spv::Dim3D;
2015 }
David Neto257c3892018-04-11 13:19:45 -04002016 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002017
2018 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002019 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002020
2021 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04002022 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002023
2024 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002025 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002026
2027 // TODO: Set up Sampled.
2028 //
2029 // From Spec
2030 //
2031 // 0 indicates this is only known at run time, not at compile time
2032 // 1 indicates will be used with sampler
2033 // 2 indicates will be used without a sampler (a storage image)
2034 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002035 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002036 Sampled = 2;
2037 }
David Neto257c3892018-04-11 13:19:45 -04002038 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002039
2040 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002041 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002042
alan-bakerf67468c2019-11-25 15:51:49 -05002043 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002044 SPIRVInstList.push_back(Inst);
2045 break;
2046 }
2047 }
2048
2049 //
2050 // Generate OpTypeStruct
2051 //
2052 // Ops[0] ... Ops[n] = Member IDs
2053 SPIRVOperandList Ops;
2054
2055 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002056 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002057 }
2058
David Neto22f144c2017-06-12 14:26:21 -04002059 uint32_t STyID = nextID;
2060
alan-bakerb6b09dc2018-11-08 16:59:28 -05002061 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002062 SPIRVInstList.push_back(Inst);
2063
2064 // Generate OpMemberDecorate.
2065 auto DecoInsertPoint =
2066 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2067 [](SPIRVInstruction *Inst) -> bool {
2068 return Inst->getOpcode() != spv::OpDecorate &&
2069 Inst->getOpcode() != spv::OpMemberDecorate &&
2070 Inst->getOpcode() != spv::OpExtInstImport;
2071 });
2072
David Netoc463b372017-08-10 15:32:21 -04002073 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04002074 // Search for the correct offsets if this type was remapped.
2075 std::vector<uint32_t> *offsets = nullptr;
2076 auto iter = RemappedUBOTypeOffsets.find(STy);
2077 if (iter != RemappedUBOTypeOffsets.end()) {
2078 offsets = &iter->second;
2079 }
David Netoc463b372017-08-10 15:32:21 -04002080
David Neto862b7d82018-06-14 18:48:37 -04002081 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04002082 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2083 MemberIdx++) {
2084 // Ops[0] = Structure Type ID
2085 // Ops[1] = Member Index(Literal Number)
2086 // Ops[2] = Decoration (Offset)
2087 // Ops[3] = Byte Offset (Literal Number)
2088 Ops.clear();
2089
David Neto257c3892018-04-11 13:19:45 -04002090 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002091
alan-bakerb6b09dc2018-11-08 16:59:28 -05002092 auto ByteOffset =
2093 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04002094 if (offsets) {
2095 ByteOffset = (*offsets)[MemberIdx];
2096 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05002097 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04002098 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04002099 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04002100
David Neto87846742018-04-11 17:36:22 -04002101 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002102 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002103 }
2104
2105 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002106 if (StructTypesNeedingBlock.idFor(STy)) {
2107 Ops.clear();
2108 // Use Block decorations with StorageBuffer storage class.
2109 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002110
David Neto862b7d82018-06-14 18:48:37 -04002111 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2112 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002113 }
2114 break;
2115 }
2116 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002117 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002118
2119 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002120 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002121 SPIRVInstList.push_back(Inst);
2122 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002123 if (!clspv::Option::Int8Support()) {
2124 // i8 is added to TypeMap as i32.
2125 // No matter what LLVM type is requested first, always alias the
2126 // second one's SPIR-V type to be the same as the one we generated
2127 // first.
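// Illustrative: if i32 was generated first, a later i8 request reuses that
// OpTypeInt 32 id; if i8 comes first it is emitted 32 bits wide and a later
// i32 maps to it.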
2128 unsigned aliasToWidth = 0;
2129 if (BitWidth == 8) {
2130 aliasToWidth = 32;
2131 BitWidth = 32;
2132 } else if (BitWidth == 32) {
2133 aliasToWidth = 8;
2134 }
2135 if (aliasToWidth) {
2136 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2137 auto where = TypeMap.find(otherType);
2138 if (where == TypeMap.end()) {
2139 // Go ahead and make it, but also map the other type to it.
2140 TypeMap[otherType] = nextID;
2141 } else {
2142 // Alias this SPIR-V type to the existing type.
2143 TypeMap[Ty] = where->second;
2144 break;
2145 }
David Neto391aeb12017-08-26 15:51:58 -04002146 }
David Neto22f144c2017-06-12 14:26:21 -04002147 }
2148
David Neto257c3892018-04-11 13:19:45 -04002149 SPIRVOperandList Ops;
2150 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002151
2152 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002153 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002154 }
2155 break;
2156 }
2157 case Type::HalfTyID:
2158 case Type::FloatTyID:
2159 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002160 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002161 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002162
2163 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002164 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002165 break;
2166 }
2167 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002168 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002169 const uint64_t Length = ArrTy->getArrayNumElements();
2170 if (Length == 0) {
2171 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002172
David Neto862b7d82018-06-14 18:48:37 -04002173 // Only generate the type once.
2174 // TODO(dneto): Can it ever be generated more than once?
2175 // Doesn't LLVM type uniqueness guarantee we'll only see this
2176 // once?
2177 Type *EleTy = ArrTy->getArrayElementType();
2178 if (OpRuntimeTyMap.count(EleTy) == 0) {
2179 uint32_t OpTypeRuntimeArrayID = nextID;
2180 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002181
David Neto862b7d82018-06-14 18:48:37 -04002182 //
2183 // Generate OpTypeRuntimeArray.
2184 //
David Neto22f144c2017-06-12 14:26:21 -04002185
David Neto862b7d82018-06-14 18:48:37 -04002186 // OpTypeRuntimeArray
2187 // Ops[0] = Element Type ID
2188 SPIRVOperandList Ops;
2189 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002190
David Neto862b7d82018-06-14 18:48:37 -04002191 SPIRVInstList.push_back(
2192 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002193
David Neto862b7d82018-06-14 18:48:37 -04002194 if (Hack_generate_runtime_array_stride_early) {
2195 // Generate OpDecorate.
2196 auto DecoInsertPoint = std::find_if(
2197 SPIRVInstList.begin(), SPIRVInstList.end(),
2198 [](SPIRVInstruction *Inst) -> bool {
2199 return Inst->getOpcode() != spv::OpDecorate &&
2200 Inst->getOpcode() != spv::OpMemberDecorate &&
2201 Inst->getOpcode() != spv::OpExtInstImport;
2202 });
David Neto22f144c2017-06-12 14:26:21 -04002203
David Neto862b7d82018-06-14 18:48:37 -04002204 // Ops[0] = Target ID
2205 // Ops[1] = Decoration (ArrayStride)
2206 // Ops[2] = Stride Number(Literal Number)
2207 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002208
David Neto862b7d82018-06-14 18:48:37 -04002209 Ops << MkId(OpTypeRuntimeArrayID)
2210 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002211 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002212
David Neto862b7d82018-06-14 18:48:37 -04002213 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2214 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2215 }
2216 }
David Neto22f144c2017-06-12 14:26:21 -04002217
David Neto862b7d82018-06-14 18:48:37 -04002218 } else {
David Neto22f144c2017-06-12 14:26:21 -04002219
David Neto862b7d82018-06-14 18:48:37 -04002220 //
2221 // Generate OpConstant and OpTypeArray.
2222 //
2223
2224 //
2225 // Generate OpConstant for array length.
2226 //
2227 // Ops[0] = Result Type ID
2228 // Ops[1] .. Ops[n] = Values LiteralNumber
2229 SPIRVOperandList Ops;
2230
2231 Type *LengthTy = Type::getInt32Ty(Context);
2232 uint32_t ResTyID = lookupType(LengthTy);
2233 Ops << MkId(ResTyID);
2234
2235 assert(Length < UINT32_MAX);
2236 Ops << MkNum(static_cast<uint32_t>(Length));
2237
2238 // Add constant for length to constant list.
2239 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2240 AllocatedVMap[CstLength] = nextID;
2241 VMap[CstLength] = nextID;
2242 uint32_t LengthID = nextID;
2243
2244 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2245 SPIRVInstList.push_back(CstInst);
2246
2247 // Remember to generate ArrayStride later
2248 getTypesNeedingArrayStride().insert(Ty);
2249
2250 //
2251 // Generate OpTypeArray.
2252 //
2253 // Ops[0] = Element Type ID
2254 // Ops[1] = Array Length Constant ID
2255 Ops.clear();
2256
2257 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2258 Ops << MkId(EleTyID) << MkId(LengthID);
2259
2260 // Update TypeMap with nextID.
2261 TypeMap[Ty] = nextID;
2262
2263 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2264 SPIRVInstList.push_back(ArrayInst);
2265 }
David Neto22f144c2017-06-12 14:26:21 -04002266 break;
2267 }
2268 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002269 // <4 x i8> is changed to i32 if i8 is not generally supported.
2270 if (!clspv::Option::Int8Support() &&
2271 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002272 if (Ty->getVectorNumElements() == 4) {
2273 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2274 break;
2275 } else {
2276 Ty->print(errs());
2277 llvm_unreachable("Support above i8 vector type");
2278 }
2279 }
2280
2281 // Ops[0] = Component Type ID
2282 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002283 SPIRVOperandList Ops;
2284 Ops << MkId(lookupType(Ty->getVectorElementType()))
2285 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002286
alan-bakerb6b09dc2018-11-08 16:59:28 -05002287 SPIRVInstruction *inst =
2288 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002289 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002290 break;
2291 }
2292 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002293 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002294 SPIRVInstList.push_back(Inst);
2295 break;
2296 }
2297 case Type::FunctionTyID: {
2298 // Generate SPIRV instruction for function type.
2299 FunctionType *FTy = cast<FunctionType>(Ty);
2300
2301 // Ops[0] = Return Type ID
2302 // Ops[1] ... Ops[n] = Parameter Type IDs
2303 SPIRVOperandList Ops;
2304
2305 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002306 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002307
2308 // Find SPIRV instructions for parameter types
2309 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2310 // Find SPIRV instruction for parameter type.
2311 auto ParamTy = FTy->getParamType(k);
2312 if (ParamTy->isPointerTy()) {
2313 auto PointeeTy = ParamTy->getPointerElementType();
2314 if (PointeeTy->isStructTy() &&
2315 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2316 ParamTy = PointeeTy;
2317 }
2318 }
2319
David Netoc6f3ab22018-04-06 18:02:31 -04002320 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002321 }
2322
David Neto87846742018-04-11 17:36:22 -04002323 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002324 SPIRVInstList.push_back(Inst);
2325 break;
2326 }
2327 }
2328 }
2329
2330 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002331 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002332 //
2333 // Generate OpTypeSampledImage.
2334 //
2335 // Ops[0] = Image Type ID
2336 //
2337 SPIRVOperandList Ops;
2338
David Netoc6f3ab22018-04-06 18:02:31 -04002339 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002340
alan-bakerabd82722019-12-03 17:14:51 -05002341 // Update the image type map.
2342 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002343
David Neto87846742018-04-11 17:36:22 -04002344 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002345 SPIRVInstList.push_back(Inst);
2346 }
David Netoc6f3ab22018-04-06 18:02:31 -04002347
2348 // Generate types for pointer-to-local arguments.
Alan Baker202c8c72018-08-13 13:47:44 -04002349 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2350 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002351 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002352
2353 // Generate the spec constant.
2354 SPIRVOperandList Ops;
2355 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002356 SPIRVInstList.push_back(
2357 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002358
2359 // Generate the array type.
2360 Ops.clear();
2361 // The element type must have been created.
2362 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2363 assert(elem_ty_id);
2364 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2365
2366 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002367 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002368
2369 Ops.clear();
2370 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002371 SPIRVInstList.push_back(new SPIRVInstruction(
2372 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002373 }
David Neto22f144c2017-06-12 14:26:21 -04002374}
2375
2376void SPIRVProducerPass::GenerateSPIRVConstants() {
2377 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2378 ValueMapType &VMap = getValueMap();
2379 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2380 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002381 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002382
2383 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002384 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002385 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002386
2387 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002388 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002389 continue;
2390 }
2391
David Netofb9a7972017-08-25 17:08:24 -04002392 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002393 VMap[Cst] = nextID;
2394
2395 //
2396 // Generate OpConstant.
2397 //
2398
2399 // Ops[0] = Result Type ID
2400 // Ops[1] .. Ops[n] = Values LiteralNumber
2401 SPIRVOperandList Ops;
2402
David Neto257c3892018-04-11 13:19:45 -04002403 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002404
2405 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002406 spv::Op Opcode = spv::OpNop;
2407
2408 if (isa<UndefValue>(Cst)) {
2409 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002410 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002411 if (hack_undef && IsTypeNullable(Cst->getType())) {
2412 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002413 }
David Neto22f144c2017-06-12 14:26:21 -04002414 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2415 unsigned BitWidth = CI->getBitWidth();
2416 if (BitWidth == 1) {
2417 // If the bitwidth of constant is 1, generate OpConstantTrue or
2418 // OpConstantFalse.
2419 if (CI->getZExtValue()) {
2420 // Ops[0] = Result Type ID
2421 Opcode = spv::OpConstantTrue;
2422 } else {
2423 // Ops[0] = Result Type ID
2424 Opcode = spv::OpConstantFalse;
2425 }
David Neto22f144c2017-06-12 14:26:21 -04002426 } else {
2427 auto V = CI->getZExtValue();
2428 LiteralNum.push_back(V & 0xFFFFFFFF);
2429
2430 if (BitWidth > 32) {
2431 LiteralNum.push_back(V >> 32);
2432 }
2433
2434 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002435
David Neto257c3892018-04-11 13:19:45 -04002436 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002437 }
2438 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2439 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2440 Type *CFPTy = CFP->getType();
2441 if (CFPTy->isFloatTy()) {
2442 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002443 } else if (CFPTy->isDoubleTy()) {
2444 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2445 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002446 } else if (CFPTy->isHalfTy()) {
2447 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002448 } else {
2449 CFPTy->print(errs());
2450 llvm_unreachable("Implement this ConstantFP Type");
2451 }
2452
2453 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002454
David Neto257c3892018-04-11 13:19:45 -04002455 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002456 } else if (isa<ConstantDataSequential>(Cst) &&
2457 cast<ConstantDataSequential>(Cst)->isString()) {
2458 Cst->print(errs());
2459 llvm_unreachable("Implement this Constant");
2460
2461 } else if (const ConstantDataSequential *CDS =
2462 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002463 // Let's convert <4 x i8> constant to int constant specially.
2464 // This case occurs when all the values are specified as constant
2465 // ints.
2466 Type *CstTy = Cst->getType();
2467 if (is4xi8vec(CstTy)) {
2468 LLVMContext &Context = CstTy->getContext();
2469
2470 //
2471 // Generate OpConstant with OpTypeInt 32 0.
2472 //
Neil Henning39672102017-09-29 14:33:13 +01002473 uint32_t IntValue = 0;
2474 for (unsigned k = 0; k < 4; k++) {
2475 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002476 IntValue = (IntValue << 8) | (Val & 0xffu);
2477 }
2478
2479 Type *i32 = Type::getInt32Ty(Context);
2480 Constant *CstInt = ConstantInt::get(i32, IntValue);
2481 // If this constant is already registered on VMap, use it.
2482 if (VMap.count(CstInt)) {
2483 uint32_t CstID = VMap[CstInt];
2484 VMap[Cst] = CstID;
2485 continue;
2486 }
2487
David Neto257c3892018-04-11 13:19:45 -04002488 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002489
David Neto87846742018-04-11 17:36:22 -04002490 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002491 SPIRVInstList.push_back(CstInst);
2492
2493 continue;
2494 }
2495
2496 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002497 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2498 Constant *EleCst = CDS->getElementAsConstant(k);
2499 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002500 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002501 }
2502
2503 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002504 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2505 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002506 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002507 Type *CstTy = Cst->getType();
2508 if (is4xi8vec(CstTy)) {
2509 LLVMContext &Context = CstTy->getContext();
2510
2511 //
2512 // Generate OpConstant with OpTypeInt 32 0.
2513 //
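      // Same byte packing as the ConstantDataSequential case above; operands
      // that are not ConstantInt (e.g. undef) contribute 0x00 for their byte.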
Neil Henning39672102017-09-29 14:33:13 +01002514 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002515 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2516 I != E; ++I) {
2517 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002518 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002519 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2520 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002521 }
David Neto49351ac2017-08-26 17:32:20 -04002522 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002523 }
2524
David Neto49351ac2017-08-26 17:32:20 -04002525 Type *i32 = Type::getInt32Ty(Context);
2526 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002527 // If this constant is already registered on VMap, use it.
2528 if (VMap.count(CstInt)) {
2529 uint32_t CstID = VMap[CstInt];
2530 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002531 continue;
David Neto22f144c2017-06-12 14:26:21 -04002532 }
2533
David Neto257c3892018-04-11 13:19:45 -04002534 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002535
David Neto87846742018-04-11 17:36:22 -04002536 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002537 SPIRVInstList.push_back(CstInst);
2538
David Neto19a1bad2017-08-25 15:01:41 -04002539 continue;
David Neto22f144c2017-06-12 14:26:21 -04002540 }
2541
2542 // We use a constant composite in SPIR-V for our constant aggregate in
2543 // LLVM.
2544 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002545
2546 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2547 // Look up the ID of the element of this aggregate (for which we will
2548 // already have created a constant).
2549 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2550
2551 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002552 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002553 }
2554 } else if (Cst->isNullValue()) {
2555 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002556 } else {
2557 Cst->print(errs());
2558 llvm_unreachable("Unsupported Constant???");
2559 }
2560
alan-baker5b86ed72019-02-15 08:26:50 -05002561 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2562 // Null pointer requires variable pointers.
2563 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2564 }
2565
David Neto87846742018-04-11 17:36:22 -04002566 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002567 SPIRVInstList.push_back(CstInst);
2568 }
2569}
2570
2571void SPIRVProducerPass::GenerateSamplers(Module &M) {
2572 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002573
alan-bakerb6b09dc2018-11-08 16:59:28 -05002574 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002575 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002576 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2577 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002578
David Neto862b7d82018-06-14 18:48:37 -04002579 // We might have samplers in the sampler map that are not used
2580 // in the translation unit. We still need to allocate variables
2581 // and bindings for them.
2582 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002583
Kévin Petitdf71de32019-04-09 14:09:50 +01002584 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002585 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002586 if (!var_fn)
2587 return;
alan-baker09cb9802019-12-10 13:16:27 -05002588
David Neto862b7d82018-06-14 18:48:37 -04002589 for (auto user : var_fn->users()) {
2590 // Populate SamplerLiteralToDescriptorSetMap and
2591 // SamplerLiteralToBindingMap.
2592 //
2593 // Look for calls like
2594 // call %opencl.sampler_t addrspace(2)*
2595 // @clspv.sampler.var.literal(
2596 // i32 descriptor,
2597 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002598 // i32 (index-into-sampler-map|sampler_mask))
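    // For illustration only (operand values are hypothetical):
    //   %s = call %opencl.sampler_t addrspace(2)*
    //            @clspv.sampler.var.literal(i32 0, i32 3, i32 5)
    // records descriptor set 0 and binding 3 for the sampler whose literal
    // value (or sampler-map index, when the sampler map is used) is 5.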
alan-bakerb6b09dc2018-11-08 16:59:28 -05002599 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002600 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002601 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002602 auto sampler_value = third_param;
2603 if (clspv::Option::UseSamplerMap()) {
2604 if (third_param >= sampler_map.size()) {
2605 errs() << "Out of bounds index to sampler map: " << third_param;
2606 llvm_unreachable("bad sampler init: out of bounds");
2607 }
2608 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002609 }
2610
David Neto862b7d82018-06-14 18:48:37 -04002611 const auto descriptor_set = static_cast<unsigned>(
2612 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2613 const auto binding = static_cast<unsigned>(
2614 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2615
2616 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2617 SamplerLiteralToBindingMap[sampler_value] = binding;
2618 used_bindings.insert(binding);
2619 }
2620 }
2621
alan-baker09cb9802019-12-10 13:16:27 -05002622 DenseSet<size_t> seen;
2623 for (auto user : var_fn->users()) {
2624 if (!isa<CallInst>(user))
2625 continue;
2626
2627 auto call = cast<CallInst>(user);
2628 const unsigned third_param = static_cast<unsigned>(
2629 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2630
2631 // Already allocated a variable for this value.
2632 if (!seen.insert(third_param).second)
2633 continue;
2634
2635 auto sampler_value = third_param;
2636 if (clspv::Option::UseSamplerMap()) {
2637 sampler_value = sampler_map[third_param].first;
2638 }
2639
David Neto22f144c2017-06-12 14:26:21 -04002640 // Generate OpVariable.
2641 //
2642 // GIDOps[0] : Result Type ID
2643 // GIDOps[1] : Storage Class
2644 SPIRVOperandList Ops;
2645
David Neto257c3892018-04-11 13:19:45 -04002646 Ops << MkId(lookupType(SamplerTy))
2647 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002648
David Neto862b7d82018-06-14 18:48:37 -04002649 auto sampler_var_id = nextID++;
2650 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002651 SPIRVInstList.push_back(Inst);
2652
alan-baker09cb9802019-12-10 13:16:27 -05002653 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002654
2655 // Find Insert Point for OpDecorate.
2656 auto DecoInsertPoint =
2657 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2658 [](SPIRVInstruction *Inst) -> bool {
2659 return Inst->getOpcode() != spv::OpDecorate &&
2660 Inst->getOpcode() != spv::OpMemberDecorate &&
2661 Inst->getOpcode() != spv::OpExtInstImport;
2662 });
2663
2664 // Ops[0] = Target ID
2665 // Ops[1] = Decoration (DescriptorSet)
2666 // Ops[2] = LiteralNumber according to Decoration
2667 Ops.clear();
2668
David Neto862b7d82018-06-14 18:48:37 -04002669 unsigned descriptor_set;
2670 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002671 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002672 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002673 // This sampler is not actually used. Find the next one.
2674 for (binding = 0; used_bindings.count(binding); binding++)
2675 ;
2676 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2677 used_bindings.insert(binding);
2678 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002679 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2680 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002681
alan-baker09cb9802019-12-10 13:16:27 -05002682 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002683 descriptorMapEntries->emplace_back(std::move(sampler_data),
2684 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002685 }
2686
2687 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2688 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002689
David Neto87846742018-04-11 17:36:22 -04002690 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002691 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2692
2693 // Ops[0] = Target ID
2694 // Ops[1] = Decoration (Binding)
2695 // Ops[2] = LiteralNumber according to Decoration
2696 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002697 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2698 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002699
David Neto87846742018-04-11 17:36:22 -04002700 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002701 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2702 }
David Neto862b7d82018-06-14 18:48:37 -04002703}
David Neto22f144c2017-06-12 14:26:21 -04002704
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002705void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002706 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2707 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002708
David Neto862b7d82018-06-14 18:48:37 -04002709 // Generate variables. Make one for each resource var info object.
2710 for (auto *info : ModuleOrderedResourceVars) {
2711 Type *type = info->var_fn->getReturnType();
2712 // Remap the address space for opaque types.
2713 switch (info->arg_kind) {
2714 case clspv::ArgKind::Sampler:
2715 case clspv::ArgKind::ReadOnlyImage:
2716 case clspv::ArgKind::WriteOnlyImage:
2717 type = PointerType::get(type->getPointerElementType(),
2718 clspv::AddressSpace::UniformConstant);
2719 break;
2720 default:
2721 break;
2722 }
David Neto22f144c2017-06-12 14:26:21 -04002723
David Neto862b7d82018-06-14 18:48:37 -04002724 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002725
David Neto862b7d82018-06-14 18:48:37 -04002726 const auto type_id = lookupType(type);
2727 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2728 SPIRVOperandList Ops;
2729 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002730
David Neto862b7d82018-06-14 18:48:37 -04002731 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2732 SPIRVInstList.push_back(Inst);
2733
2734 // Map calls to the variable-builtin-function.
2735 for (auto &U : info->var_fn->uses()) {
2736 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2737 const auto set = unsigned(
2738 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2739 const auto binding = unsigned(
2740 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2741 if (set == info->descriptor_set && binding == info->binding) {
2742 switch (info->arg_kind) {
2743 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002744 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002745 case clspv::ArgKind::Pod:
2746 // The call maps to the variable directly.
2747 VMap[call] = info->var_id;
2748 break;
2749 case clspv::ArgKind::Sampler:
2750 case clspv::ArgKind::ReadOnlyImage:
2751 case clspv::ArgKind::WriteOnlyImage:
2752 // The call maps to a load we generate later.
2753 ResourceVarDeferredLoadCalls[call] = info->var_id;
2754 break;
2755 default:
2756 llvm_unreachable("Unhandled arg kind");
2757 }
2758 }
David Neto22f144c2017-06-12 14:26:21 -04002759 }
David Neto862b7d82018-06-14 18:48:37 -04002760 }
2761 }
David Neto22f144c2017-06-12 14:26:21 -04002762
David Neto862b7d82018-06-14 18:48:37 -04002763 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002764
David Neto862b7d82018-06-14 18:48:37 -04002765 // Find Insert Point for OpDecorate.
2766 auto DecoInsertPoint =
2767 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2768 [](SPIRVInstruction *Inst) -> bool {
2769 return Inst->getOpcode() != spv::OpDecorate &&
2770 Inst->getOpcode() != spv::OpMemberDecorate &&
2771 Inst->getOpcode() != spv::OpExtInstImport;
2772 });
2773
2774 SPIRVOperandList Ops;
2775 for (auto *info : ModuleOrderedResourceVars) {
2776 // Decorate with DescriptorSet and Binding.
2777 Ops.clear();
2778 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2779 << MkNum(info->descriptor_set);
2780 SPIRVInstList.insert(DecoInsertPoint,
2781 new SPIRVInstruction(spv::OpDecorate, Ops));
2782
2783 Ops.clear();
2784 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2785 << MkNum(info->binding);
2786 SPIRVInstList.insert(DecoInsertPoint,
2787 new SPIRVInstruction(spv::OpDecorate, Ops));
2788
alan-bakere9308012019-03-15 10:25:13 -04002789 if (info->coherent) {
2790 // Decorate with Coherent if required for the variable.
2791 Ops.clear();
2792 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2793 SPIRVInstList.insert(DecoInsertPoint,
2794 new SPIRVInstruction(spv::OpDecorate, Ops));
2795 }
2796
David Neto862b7d82018-06-14 18:48:37 -04002797 // Generate NonWritable and NonReadable
2798 switch (info->arg_kind) {
2799 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002800 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002801 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2802 clspv::AddressSpace::Constant) {
2803 Ops.clear();
2804 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2805 SPIRVInstList.insert(DecoInsertPoint,
2806 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002807 }
David Neto862b7d82018-06-14 18:48:37 -04002808 break;
David Neto862b7d82018-06-14 18:48:37 -04002809 case clspv::ArgKind::WriteOnlyImage:
2810 Ops.clear();
2811 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2812 SPIRVInstList.insert(DecoInsertPoint,
2813 new SPIRVInstruction(spv::OpDecorate, Ops));
2814 break;
2815 default:
2816 break;
David Neto22f144c2017-06-12 14:26:21 -04002817 }
2818 }
2819}
2820
2821void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002822 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002823 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2824 ValueMapType &VMap = getValueMap();
2825 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002826 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002827
2828 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2829 Type *Ty = GV.getType();
2830 PointerType *PTy = cast<PointerType>(Ty);
2831
2832 uint32_t InitializerID = 0;
2833
2834 // Workgroup size is handled differently (it goes into a constant)
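  // For illustration (kernel and sizes are hypothetical): a kernel declared
  // with
  //   __attribute__((reqd_work_group_size(8, 4, 1)))
  // carries "reqd_work_group_size" metadata of the form !{i32 8, i32 4, i32 1};
  // the loop below folds those dimensions into an OpConstantComposite used as
  // the initializer of the WorkgroupSize builtin variable.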
2835 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2836 std::vector<bool> HasMDVec;
2837 uint32_t PrevXDimCst = 0xFFFFFFFF;
2838 uint32_t PrevYDimCst = 0xFFFFFFFF;
2839 uint32_t PrevZDimCst = 0xFFFFFFFF;
2840 for (Function &Func : *GV.getParent()) {
2841 if (Func.isDeclaration()) {
2842 continue;
2843 }
2844
2845 // We only need to check kernels.
2846 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2847 continue;
2848 }
2849
2850 if (const MDNode *MD =
2851 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2852 uint32_t CurXDimCst = static_cast<uint32_t>(
2853 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2854 uint32_t CurYDimCst = static_cast<uint32_t>(
2855 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2856 uint32_t CurZDimCst = static_cast<uint32_t>(
2857 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2858
2859 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2860 PrevZDimCst == 0xFFFFFFFF) {
2861 PrevXDimCst = CurXDimCst;
2862 PrevYDimCst = CurYDimCst;
2863 PrevZDimCst = CurZDimCst;
2864 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2865 CurZDimCst != PrevZDimCst) {
2866 llvm_unreachable(
2867 "reqd_work_group_size must be the same across all kernels");
2868 } else {
2869 continue;
2870 }
2871
2872 //
2873 // Generate OpConstantComposite.
2874 //
2875 // Ops[0] : Result Type ID
2876 // Ops[1] : Constant size for x dimension.
2877 // Ops[2] : Constant size for y dimension.
2878 // Ops[3] : Constant size for z dimension.
2879 SPIRVOperandList Ops;
2880
2881 uint32_t XDimCstID =
2882 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2883 uint32_t YDimCstID =
2884 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2885 uint32_t ZDimCstID =
2886 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2887
2888 InitializerID = nextID;
2889
David Neto257c3892018-04-11 13:19:45 -04002890 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2891 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002892
David Neto87846742018-04-11 17:36:22 -04002893 auto *Inst =
2894 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002895 SPIRVInstList.push_back(Inst);
2896
2897 HasMDVec.push_back(true);
2898 } else {
2899 HasMDVec.push_back(false);
2900 }
2901 }
2902
2903 // Check that all kernels have the same reqd_work_group_size definition.
2904 bool HasMD = false;
2905 if (!HasMDVec.empty()) {
2906 HasMD = HasMDVec[0];
2907 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2908 if (HasMD != HasMDVec[i]) {
2909 llvm_unreachable(
2910 "Kernels should have consistent work group size definition");
2911 }
2912 }
2913 }
2914
2915 // If the kernels do not have reqd_work_group_size metadata, generate
2916 // OpSpecConstants for the x/y/z dimensions.
2917 if (!HasMD) {
2918 //
2919 // Generate OpSpecConstants for x/y/z dimension.
2920 //
2921 // Ops[0] : Result Type ID
2922 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
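      // A sketch of the intended output (names and IDs are illustrative only):
      //   %x = OpSpecConstant %uint 1   ; decorated later with SpecId 0
      //   %y = OpSpecConstant %uint 1   ; decorated later with SpecId 1
      //   %z = OpSpecConstant %uint 1   ; decorated later with SpecId 2
      //   %size = OpSpecConstantComposite %v3uint %x %y %z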
2923 uint32_t XDimCstID = 0;
2924 uint32_t YDimCstID = 0;
2925 uint32_t ZDimCstID = 0;
2926
David Neto22f144c2017-06-12 14:26:21 -04002927 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002928 uint32_t result_type_id =
2929 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002930
David Neto257c3892018-04-11 13:19:45 -04002931 // X Dimension
2932 Ops << MkId(result_type_id) << MkNum(1);
2933 XDimCstID = nextID++;
2934 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002935 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002936
2937 // Y Dimension
2938 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002939 Ops << MkId(result_type_id) << MkNum(1);
2940 YDimCstID = nextID++;
2941 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002942 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002943
2944 // Z Dimension
2945 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002946 Ops << MkId(result_type_id) << MkNum(1);
2947 ZDimCstID = nextID++;
2948 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002949 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002950
David Neto257c3892018-04-11 13:19:45 -04002951 BuiltinDimVec.push_back(XDimCstID);
2952 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002953 BuiltinDimVec.push_back(ZDimCstID);
2954
David Neto22f144c2017-06-12 14:26:21 -04002955 //
2956 // Generate OpSpecConstantComposite.
2957 //
2958 // Ops[0] : Result Type ID
2959 // Ops[1] : Constant size for x dimension.
2960 // Ops[2] : Constant size for y dimension.
2961 // Ops[3] : Constant size for z dimension.
2962 InitializerID = nextID;
2963
2964 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002965 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2966 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002967
David Neto87846742018-04-11 17:36:22 -04002968 auto *Inst =
2969 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002970 SPIRVInstList.push_back(Inst);
2971 }
2972 }
2973
David Neto22f144c2017-06-12 14:26:21 -04002974 VMap[&GV] = nextID;
2975
2976 //
2977 // Generate OpVariable.
2978 //
2979 // GIDOps[0] : Result Type ID
2980 // GIDOps[1] : Storage Class
2981 SPIRVOperandList Ops;
2982
David Neto85082642018-03-24 06:55:20 -07002983 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002984 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002985
David Neto85082642018-03-24 06:55:20 -07002986 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002987 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002988 clspv::Option::ModuleConstantsInStorageBuffer();
2989
Kévin Petit23d5f182019-08-13 16:21:29 +01002990 if (GV.hasInitializer()) {
2991 auto GVInit = GV.getInitializer();
2992 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
2993 assert(VMap.count(GVInit) == 1);
2994 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07002995 }
2996 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002997
2998 if (0 != InitializerID) {
2999 // Emit the ID of the initializer as part of the variable definition.
3000 Ops << MkId(InitializerID);
3001 }
David Neto85082642018-03-24 06:55:20 -07003002 const uint32_t var_id = nextID++;
3003
David Neto87846742018-04-11 17:36:22 -04003004 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003005 SPIRVInstList.push_back(Inst);
3006
3007 // If we have a builtin.
3008 if (spv::BuiltInMax != BuiltinType) {
3009 // Find Insert Point for OpDecorate.
3010 auto DecoInsertPoint =
3011 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3012 [](SPIRVInstruction *Inst) -> bool {
3013 return Inst->getOpcode() != spv::OpDecorate &&
3014 Inst->getOpcode() != spv::OpMemberDecorate &&
3015 Inst->getOpcode() != spv::OpExtInstImport;
3016 });
3017 //
3018 // Generate OpDecorate.
3019 //
3020 // DOps[0] = Target ID
3021 // DOps[1] = Decoration (Builtin)
3022 // DOps[2] = BuiltIn ID
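    // Illustrative result (the variable name is hypothetical):
    //   OpDecorate %var BuiltIn GlobalInvocationId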
3023 uint32_t ResultID;
3024
3025 // WorkgroupSize is different: we decorate the constant composite that holds
3026 // its value, rather than the variable that we use to access the value.
3027 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3028 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003029 // Save both the value and variable IDs for later.
3030 WorkgroupSizeValueID = InitializerID;
3031 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003032 } else {
3033 ResultID = VMap[&GV];
3034 }
3035
3036 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003037 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3038 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003039
David Neto87846742018-04-11 17:36:22 -04003040 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003041 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003042 } else if (module_scope_constant_external_init) {
3043 // This module scope constant is initialized from a storage buffer with data
3044 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003045 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003046
David Neto862b7d82018-06-14 18:48:37 -04003047 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003048 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3049 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003050 std::string hexbytes;
3051 llvm::raw_string_ostream str(hexbytes);
3052 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003053 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3054 str.str()};
3055 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3056 0);
David Neto85082642018-03-24 06:55:20 -07003057
3058 // Find Insert Point for OpDecorate.
3059 auto DecoInsertPoint =
3060 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3061 [](SPIRVInstruction *Inst) -> bool {
3062 return Inst->getOpcode() != spv::OpDecorate &&
3063 Inst->getOpcode() != spv::OpMemberDecorate &&
3064 Inst->getOpcode() != spv::OpExtInstImport;
3065 });
3066
David Neto257c3892018-04-11 13:19:45 -04003067 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003068 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003069 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3070 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003071 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003072
3073 // OpDecorate %var DescriptorSet <descriptor_set>
3074 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003075 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3076 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003077 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003078 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003079 }
3080}
3081
David Netoc6f3ab22018-04-06 18:02:31 -04003082void SPIRVProducerPass::GenerateWorkgroupVars() {
3083 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003084 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3085 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003086 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003087
3088 // Generate OpVariable.
3089 //
3090 // GIDOps[0] : Result Type ID
3091 // GIDOps[1] : Storage Class
3092 SPIRVOperandList Ops;
3093 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3094
3095 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003096 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003097 }
3098}
3099
David Neto862b7d82018-06-14 18:48:37 -04003100void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3101 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003102 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3103 return;
3104 }
David Neto862b7d82018-06-14 18:48:37 -04003105 // Gather the list of resources that are used by this function's arguments.
3106 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3107
alan-bakerf5e5f692018-11-27 08:33:24 -05003108 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3109 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003110 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003111 std::string kind =
3112 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3113 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003114 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003115 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003116 };
3117
3118 auto *fty = F.getType()->getPointerElementType();
3119 auto *func_ty = dyn_cast<FunctionType>(fty);
3120
alan-baker038e9242019-04-19 22:14:41 -04003121 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003122 // If an argument maps to a resource variable, then get descriptor set and
3123 // binding from the resource variable. Other info comes from the metadata.
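  // Illustrative shape of one "kernel_arg_map" entry, matching the 7-operand
  // assert below (the name, kind string, and values are hypothetical):
  //   !{!"dst", i32 0, i32 0, i32 0, i32 4, !"buffer", i32 -1}
  // i.e. argument name, original index, remapped index, offset, size,
  // argument kind, and spec id.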
3124 const auto *arg_map = F.getMetadata("kernel_arg_map");
3125 if (arg_map) {
3126 for (const auto &arg : arg_map->operands()) {
3127 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003128 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003129 const auto name =
3130 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3131 const auto old_index =
3132 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3133 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003134 const size_t new_index = static_cast<size_t>(
3135 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003136 const auto offset =
3137 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003138 const auto arg_size =
3139 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003140 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003141 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003142 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003143 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003144
3145 uint32_t descriptor_set = 0;
3146 uint32_t binding = 0;
3147 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003148 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3149 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003150 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003151 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003152 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003153 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3154 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3155 DL));
David Neto862b7d82018-06-14 18:48:37 -04003156 } else {
3157 auto *info = resource_var_at_index[new_index];
3158 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003159 descriptor_set = info->descriptor_set;
3160 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003161 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003162 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3163 binding);
David Neto862b7d82018-06-14 18:48:37 -04003164 }
3165 } else {
3166 // There is no argument map.
3167 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003168 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003169
3170 SmallVector<Argument *, 4> arguments;
3171 for (auto &arg : F.args()) {
3172 arguments.push_back(&arg);
3173 }
3174
3175 unsigned arg_index = 0;
3176 for (auto *info : resource_var_at_index) {
3177 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003178 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003179 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003180 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003181 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003182 }
3183
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003184 // Local pointer arguments are unused in this case. Offset is always
3185 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003186 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003187 F.getName().str(),
3188 arg->getName().str(),
3189 arg_index,
3190 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3191 0,
3192 0,
3193 0,
3194 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003195 descriptorMapEntries->emplace_back(std::move(kernel_data),
3196 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003197 }
3198 arg_index++;
3199 }
3200 // Generate mappings for pointer-to-local arguments.
3201 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3202 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003203 auto where = LocalArgSpecIds.find(arg);
3204 if (where != LocalArgSpecIds.end()) {
3205 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003206 // Pod argument members are unused in this case.
3207 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003208 F.getName().str(),
3209 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003210 arg_index,
3211 ArgKind::Local,
3212 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003213 static_cast<uint32_t>(
3214 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003215 0,
3216 0};
3217 // Pointer-to-local arguments do not utilize descriptor set and binding.
3218 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003219 }
3220 }
3221 }
3222}
3223
David Neto22f144c2017-06-12 14:26:21 -04003224void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3225 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3226 ValueMapType &VMap = getValueMap();
3227 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003228 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3229 auto &GlobalConstArgSet = getGlobalConstArgSet();
3230
3231 FunctionType *FTy = F.getFunctionType();
3232
3233 //
David Neto22f144c2017-06-12 14:26:21 -04003234 // Generate OpFunction.
3235 //
3236
3237 // FOps[0] : Result Type ID
3238 // FOps[1] : Function Control
3239 // FOps[2] : Function Type ID
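  // Illustrative result (names are hypothetical):
  //   %f = OpFunction %void None %fn_type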
3240 SPIRVOperandList FOps;
3241
3242 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003243 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003244
3245 // Check function attributes for SPIRV Function Control.
3246 uint32_t FuncControl = spv::FunctionControlMaskNone;
3247 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3248 FuncControl |= spv::FunctionControlInlineMask;
3249 }
3250 if (F.hasFnAttribute(Attribute::NoInline)) {
3251 FuncControl |= spv::FunctionControlDontInlineMask;
3252 }
3253 // TODO: Check llvm attribute for Function Control Pure.
3254 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3255 FuncControl |= spv::FunctionControlPureMask;
3256 }
3257 // TODO: Check llvm attribute for Function Control Const.
3258 if (F.hasFnAttribute(Attribute::ReadNone)) {
3259 FuncControl |= spv::FunctionControlConstMask;
3260 }
3261
David Neto257c3892018-04-11 13:19:45 -04003262 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003263
3264 uint32_t FTyID;
3265 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3266 SmallVector<Type *, 4> NewFuncParamTys;
3267 FunctionType *NewFTy =
3268 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3269 FTyID = lookupType(NewFTy);
3270 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003271 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003272 if (GlobalConstFuncTyMap.count(FTy)) {
3273 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3274 } else {
3275 FTyID = lookupType(FTy);
3276 }
3277 }
3278
David Neto257c3892018-04-11 13:19:45 -04003279 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003280
3281 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3282 EntryPoints.push_back(std::make_pair(&F, nextID));
3283 }
3284
3285 VMap[&F] = nextID;
3286
David Neto482550a2018-03-24 05:21:07 -07003287 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003288 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3289 }
David Neto22f144c2017-06-12 14:26:21 -04003290 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003291 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003292 SPIRVInstList.push_back(FuncInst);
3293
3294 //
3295 // Generate OpFunctionParameter for Normal function.
3296 //
3297
3298 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003299
3300 // Find Insert Point for OpDecorate.
3301 auto DecoInsertPoint =
3302 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3303 [](SPIRVInstruction *Inst) -> bool {
3304 return Inst->getOpcode() != spv::OpDecorate &&
3305 Inst->getOpcode() != spv::OpMemberDecorate &&
3306 Inst->getOpcode() != spv::OpExtInstImport;
3307 });
3308
David Neto22f144c2017-06-12 14:26:21 -04003309 // Iterate over Arguments (for names) instead of the param types of the function type.
3310 unsigned ArgIdx = 0;
3311 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003312 uint32_t param_id = nextID++;
3313 VMap[&Arg] = param_id;
3314
3315 if (CalledWithCoherentResource(Arg)) {
3316 // If the arg is passed a coherent resource ever, then decorate this
3317 // parameter with Coherent too.
3318 SPIRVOperandList decoration_ops;
3319 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003320 SPIRVInstList.insert(
3321 DecoInsertPoint,
3322 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003323 }
David Neto22f144c2017-06-12 14:26:21 -04003324
3325 // ParamOps[0] : Result Type ID
3326 SPIRVOperandList ParamOps;
3327
3328 // Find SPIRV instruction for parameter type.
3329 uint32_t ParamTyID = lookupType(Arg.getType());
3330 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3331 if (GlobalConstFuncTyMap.count(FTy)) {
3332 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3333 Type *EleTy = PTy->getPointerElementType();
3334 Type *ArgTy =
3335 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3336 ParamTyID = lookupType(ArgTy);
3337 GlobalConstArgSet.insert(&Arg);
3338 }
3339 }
3340 }
David Neto257c3892018-04-11 13:19:45 -04003341 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003342
3343 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003344 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003345 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003346 SPIRVInstList.push_back(ParamInst);
3347
3348 ArgIdx++;
3349 }
3350 }
3351}
3352
alan-bakerb6b09dc2018-11-08 16:59:28 -05003353void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003354 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3355 EntryPointVecType &EntryPoints = getEntryPointVec();
3356 ValueMapType &VMap = getValueMap();
3357 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3358 uint32_t &ExtInstImportID = getOpExtInstImportID();
3359 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3360
3361 // Set up insert point.
3362 auto InsertPoint = SPIRVInstList.begin();
3363
3364 //
3365 // Generate OpCapability
3366 //
3367 // TODO: Which llvm information is mapped to SPIRV Capability?
3368
3369 // Ops[0] = Capability
3370 SPIRVOperandList Ops;
3371
David Neto87846742018-04-11 17:36:22 -04003372 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003373 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003374 SPIRVInstList.insert(InsertPoint, CapInst);
3375
alan-bakerf906d2b2019-12-10 11:26:23 -05003376 bool write_without_format = false;
3377 bool sampled_1d = false;
3378 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003379 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003380 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3381 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003382 SPIRVInstList.insert(
3383 InsertPoint,
3384 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003385 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003386 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003387 SPIRVInstList.insert(
3388 InsertPoint,
3389 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003390 } else if (Ty->isIntegerTy(64)) {
3391 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003392 SPIRVInstList.insert(
3393 InsertPoint,
3394 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003395 } else if (Ty->isHalfTy()) {
3396 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003397 SPIRVInstList.insert(InsertPoint,
3398 new SPIRVInstruction(spv::OpCapability,
3399 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003400 } else if (Ty->isDoubleTy()) {
3401 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003402 SPIRVInstList.insert(InsertPoint,
3403 new SPIRVInstruction(spv::OpCapability,
3404 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003405 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3406 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003407 if (STy->getName().startswith("opencl.image1d_wo_t") ||
3408 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003409 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003410 write_without_format = true;
3411 }
3412 if (STy->getName().startswith("opencl.image1d_ro_t") ||
3413 STy->getName().startswith("opencl.image1d_wo_t")) {
3414 if (STy->getName().contains(".sampled"))
3415 sampled_1d = true;
3416 else
3417 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003418 }
3419 }
3420 }
3421 }
3422
alan-bakerf906d2b2019-12-10 11:26:23 -05003423 if (write_without_format) {
3424 // Generate OpCapability for write only image type.
3425 SPIRVInstList.insert(
3426 InsertPoint,
3427 new SPIRVInstruction(
3428 spv::OpCapability,
3429 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3430 }
3431 if (image_1d) {
3432 // Generate OpCapability for unsampled 1D image type.
3433 SPIRVInstList.insert(InsertPoint,
3434 new SPIRVInstruction(spv::OpCapability,
3435 {MkNum(spv::CapabilityImage1D)}));
3436 } else if (sampled_1d) {
3437 // Generate OpCapability for sampled 1D image type.
3438 SPIRVInstList.insert(
3439 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3440 {MkNum(spv::CapabilitySampled1D)}));
3441 }
3442
David Neto5c22a252018-03-15 16:07:41 -04003443 { // OpCapability ImageQuery
3444 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003445 for (const auto &SymVal : module.getValueSymbolTable()) {
3446 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003447 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003448 hasImageQuery = true;
3449 break;
3450 }
David Neto5c22a252018-03-15 16:07:41 -04003451 }
3452 }
alan-bakerf67468c2019-11-25 15:51:49 -05003453
David Neto5c22a252018-03-15 16:07:41 -04003454 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003455 auto *ImageQueryCapInst = new SPIRVInstruction(
3456 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003457 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3458 }
3459 }
3460
David Neto22f144c2017-06-12 14:26:21 -04003461 if (hasVariablePointers()) {
3462 //
David Neto22f144c2017-06-12 14:26:21 -04003463 // Generate OpCapability.
3464 //
3465 // Ops[0] = Capability
3466 //
3467 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003468 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003469
David Neto87846742018-04-11 17:36:22 -04003470 SPIRVInstList.insert(InsertPoint,
3471 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003472 } else if (hasVariablePointersStorageBuffer()) {
3473 //
3474 // Generate OpCapability.
3475 //
3476 // Ops[0] = Capability
3477 //
3478 Ops.clear();
3479 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003480
alan-baker5b86ed72019-02-15 08:26:50 -05003481 SPIRVInstList.insert(InsertPoint,
3482 new SPIRVInstruction(spv::OpCapability, Ops));
3483 }
3484
3485 // Always add the storage buffer extension
3486 {
David Neto22f144c2017-06-12 14:26:21 -04003487 //
3488 // Generate OpExtension.
3489 //
3490 // Ops[0] = Name (Literal String)
3491 //
alan-baker5b86ed72019-02-15 08:26:50 -05003492 auto *ExtensionInst = new SPIRVInstruction(
3493 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3494 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3495 }
David Neto22f144c2017-06-12 14:26:21 -04003496
alan-baker5b86ed72019-02-15 08:26:50 -05003497 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3498 //
3499 // Generate OpExtension.
3500 //
3501 // Ops[0] = Name (Literal String)
3502 //
3503 auto *ExtensionInst = new SPIRVInstruction(
3504 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3505 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003506 }
3507
3508 if (ExtInstImportID) {
3509 ++InsertPoint;
3510 }
3511
3512 //
3513 // Generate OpMemoryModel
3514 //
3515 // Memory model for Vulkan will always be GLSL450.
3516
3517 // Ops[0] = Addressing Model
3518 // Ops[1] = Memory Model
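  // The emitted instruction is effectively:
  //   OpMemoryModel Logical GLSL450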
3519 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003520 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003521
David Neto87846742018-04-11 17:36:22 -04003522 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003523 SPIRVInstList.insert(InsertPoint, MemModelInst);
3524
3525 //
3526 // Generate OpEntryPoint
3527 //
3528 for (auto EntryPoint : EntryPoints) {
3529 // Ops[0] = Execution Model
3530 // Ops[1] = EntryPoint ID
3531 // Ops[2] = Name (Literal String)
3532 // ...
3533 //
3534 // TODO: Do we need to consider Interface ID for forward references???
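    // Illustrative result (kernel name and IDs are hypothetical):
    //   OpEntryPoint GLCompute %foo "foo" %interface_var_0 %interface_var_1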
3535 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003536 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003537 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3538 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003539
David Neto22f144c2017-06-12 14:26:21 -04003540 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003541 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003542 }
3543
David Neto87846742018-04-11 17:36:22 -04003544 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003545 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3546 }
3547
3548 for (auto EntryPoint : EntryPoints) {
3549 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3550 ->getMetadata("reqd_work_group_size")) {
3551
3552 if (!BuiltinDimVec.empty()) {
3553 llvm_unreachable(
3554 "Kernels should have consistent work group size definition");
3555 }
3556
3557 //
3558 // Generate OpExecutionMode
3559 //
3560
3561 // Ops[0] = Entry Point ID
3562 // Ops[1] = Execution Mode
3563 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
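      // Illustrative result (entry point and sizes are hypothetical):
      //   OpExecutionMode %foo LocalSize 8 4 1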
3564 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003565 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003566
3567 uint32_t XDim = static_cast<uint32_t>(
3568 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3569 uint32_t YDim = static_cast<uint32_t>(
3570 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3571 uint32_t ZDim = static_cast<uint32_t>(
3572 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3573
David Neto257c3892018-04-11 13:19:45 -04003574 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003575
David Neto87846742018-04-11 17:36:22 -04003576 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003577 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3578 }
3579 }
3580
3581 //
3582 // Generate OpSource.
3583 //
3584 // Ops[0] = SourceLanguage ID
3585 // Ops[1] = Version (LiteralNum)
3586 //
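  // Illustrative output when the source language is OpenCL C 1.2 (see the
  // matching case below):
  //   OpSource OpenCL_C 120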
3587 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003588 switch (clspv::Option::Language()) {
3589 case clspv::Option::SourceLanguage::OpenCL_C_10:
3590 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3591 break;
3592 case clspv::Option::SourceLanguage::OpenCL_C_11:
3593 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3594 break;
3595 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003596 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003597 break;
3598 case clspv::Option::SourceLanguage::OpenCL_C_20:
3599 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3600 break;
3601 case clspv::Option::SourceLanguage::OpenCL_CPP:
3602 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3603 break;
3604 default:
3605 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3606 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003607 }
David Neto22f144c2017-06-12 14:26:21 -04003608
David Neto87846742018-04-11 17:36:22 -04003609 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003610 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3611
3612 if (!BuiltinDimVec.empty()) {
3613 //
3614 // Generate OpDecorates for x/y/z dimension.
3615 //
3616 // Ops[0] = Target ID
3617 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003618 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003619
3620 // X Dimension
3621 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003622 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003623 SPIRVInstList.insert(InsertPoint,
3624 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003625
3626 // Y Dimension
3627 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003628 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003629 SPIRVInstList.insert(InsertPoint,
3630 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003631
3632 // Z Dimension
3633 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003634 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003635 SPIRVInstList.insert(InsertPoint,
3636 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003637 }
3638}
3639
David Netob6e2e062018-04-25 10:32:06 -04003640void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3641 // Work around a driver bug. Initializers on Private variables might not
3642 // work. So the start of the kernel should store the initializer value to the
3643 // variables. Yes, *every* entry point pays this cost if *any* entry point
3644 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3645 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003646 // TODO(dneto): Remove this at some point once fixed drivers are widely
3647 // available.
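  // In effect the entry block gains one extra store (names are illustrative):
  //   OpStore %workgroup_size_var %workgroup_size_const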
David Netob6e2e062018-04-25 10:32:06 -04003648 if (WorkgroupSizeVarID) {
3649 assert(WorkgroupSizeValueID);
3650
3651 SPIRVOperandList Ops;
3652 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3653
3654 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3655 getSPIRVInstList().push_back(Inst);
3656 }
3657}
3658
David Neto22f144c2017-06-12 14:26:21 -04003659void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3660 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3661 ValueMapType &VMap = getValueMap();
3662
David Netob6e2e062018-04-25 10:32:06 -04003663 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003664
3665 for (BasicBlock &BB : F) {
3666 // Register BasicBlock to ValueMap.
3667 VMap[&BB] = nextID;
3668
3669 //
3670 // Generate OpLabel for Basic Block.
3671 //
3672 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003673 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003674 SPIRVInstList.push_back(Inst);
3675
David Neto6dcd4712017-06-23 11:06:47 -04003676 // OpVariable instructions must come first.
3677 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003678 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3679 // Allocating a pointer requires variable pointers.
3680 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003681 setVariablePointersCapabilities(
3682 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003683 }
David Neto6dcd4712017-06-23 11:06:47 -04003684 GenerateInstruction(I);
3685 }
3686 }
3687
David Neto22f144c2017-06-12 14:26:21 -04003688 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003689 if (clspv::Option::HackInitializers()) {
3690 GenerateEntryPointInitialStores();
3691 }
David Neto22f144c2017-06-12 14:26:21 -04003692 }
3693
3694 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003695 if (!isa<AllocaInst>(I)) {
3696 GenerateInstruction(I);
3697 }
David Neto22f144c2017-06-12 14:26:21 -04003698 }
3699 }
3700}
3701
3702spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3703 const std::map<CmpInst::Predicate, spv::Op> Map = {
3704 {CmpInst::ICMP_EQ, spv::OpIEqual},
3705 {CmpInst::ICMP_NE, spv::OpINotEqual},
3706 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3707 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3708 {CmpInst::ICMP_ULT, spv::OpULessThan},
3709 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3710 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3711 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3712 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3713 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3714 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3715 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3716 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3717 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3718 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3719 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3720 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3721 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3722 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3723 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3724 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3725 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3726
3727 assert(0 != Map.count(I->getPredicate()));
3728
3729 return Map.at(I->getPredicate());
3730}
3731
3732spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3733 const std::map<unsigned, spv::Op> Map{
3734 {Instruction::Trunc, spv::OpUConvert},
3735 {Instruction::ZExt, spv::OpUConvert},
3736 {Instruction::SExt, spv::OpSConvert},
3737 {Instruction::FPToUI, spv::OpConvertFToU},
3738 {Instruction::FPToSI, spv::OpConvertFToS},
3739 {Instruction::UIToFP, spv::OpConvertUToF},
3740 {Instruction::SIToFP, spv::OpConvertSToF},
3741 {Instruction::FPTrunc, spv::OpFConvert},
3742 {Instruction::FPExt, spv::OpFConvert},
3743 {Instruction::BitCast, spv::OpBitcast}};
3744
3745 assert(0 != Map.count(I.getOpcode()));
3746
3747 return Map.at(I.getOpcode());
3748}
3749
3750spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003751 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003752 switch (I.getOpcode()) {
3753 default:
3754 break;
3755 case Instruction::Or:
3756 return spv::OpLogicalOr;
3757 case Instruction::And:
3758 return spv::OpLogicalAnd;
3759 case Instruction::Xor:
3760 return spv::OpLogicalNotEqual;
3761 }
3762 }
3763
alan-bakerb6b09dc2018-11-08 16:59:28 -05003764 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003765 {Instruction::Add, spv::OpIAdd},
3766 {Instruction::FAdd, spv::OpFAdd},
3767 {Instruction::Sub, spv::OpISub},
3768 {Instruction::FSub, spv::OpFSub},
3769 {Instruction::Mul, spv::OpIMul},
3770 {Instruction::FMul, spv::OpFMul},
3771 {Instruction::UDiv, spv::OpUDiv},
3772 {Instruction::SDiv, spv::OpSDiv},
3773 {Instruction::FDiv, spv::OpFDiv},
3774 {Instruction::URem, spv::OpUMod},
3775 {Instruction::SRem, spv::OpSRem},
3776 {Instruction::FRem, spv::OpFRem},
3777 {Instruction::Or, spv::OpBitwiseOr},
3778 {Instruction::Xor, spv::OpBitwiseXor},
3779 {Instruction::And, spv::OpBitwiseAnd},
3780 {Instruction::Shl, spv::OpShiftLeftLogical},
3781 {Instruction::LShr, spv::OpShiftRightLogical},
3782 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3783
3784 assert(0 != Map.count(I.getOpcode()));
3785
3786 return Map.at(I.getOpcode());
3787}
3788
3789void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3790 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3791 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003792 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3793 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3794
3795 // Register Instruction to ValueMap.
3796 if (0 == VMap[&I]) {
3797 VMap[&I] = nextID;
3798 }
3799
3800 switch (I.getOpcode()) {
3801 default: {
3802 if (Instruction::isCast(I.getOpcode())) {
3803 //
3804 // Generate SPIRV instructions for cast operators.
3805 //
3806
David Netod2de94a2017-08-28 17:27:47 -04003807 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003808 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003809 auto toI8 = Ty == Type::getInt8Ty(Context);
3810 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003811 // Handle zext, sext and uitofp with i1 type specially.
3812 if ((I.getOpcode() == Instruction::ZExt ||
3813 I.getOpcode() == Instruction::SExt ||
3814 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003815 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003816 //
3817 // Generate OpSelect.
3818 //
3819
3820 // Ops[0] = Result Type ID
3821 // Ops[1] = Condition ID
3822 // Ops[2] = True Constant ID
3823 // Ops[3] = False Constant ID
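          // Illustrative sketch (hypothetical SSA names, not taken from the
          // input module): a scalar zext such as
          //   %r = zext i1 %c to i32
          // is expected to lower to
          //   %r = OpSelect %uint %c %uint_1 %uint_0
          // with sext selecting -1 and uitofp selecting 1.0f as the true value.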
3824 SPIRVOperandList Ops;
3825
David Neto257c3892018-04-11 13:19:45 -04003826 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003827
David Neto22f144c2017-06-12 14:26:21 -04003828 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003829 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003830
3831 uint32_t TrueID = 0;
3832 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003833 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003834 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003835 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003836 } else {
3837 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3838 }
David Neto257c3892018-04-11 13:19:45 -04003839 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003840
3841 uint32_t FalseID = 0;
3842 if (I.getOpcode() == Instruction::ZExt) {
3843 FalseID = VMap[Constant::getNullValue(I.getType())];
3844 } else if (I.getOpcode() == Instruction::SExt) {
3845 FalseID = VMap[Constant::getNullValue(I.getType())];
3846 } else {
3847 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3848 }
David Neto257c3892018-04-11 13:19:45 -04003849 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003850
David Neto87846742018-04-11 17:36:22 -04003851 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003852 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003853 } else if (!clspv::Option::Int8Support() &&
3854 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003855 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3856 // 8 bits.
3857 // Before:
3858 // %result = trunc i32 %a to i8
3859        // After:
3860 // %result = OpBitwiseAnd %uint %a %uint_255
3861
3862 SPIRVOperandList Ops;
3863
David Neto257c3892018-04-11 13:19:45 -04003864 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003865
3866 Type *UintTy = Type::getInt32Ty(Context);
3867 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003868 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003869
David Neto87846742018-04-11 17:36:22 -04003870 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003871 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003872 } else {
3873 // Ops[0] = Result Type ID
3874 // Ops[1] = Source Value ID
3875 SPIRVOperandList Ops;
3876
David Neto257c3892018-04-11 13:19:45 -04003877 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003878
David Neto87846742018-04-11 17:36:22 -04003879 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003880 SPIRVInstList.push_back(Inst);
3881 }
3882 } else if (isa<BinaryOperator>(I)) {
3883 //
3884 // Generate SPIRV instructions for binary operators.
3885 //
3886
3887 // Handle xor with i1 type specially.
3888 if (I.getOpcode() == Instruction::Xor &&
3889 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003890 ((isa<ConstantInt>(I.getOperand(0)) &&
3891 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3892 (isa<ConstantInt>(I.getOperand(1)) &&
3893 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003894 //
3895 // Generate OpLogicalNot.
3896 //
3897 // Ops[0] = Result Type ID
3898 // Ops[1] = Operand
3899 SPIRVOperandList Ops;
3900
David Neto257c3892018-04-11 13:19:45 -04003901 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003902
3903 Value *CondV = I.getOperand(0);
3904 if (isa<Constant>(I.getOperand(0))) {
3905 CondV = I.getOperand(1);
3906 }
David Neto257c3892018-04-11 13:19:45 -04003907 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003908
David Neto87846742018-04-11 17:36:22 -04003909 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003910 SPIRVInstList.push_back(Inst);
3911 } else {
3912 // Ops[0] = Result Type ID
3913 // Ops[1] = Operand 0
3914 // Ops[2] = Operand 1
3915 SPIRVOperandList Ops;
3916
David Neto257c3892018-04-11 13:19:45 -04003917 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3918 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003919
David Neto87846742018-04-11 17:36:22 -04003920 auto *Inst =
3921 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003922 SPIRVInstList.push_back(Inst);
3923 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003924 } else if (I.getOpcode() == Instruction::FNeg) {
3925 // The only unary operator.
3926 //
3927 // Ops[0] = Result Type ID
3928 // Ops[1] = Operand 0
3929 SPIRVOperandList ops;
3930
3931 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3932 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3933 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003934 } else {
3935 I.print(errs());
3936 llvm_unreachable("Unsupported instruction???");
3937 }
3938 break;
3939 }
3940 case Instruction::GetElementPtr: {
3941 auto &GlobalConstArgSet = getGlobalConstArgSet();
3942
3943 //
3944 // Generate OpAccessChain.
3945 //
3946 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3947
3951
3952 // Ops[0] = Result Type ID
3953 // Ops[1] = Base ID
3954 // Ops[2] ... Ops[n] = Indexes ID
3955 SPIRVOperandList Ops;
3956
alan-bakerb6b09dc2018-11-08 16:59:28 -05003957 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003958 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3959 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3960 // Use pointer type with private address space for global constant.
3961 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003962 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003963 }
David Neto257c3892018-04-11 13:19:45 -04003964
3965 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003966
David Neto862b7d82018-06-14 18:48:37 -04003967 // Generate the base pointer.
3968 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003969
David Neto862b7d82018-06-14 18:48:37 -04003970 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003971
3972 //
3973    // The gep is translated using the following rules.
3974 //
David Neto862b7d82018-06-14 18:48:37 -04003975    // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
3976 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003977 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3978 // first index.
3979 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3980 // use gep's first index.
3981    // 4. If none of cases 1, 2, or 3 applies, generate OpAccessChain and use
3982    //    gep's first index.
3983 //
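    // Illustrative sketch of rules 1 and 2 (hypothetical names):
    //   getelementptr %T, %T addrspace(1)* %p, i32 0, i32 2
    //     -> OpAccessChain %ptr %p %uint_2        (leading zero dropped)
    //   getelementptr %T, %T addrspace(1)* %p, i32 %n, i32 2
    //     -> OpPtrAccessChain %ptr %p %n %uint_2  (leading index kept)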
3984 spv::Op Opcode = spv::OpAccessChain;
3985 unsigned offset = 0;
3986 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003987 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003988 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003989 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003990 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003991 }
David Neto862b7d82018-06-14 18:48:37 -04003992 } else {
David Neto22f144c2017-06-12 14:26:21 -04003993 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003994 }
3995
3996 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003997 // Do we need to generate ArrayStride? Check against the GEP result type
3998 // rather than the pointer type of the base because when indexing into
3999 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4000 // for something else in the SPIR-V.
4001 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004002 auto address_space = ResultType->getAddressSpace();
4003 setVariablePointersCapabilities(address_space);
4004 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004005 case spv::StorageClassStorageBuffer:
4006 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004007 // Save the need to generate an ArrayStride decoration. But defer
4008 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004009 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004010 break;
4011 default:
4012 break;
David Neto1a1a0582017-07-07 12:01:44 -04004013 }
David Neto22f144c2017-06-12 14:26:21 -04004014 }
4015
4016 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004017 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004018 }
4019
David Neto87846742018-04-11 17:36:22 -04004020 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004021 SPIRVInstList.push_back(Inst);
4022 break;
4023 }
4024 case Instruction::ExtractValue: {
4025 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4026 // Ops[0] = Result Type ID
4027 // Ops[1] = Composite ID
4028 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4029 SPIRVOperandList Ops;
4030
David Neto257c3892018-04-11 13:19:45 -04004031 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004032
4033 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004034 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004035
4036 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004037 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004038 }
4039
David Neto87846742018-04-11 17:36:22 -04004040 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004041 SPIRVInstList.push_back(Inst);
4042 break;
4043 }
4044 case Instruction::InsertValue: {
4045 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4046 // Ops[0] = Result Type ID
4047 // Ops[1] = Object ID
4048 // Ops[2] = Composite ID
4049 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4050 SPIRVOperandList Ops;
4051
4052 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004053 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004054
4055 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004056 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004057
4058 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004059 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004060
4061 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004062 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004063 }
4064
David Neto87846742018-04-11 17:36:22 -04004065 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004066 SPIRVInstList.push_back(Inst);
4067 break;
4068 }
4069 case Instruction::Select: {
4070 //
4071 // Generate OpSelect.
4072 //
4073
4074 // Ops[0] = Result Type ID
4075 // Ops[1] = Condition ID
4076 // Ops[2] = True Constant ID
4077 // Ops[3] = False Constant ID
4078 SPIRVOperandList Ops;
4079
4080 // Find SPIRV instruction for parameter type.
4081 auto Ty = I.getType();
4082 if (Ty->isPointerTy()) {
4083 auto PointeeTy = Ty->getPointerElementType();
4084 if (PointeeTy->isStructTy() &&
4085 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4086 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004087 } else {
4088 // Selecting between pointers requires variable pointers.
4089 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4090 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4091 setVariablePointers(true);
4092 }
David Neto22f144c2017-06-12 14:26:21 -04004093 }
4094 }
4095
David Neto257c3892018-04-11 13:19:45 -04004096 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4097 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004098
David Neto87846742018-04-11 17:36:22 -04004099 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004100 SPIRVInstList.push_back(Inst);
4101 break;
4102 }
4103 case Instruction::ExtractElement: {
4104 // Handle <4 x i8> type manually.
4105 Type *CompositeTy = I.getOperand(0)->getType();
4106 if (is4xi8vec(CompositeTy)) {
4107 //
4108 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4109 // <4 x i8>.
4110 //
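      // Illustrative sketch (hypothetical names): for a variable index %idx,
      //   %e = extractelement <4 x i8> %v, i32 %idx
      // is expected to lower to
      //   %shift = OpIMul %uint %idx %uint_8
      //   %tmp   = OpShiftRightLogical %uint %v %shift
      //   %e     = OpBitwiseAnd %uint %tmp %uint_255
      // For a constant index the shift amount is simply the index times 8.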
4111
4112 //
4113 // Generate OpShiftRightLogical
4114 //
4115 // Ops[0] = Result Type ID
4116 // Ops[1] = Operand 0
4117 // Ops[2] = Operand 1
4118 //
4119 SPIRVOperandList Ops;
4120
David Neto257c3892018-04-11 13:19:45 -04004121 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004122
4123 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004124 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004125
4126 uint32_t Op1ID = 0;
4127 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4128 // Handle constant index.
4129 uint64_t Idx = CI->getZExtValue();
4130 Value *ShiftAmount =
4131 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4132 Op1ID = VMap[ShiftAmount];
4133 } else {
4134 // Handle variable index.
4135 SPIRVOperandList TmpOps;
4136
David Neto257c3892018-04-11 13:19:45 -04004137 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4138 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004139
4140 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004141 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004142
4143 Op1ID = nextID;
4144
David Neto87846742018-04-11 17:36:22 -04004145 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004146 SPIRVInstList.push_back(TmpInst);
4147 }
David Neto257c3892018-04-11 13:19:45 -04004148 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004149
4150 uint32_t ShiftID = nextID;
4151
David Neto87846742018-04-11 17:36:22 -04004152 auto *Inst =
4153 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004154 SPIRVInstList.push_back(Inst);
4155
4156 //
4157 // Generate OpBitwiseAnd
4158 //
4159 // Ops[0] = Result Type ID
4160 // Ops[1] = Operand 0
4161 // Ops[2] = Operand 1
4162 //
4163 Ops.clear();
4164
David Neto257c3892018-04-11 13:19:45 -04004165 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004166
4167 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004168 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004169
David Neto9b2d6252017-09-06 15:47:37 -04004170 // Reset mapping for this value to the result of the bitwise and.
4171 VMap[&I] = nextID;
4172
David Neto87846742018-04-11 17:36:22 -04004173 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004174 SPIRVInstList.push_back(Inst);
4175 break;
4176 }
4177
4178 // Ops[0] = Result Type ID
4179 // Ops[1] = Composite ID
4180 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4181 SPIRVOperandList Ops;
4182
David Neto257c3892018-04-11 13:19:45 -04004183 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004184
4185 spv::Op Opcode = spv::OpCompositeExtract;
4186 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004187 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004188 } else {
David Neto257c3892018-04-11 13:19:45 -04004189 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004190 Opcode = spv::OpVectorExtractDynamic;
4191 }
4192
David Neto87846742018-04-11 17:36:22 -04004193 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004194 SPIRVInstList.push_back(Inst);
4195 break;
4196 }
4197 case Instruction::InsertElement: {
4198 // Handle <4 x i8> type manually.
4199 Type *CompositeTy = I.getOperand(0)->getType();
4200 if (is4xi8vec(CompositeTy)) {
4201 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4202 uint32_t CstFFID = VMap[CstFF];
4203
4204 uint32_t ShiftAmountID = 0;
4205 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4206 // Handle constant index.
4207 uint64_t Idx = CI->getZExtValue();
4208 Value *ShiftAmount =
4209 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4210 ShiftAmountID = VMap[ShiftAmount];
4211 } else {
4212 // Handle variable index.
4213 SPIRVOperandList TmpOps;
4214
David Neto257c3892018-04-11 13:19:45 -04004215 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4216 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004217
4218 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004219 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004220
4221 ShiftAmountID = nextID;
4222
David Neto87846742018-04-11 17:36:22 -04004223 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004224 SPIRVInstList.push_back(TmpInst);
4225 }
4226
4227 //
4228 // Generate mask operations.
4229 //
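      // Illustrative sketch of the full sequence (hypothetical names):
      //   %mask   = OpShiftLeftLogical %uint %uint_255 %shift
      //   %inv    = OpNot %uint %mask
      //   %masked = OpBitwiseAnd %uint %v %inv
      //   %val    = OpShiftLeftLogical %uint %elem %shift
      //   %result = OpBitwiseOr %uint %masked %val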
4230
4231 // ShiftLeft mask according to index of insertelement.
4232 SPIRVOperandList Ops;
4233
David Neto257c3892018-04-11 13:19:45 -04004234 const uint32_t ResTyID = lookupType(CompositeTy);
4235 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004236
4237 uint32_t MaskID = nextID;
4238
David Neto87846742018-04-11 17:36:22 -04004239 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004240 SPIRVInstList.push_back(Inst);
4241
4242 // Inverse mask.
4243 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004244 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004245
4246 uint32_t InvMaskID = nextID;
4247
David Neto87846742018-04-11 17:36:22 -04004248 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004249 SPIRVInstList.push_back(Inst);
4250
4251 // Apply mask.
4252 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004253 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004254
4255 uint32_t OrgValID = nextID;
4256
David Neto87846742018-04-11 17:36:22 -04004257 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004258 SPIRVInstList.push_back(Inst);
4259
4260 // Create correct value according to index of insertelement.
4261 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004262 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4263 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004264
4265 uint32_t InsertValID = nextID;
4266
David Neto87846742018-04-11 17:36:22 -04004267 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004268 SPIRVInstList.push_back(Inst);
4269
4270 // Insert value to original value.
4271 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004272 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004273
David Netoa394f392017-08-26 20:45:29 -04004274 VMap[&I] = nextID;
4275
David Neto87846742018-04-11 17:36:22 -04004276 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004277 SPIRVInstList.push_back(Inst);
4278
4279 break;
4280 }
4281
David Neto22f144c2017-06-12 14:26:21 -04004282 SPIRVOperandList Ops;
4283
James Priced26efea2018-06-09 23:28:32 +01004284 // Ops[0] = Result Type ID
4285 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004286
4287 spv::Op Opcode = spv::OpCompositeInsert;
4288 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004289 const auto value = CI->getZExtValue();
4290 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004291 // Ops[1] = Object ID
4292 // Ops[2] = Composite ID
4293 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004294 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004295 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004296 } else {
James Priced26efea2018-06-09 23:28:32 +01004297 // Ops[1] = Composite ID
4298 // Ops[2] = Object ID
4299 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004300 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004301 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004302 Opcode = spv::OpVectorInsertDynamic;
4303 }
4304
David Neto87846742018-04-11 17:36:22 -04004305 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004306 SPIRVInstList.push_back(Inst);
4307 break;
4308 }
4309 case Instruction::ShuffleVector: {
4310 // Ops[0] = Result Type ID
4311 // Ops[1] = Vector 1 ID
4312 // Ops[2] = Vector 2 ID
4313 // Ops[3] ... Ops[n] = Components (Literal Number)
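    // Illustrative sketch (hypothetical names):
    //   %r = shufflevector <4 x float> %a, <4 x float> %b, <2 x i32> <i32 0, i32 5>
    // is expected to lower to
    //   %r = OpVectorShuffle %v2float %a %b 0 5
    // with undef mask components emitted as the literal 0xFFFFFFFF.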
4314 SPIRVOperandList Ops;
4315
David Neto257c3892018-04-11 13:19:45 -04004316 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4317 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004318
4319 uint64_t NumElements = 0;
4320 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4321 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4322
4323 if (Cst->isNullValue()) {
4324 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004325 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004326 }
4327 } else if (const ConstantDataSequential *CDS =
4328 dyn_cast<ConstantDataSequential>(Cst)) {
4329 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4330 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004331 const auto value = CDS->getElementAsInteger(i);
4332 assert(value <= UINT32_MAX);
4333 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004334 }
4335 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4336 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4337 auto Op = CV->getOperand(i);
4338
4339 uint32_t literal = 0;
4340
4341 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4342 literal = static_cast<uint32_t>(CI->getZExtValue());
4343 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4344 literal = 0xFFFFFFFFu;
4345 } else {
4346 Op->print(errs());
4347 llvm_unreachable("Unsupported element in ConstantVector!");
4348 }
4349
David Neto257c3892018-04-11 13:19:45 -04004350 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004351 }
4352 } else {
4353 Cst->print(errs());
4354 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4355 }
4356 }
4357
David Neto87846742018-04-11 17:36:22 -04004358 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004359 SPIRVInstList.push_back(Inst);
4360 break;
4361 }
4362 case Instruction::ICmp:
4363 case Instruction::FCmp: {
4364 CmpInst *CmpI = cast<CmpInst>(&I);
4365
David Netod4ca2e62017-07-06 18:47:35 -04004366 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004367 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004368 if (isa<PointerType>(ArgTy)) {
4369 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004370 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004371 errs()
4372 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4373 << "in function " << name << "\n";
4374 llvm_unreachable("Pointer equality check is invalid");
4375 break;
4376 }
4377
David Neto257c3892018-04-11 13:19:45 -04004378 // Ops[0] = Result Type ID
4379 // Ops[1] = Operand 1 ID
4380 // Ops[2] = Operand 2 ID
4381 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004382
David Neto257c3892018-04-11 13:19:45 -04004383 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4384 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004385
4386 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004387 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004388 SPIRVInstList.push_back(Inst);
4389 break;
4390 }
4391 case Instruction::Br: {
4392    // The branch instruction is deferred because it needs the label IDs of its
4393    // successors. Record the slot's location in SPIRVInstructionList.
4394 DeferredInsts.push_back(
4395 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4396 break;
4397 }
4398 case Instruction::Switch: {
4399 I.print(errs());
4400 llvm_unreachable("Unsupported instruction???");
4401 break;
4402 }
4403 case Instruction::IndirectBr: {
4404 I.print(errs());
4405 llvm_unreachable("Unsupported instruction???");
4406 break;
4407 }
4408 case Instruction::PHI: {
4409    // The PHI instruction is deferred because it needs the label IDs of its
4410    // incoming blocks. Record the slot's location in SPIRVInstructionList.
4411 DeferredInsts.push_back(
4412 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4413 break;
4414 }
4415 case Instruction::Alloca: {
4416 //
4417 // Generate OpVariable.
4418 //
4419 // Ops[0] : Result Type ID
4420 // Ops[1] : Storage Class
4421 SPIRVOperandList Ops;
4422
David Neto257c3892018-04-11 13:19:45 -04004423 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004424
David Neto87846742018-04-11 17:36:22 -04004425 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004426 SPIRVInstList.push_back(Inst);
4427 break;
4428 }
4429 case Instruction::Load: {
4430 LoadInst *LD = cast<LoadInst>(&I);
4431 //
4432 // Generate OpLoad.
4433 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004434
alan-baker5b86ed72019-02-15 08:26:50 -05004435 if (LD->getType()->isPointerTy()) {
4436 // Loading a pointer requires variable pointers.
4437 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4438 }
David Neto22f144c2017-06-12 14:26:21 -04004439
David Neto0a2f98d2017-09-15 19:38:40 -04004440 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004441 uint32_t PointerID = VMap[LD->getPointerOperand()];
4442
4443 // This is a hack to work around what looks like a driver bug.
4444 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004445 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4446 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004447 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004448 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004449 // Generate a bitwise-and of the original value with itself.
4450 // We should have been able to get away with just an OpCopyObject,
4451 // but we need something more complex to get past certain driver bugs.
4452 // This is ridiculous, but necessary.
4453 // TODO(dneto): Revisit this once drivers fix their bugs.
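      // Illustrative sketch (hypothetical names): instead of
      //   %size = OpLoad %v3uint %workgroup_size_var
      // emit
      //   %size = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value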
4454
4455 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004456 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4457 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004458
David Neto87846742018-04-11 17:36:22 -04004459 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004460 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004461 break;
4462 }
4463
4464 // This is the normal path. Generate a load.
4465
David Neto22f144c2017-06-12 14:26:21 -04004466 // Ops[0] = Result Type ID
4467 // Ops[1] = Pointer ID
4468 // Ops[2] ... Ops[n] = Optional Memory Access
4469 //
4470 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004471
David Neto22f144c2017-06-12 14:26:21 -04004472 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004473 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004474
David Neto87846742018-04-11 17:36:22 -04004475 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004476 SPIRVInstList.push_back(Inst);
4477 break;
4478 }
4479 case Instruction::Store: {
4480 StoreInst *ST = cast<StoreInst>(&I);
4481 //
4482 // Generate OpStore.
4483 //
4484
alan-baker5b86ed72019-02-15 08:26:50 -05004485 if (ST->getValueOperand()->getType()->isPointerTy()) {
4486 // Storing a pointer requires variable pointers.
4487 setVariablePointersCapabilities(
4488 ST->getValueOperand()->getType()->getPointerAddressSpace());
4489 }
4490
David Neto22f144c2017-06-12 14:26:21 -04004491 // Ops[0] = Pointer ID
4492 // Ops[1] = Object ID
4493 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4494 //
4495 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004496 SPIRVOperandList Ops;
4497 Ops << MkId(VMap[ST->getPointerOperand()])
4498 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004499
David Neto87846742018-04-11 17:36:22 -04004500 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004501 SPIRVInstList.push_back(Inst);
4502 break;
4503 }
4504 case Instruction::AtomicCmpXchg: {
4505 I.print(errs());
4506 llvm_unreachable("Unsupported instruction???");
4507 break;
4508 }
4509 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004510 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4511
4512 spv::Op opcode;
4513
4514 switch (AtomicRMW->getOperation()) {
4515 default:
4516 I.print(errs());
4517 llvm_unreachable("Unsupported instruction???");
4518 case llvm::AtomicRMWInst::Add:
4519 opcode = spv::OpAtomicIAdd;
4520 break;
4521 case llvm::AtomicRMWInst::Sub:
4522 opcode = spv::OpAtomicISub;
4523 break;
4524 case llvm::AtomicRMWInst::Xchg:
4525 opcode = spv::OpAtomicExchange;
4526 break;
4527 case llvm::AtomicRMWInst::Min:
4528 opcode = spv::OpAtomicSMin;
4529 break;
4530 case llvm::AtomicRMWInst::Max:
4531 opcode = spv::OpAtomicSMax;
4532 break;
4533 case llvm::AtomicRMWInst::UMin:
4534 opcode = spv::OpAtomicUMin;
4535 break;
4536 case llvm::AtomicRMWInst::UMax:
4537 opcode = spv::OpAtomicUMax;
4538 break;
4539 case llvm::AtomicRMWInst::And:
4540 opcode = spv::OpAtomicAnd;
4541 break;
4542 case llvm::AtomicRMWInst::Or:
4543 opcode = spv::OpAtomicOr;
4544 break;
4545 case llvm::AtomicRMWInst::Xor:
4546 opcode = spv::OpAtomicXor;
4547 break;
4548 }
4549
4550 //
4551 // Generate OpAtomic*.
4552 //
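    // Illustrative sketch (hypothetical names):
    //   %old = atomicrmw add i32 addrspace(1)* %p, i32 %v seq_cst
    // is expected to lower to
    //   %old = OpAtomicIAdd %uint %p %scope_device %semantics %v
    // where %scope_device is the Device scope constant and %semantics is
    // UniformMemory | SequentiallyConsistent.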
4553 SPIRVOperandList Ops;
4554
David Neto257c3892018-04-11 13:19:45 -04004555 Ops << MkId(lookupType(I.getType()))
4556 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004557
4558 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004559 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004560 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004561
4562 const auto ConstantMemorySemantics = ConstantInt::get(
4563 IntTy, spv::MemorySemanticsUniformMemoryMask |
4564 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004565 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004566
David Neto257c3892018-04-11 13:19:45 -04004567 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004568
4569 VMap[&I] = nextID;
4570
David Neto87846742018-04-11 17:36:22 -04004571 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004572 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004573 break;
4574 }
4575 case Instruction::Fence: {
4576 I.print(errs());
4577 llvm_unreachable("Unsupported instruction???");
4578 break;
4579 }
4580 case Instruction::Call: {
4581 CallInst *Call = dyn_cast<CallInst>(&I);
4582 Function *Callee = Call->getCalledFunction();
4583
Alan Baker202c8c72018-08-13 13:47:44 -04004584 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004585 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4586 // Generate an OpLoad
4587 SPIRVOperandList Ops;
4588 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004589
David Neto862b7d82018-06-14 18:48:37 -04004590 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4591 << MkId(ResourceVarDeferredLoadCalls[Call]);
4592
4593 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4594 SPIRVInstList.push_back(Inst);
4595 VMap[Call] = load_id;
4596 break;
4597
4598 } else {
4599 // This maps to an OpVariable we've already generated.
4600 // No code is generated for the call.
4601 }
4602 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004603 } else if (Callee->getName().startswith(
4604 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004605 // Don't codegen an instruction here, but instead map this call directly
4606 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004607 int spec_id = static_cast<int>(
4608 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004609 const auto &info = LocalSpecIdInfoMap[spec_id];
4610 VMap[Call] = info.variable_id;
4611 break;
David Neto862b7d82018-06-14 18:48:37 -04004612 }
4613
4614 // Sampler initializers become a load of the corresponding sampler.
4615
Kévin Petitdf71de32019-04-09 14:09:50 +01004616 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004617 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004618 const auto third_param = static_cast<unsigned>(
4619 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4620 auto sampler_value = third_param;
4621 if (clspv::Option::UseSamplerMap()) {
4622 sampler_value = getSamplerMap()[third_param].first;
4623 }
David Neto862b7d82018-06-14 18:48:37 -04004624
4625 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004626 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004627 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004628
David Neto257c3892018-04-11 13:19:45 -04004629 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004630 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004631
David Neto862b7d82018-06-14 18:48:37 -04004632 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004633 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004634 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004635 break;
4636 }
4637
Kévin Petit349c9502019-03-28 17:24:14 +00004638 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004639 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4640 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4641 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004642
Kévin Petit617a76d2019-04-04 13:54:16 +01004643    // If the switch above didn't have an entry, the intrinsic may be using
4644    // the name mangling logic.
4645 bool usesMangler = false;
4646 if (opcode == spv::OpNop) {
4647 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4648 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4649 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4650 usesMangler = true;
4651 }
4652 }
4653
Kévin Petit349c9502019-03-28 17:24:14 +00004654 if (opcode != spv::OpNop) {
4655
David Neto22f144c2017-06-12 14:26:21 -04004656 SPIRVOperandList Ops;
4657
Kévin Petit349c9502019-03-28 17:24:14 +00004658 if (!I.getType()->isVoidTy()) {
4659 Ops << MkId(lookupType(I.getType()));
4660 }
David Neto22f144c2017-06-12 14:26:21 -04004661
Kévin Petit617a76d2019-04-04 13:54:16 +01004662 unsigned firstOperand = usesMangler ? 1 : 0;
4663 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004664 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004665 }
4666
Kévin Petit349c9502019-03-28 17:24:14 +00004667 if (!I.getType()->isVoidTy()) {
4668 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004669 }
4670
Kévin Petit349c9502019-03-28 17:24:14 +00004671 SPIRVInstruction *Inst;
4672 if (!I.getType()->isVoidTy()) {
4673 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4674 } else {
4675 Inst = new SPIRVInstruction(opcode, Ops);
4676 }
Kévin Petit8a560882019-03-21 15:24:34 +00004677 SPIRVInstList.push_back(Inst);
4678 break;
4679 }
4680
David Neto22f144c2017-06-12 14:26:21 -04004681    // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4682 if (Callee->getName().startswith("spirv.copy_memory")) {
4683 //
4684 // Generate OpCopyMemory.
4685 //
4686
4687 // Ops[0] = Dst ID
4688 // Ops[1] = Src ID
4689 // Ops[2] = Memory Access
4690 // Ops[3] = Alignment
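      // Illustrative sketch (hypothetical names): a volatile copy with
      // alignment 4 is expected to lower to
      //   OpCopyMemory %dst %src Volatile|Aligned 4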
4691
4692 auto IsVolatile =
4693 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4694
4695 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4696 : spv::MemoryAccessMaskNone;
4697
4698 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4699
4700 auto Alignment =
4701 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4702
David Neto257c3892018-04-11 13:19:45 -04004703 SPIRVOperandList Ops;
4704 Ops << MkId(VMap[Call->getArgOperand(0)])
4705 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4706 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004707
David Neto87846742018-04-11 17:36:22 -04004708 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004709
4710 SPIRVInstList.push_back(Inst);
4711
4712 break;
4713 }
4714
David Neto22f144c2017-06-12 14:26:21 -04004715 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4716 // Additionally, OpTypeSampledImage is generated.
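    // Illustrative sketch (hypothetical names):
    //   %si = OpSampledImage %sampled_image_ty %image %sampler
    //   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
    // For integer images an extra OpBitcast to the call's result type follows.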
alan-bakerf67468c2019-11-25 15:51:49 -05004717 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004718 //
4719 // Generate OpSampledImage.
4720 //
4721 // Ops[0] = Result Type ID
4722 // Ops[1] = Image ID
4723 // Ops[2] = Sampler ID
4724 //
4725 SPIRVOperandList Ops;
4726
4727 Value *Image = Call->getArgOperand(0);
4728 Value *Sampler = Call->getArgOperand(1);
4729 Value *Coordinate = Call->getArgOperand(2);
4730
4731 TypeMapType &OpImageTypeMap = getImageTypeMap();
4732 Type *ImageTy = Image->getType()->getPointerElementType();
4733 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004734 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004735 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004736
4737 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004738
4739 uint32_t SampledImageID = nextID;
4740
David Neto87846742018-04-11 17:36:22 -04004741 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004742 SPIRVInstList.push_back(Inst);
4743
4744 //
4745 // Generate OpImageSampleExplicitLod.
4746 //
4747 // Ops[0] = Result Type ID
4748 // Ops[1] = Sampled Image ID
4749 // Ops[2] = Coordinate ID
4750 // Ops[3] = Image Operands Type ID
4751 // Ops[4] ... Ops[n] = Operands ID
4752 //
4753 Ops.clear();
4754
alan-bakerf67468c2019-11-25 15:51:49 -05004755 const bool is_int_image = IsIntImageType(Image->getType());
4756 uint32_t result_type = 0;
4757 if (is_int_image) {
4758 result_type = v4int32ID;
4759 } else {
4760 result_type = lookupType(Call->getType());
4761 }
4762
4763 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4764 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004765
4766 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004767 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004768
alan-bakerf67468c2019-11-25 15:51:49 -05004769 uint32_t final_id = nextID++;
4770 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004771
alan-bakerf67468c2019-11-25 15:51:49 -05004772 uint32_t image_id = final_id;
4773 if (is_int_image) {
4774 // Int image requires a bitcast from v4int to v4uint.
4775 image_id = nextID++;
4776 }
4777
4778 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004779 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004780
4781 if (is_int_image) {
4782 // Generate the bitcast.
4783 Ops.clear();
4784 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4785 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4786 SPIRVInstList.push_back(Inst);
4787 }
David Neto22f144c2017-06-12 14:26:21 -04004788 break;
4789 }
4790
alan-bakerf67468c2019-11-25 15:51:49 -05004791 // write_image is mapped to OpImageWrite.
4792 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004793 //
4794 // Generate OpImageWrite.
4795 //
4796 // Ops[0] = Image ID
4797 // Ops[1] = Coordinate ID
4798 // Ops[2] = Texel ID
4799 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4800 // Ops[4] ... Ops[n] = (Optional) Operands ID
4801 //
4802 SPIRVOperandList Ops;
4803
4804 Value *Image = Call->getArgOperand(0);
4805 Value *Coordinate = Call->getArgOperand(1);
4806 Value *Texel = Call->getArgOperand(2);
4807
4808 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004809 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004810 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004811
4812 const bool is_int_image = IsIntImageType(Image->getType());
4813 if (is_int_image) {
4814 // Generate a bitcast to v4int and use it as the texel value.
4815 uint32_t castID = nextID++;
4816 Ops << MkId(v4int32ID) << MkId(TexelID);
4817 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4818 SPIRVInstList.push_back(cast);
4819 Ops.clear();
4820 TexelID = castID;
4821 }
David Neto257c3892018-04-11 13:19:45 -04004822 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004823
David Neto87846742018-04-11 17:36:22 -04004824 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004825 SPIRVInstList.push_back(Inst);
4826 break;
4827 }
4828
alan-bakerce179f12019-12-06 19:02:22 -05004829 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4830 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004831 //
alan-bakerce179f12019-12-06 19:02:22 -05004832 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004833 //
4834 // Ops[0] = Image ID
4835 //
alan-bakerce179f12019-12-06 19:02:22 -05004836 // Result type has components equal to the dimensionality of the image,
4837 // plus 1 if the image is arrayed.
4838 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004839 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04004840 SPIRVOperandList Ops;
4841
4842 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004843 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4844 uint32_t SizesTypeID = 0;
4845
David Neto5c22a252018-03-15 16:07:41 -04004846 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004847 const uint32_t dim = ImageDimensionality(Image->getType());
alan-bakerf906d2b2019-12-10 11:26:23 -05004848 // TODO(alan-baker): fix component calculation when arrayed images are
4849 // supported.
alan-bakerce179f12019-12-06 19:02:22 -05004850 const uint32_t components = dim;
4851 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004852 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4853 } else {
4854 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4855 }
David Neto5c22a252018-03-15 16:07:41 -04004856 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004857 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004858 spv::Op query_opcode = spv::OpImageQuerySize;
4859 if (clspv::IsSampledImageType(Image->getType())) {
4860 query_opcode = spv::OpImageQuerySizeLod;
4861 // Need explicit 0 for Lod operand.
4862 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4863 Ops << MkId(VMap[CstInt0]);
4864 }
David Neto5c22a252018-03-15 16:07:41 -04004865
4866 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004867 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004868 SPIRVInstList.push_back(QueryInst);
4869
alan-bakerce179f12019-12-06 19:02:22 -05004870 // May require an extra instruction to create the appropriate result of
4871 // the builtin function.
4872 if (clspv::IsGetImageDim(Callee)) {
4873 if (dim == 3) {
4874 // get_image_dim returns an int4 for 3D images.
4875 //
4876 // Reset value map entry since we generated an intermediate
4877 // instruction.
4878 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004879
alan-bakerce179f12019-12-06 19:02:22 -05004880 // Implement:
4881 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4882 Ops.clear();
4883 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4884 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004885
alan-bakerce179f12019-12-06 19:02:22 -05004886 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4887 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004888
alan-bakerce179f12019-12-06 19:02:22 -05004889 auto *Inst =
4890 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4891 SPIRVInstList.push_back(Inst);
4892 } else if (dim != components) {
4893 // get_image_dim return an int2 regardless of the arrayedness of the
4894 // image. If the image is arrayed an element must be dropped from the
4895 // query result.
4896 //
4897 // Reset value map entry since we generated an intermediate
4898 // instruction.
4899 VMap[&I] = nextID;
4900
4901 // Implement:
4902 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4903 Ops.clear();
4904 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4905 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4906
4907 auto *Inst =
4908 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4909 SPIRVInstList.push_back(Inst);
4910 }
4911 } else if (components > 1) {
4912 // Reset value map entry since we generated an intermediate instruction.
4913 VMap[&I] = nextID;
4914
4915 // Implement:
4916 // %result = OpCompositeExtract %uint %sizes <component number>
4917 Ops.clear();
4918 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4919
4920 uint32_t component = 0;
4921 if (IsGetImageHeight(Callee))
4922 component = 1;
4923 else if (IsGetImageDepth(Callee))
4924 component = 2;
4925 Ops << MkNum(component);
4926
4927 auto *Inst =
4928 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4929 SPIRVInstList.push_back(Inst);
4930 }
David Neto5c22a252018-03-15 16:07:41 -04004931 break;
4932 }
4933
David Neto22f144c2017-06-12 14:26:21 -04004934    // The call instruction is deferred because it needs the callee function's
4935    // ID. Record the slot's location in SPIRVInstructionList.
4936 DeferredInsts.push_back(
4937 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4938
David Neto3fbb4072017-10-16 11:28:14 -04004939 // Check whether the implementation of this call uses an extended
4940 // instruction plus one more value-producing instruction. If so, then
4941 // reserve the id for the extra value-producing slot.
4942 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4943 if (EInst != kGlslExtInstBad) {
4944 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004945 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04004946 VMap[&I] = nextID;
4947 nextID++;
4948 }
4949 break;
4950 }
4951 case Instruction::Ret: {
4952 unsigned NumOps = I.getNumOperands();
4953 if (NumOps == 0) {
4954 //
4955 // Generate OpReturn.
4956 //
David Netoef5ba2b2019-12-20 08:35:54 -05004957 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04004958 } else {
4959 //
4960 // Generate OpReturnValue.
4961 //
4962
4963 // Ops[0] = Return Value ID
4964 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004965
4966 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004967
David Neto87846742018-04-11 17:36:22 -04004968 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004969 SPIRVInstList.push_back(Inst);
4970 break;
4971 }
4972 break;
4973 }
4974 }
4975}
4976
4977void SPIRVProducerPass::GenerateFuncEpilogue() {
4978 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4979
4980 //
4981 // Generate OpFunctionEnd
4982 //
4983
David Netoef5ba2b2019-12-20 08:35:54 -05004984 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004985 SPIRVInstList.push_back(Inst);
4986}
4987
4988bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004989 // Don't specialize <4 x i8> if i8 is generally supported.
4990 if (clspv::Option::Int8Support())
4991 return false;
4992
David Neto22f144c2017-06-12 14:26:21 -04004993 LLVMContext &Context = Ty->getContext();
4994 if (Ty->isVectorTy()) {
4995 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
4996 Ty->getVectorNumElements() == 4) {
4997 return true;
4998 }
4999 }
5000
5001 return false;
5002}
5003
5004void SPIRVProducerPass::HandleDeferredInstruction() {
5005 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5006 ValueMapType &VMap = getValueMap();
5007 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5008
5009 for (auto DeferredInst = DeferredInsts.rbegin();
5010 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5011 Value *Inst = std::get<0>(*DeferredInst);
5012 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
5013 if (InsertPoint != SPIRVInstList.end()) {
5014 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5015 ++InsertPoint;
5016 }
5017 }
5018
5019 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005020      // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005021 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005022 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005023 //
5024 // Generate OpLoopMerge.
5025 //
5026 // Ops[0] = Merge Block ID
5027 // Ops[1] = Continue Target ID
5028 // Ops[2] = Selection Control
5029 SPIRVOperandList Ops;
5030
alan-baker06cad652019-12-03 17:56:47 -05005031 auto MergeBB = MergeBlocks[BrBB];
5032 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005033 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005034 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005035 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005036 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005037
David Neto87846742018-04-11 17:36:22 -04005038 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005039 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005040 } else if (MergeBlocks.count(BrBB)) {
5041 //
5042 // Generate OpSelectionMerge.
5043 //
5044 // Ops[0] = Merge Block ID
5045 // Ops[1] = Selection Control
5046 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005047
alan-baker06cad652019-12-03 17:56:47 -05005048 auto MergeBB = MergeBlocks[BrBB];
5049 uint32_t MergeBBID = VMap[MergeBB];
5050 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005051
alan-baker06cad652019-12-03 17:56:47 -05005052 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5053 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005054 }
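      // Illustrative shape of the structured output (assumed ids, not emitted
      // verbatim by this pass): a loop header block ends up as
      //   OpLoopMerge %merge %continue None
      //   OpBranchConditional %cond %body %merge
      // and a plain selection ends up as
      //   OpSelectionMerge %merge None
      //   OpBranchConditional %cond %then %else
      // with the merge instruction inserted immediately before the branch.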
5055
5056 if (Br->isConditional()) {
5057 //
5058 // Generate OpBranchConditional.
5059 //
5060 // Ops[0] = Condition ID
5061 // Ops[1] = True Label ID
5062 // Ops[2] = False Label ID
5063 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5064 SPIRVOperandList Ops;
5065
5066 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005067 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005068 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005069
5070 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005071
David Neto87846742018-04-11 17:36:22 -04005072 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005073 SPIRVInstList.insert(InsertPoint, BrInst);
5074 } else {
5075 //
5076 // Generate OpBranch.
5077 //
5078 // Ops[0] = Target Label ID
5079 SPIRVOperandList Ops;
5080
5081 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005082 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005083
David Neto87846742018-04-11 17:36:22 -04005084 SPIRVInstList.insert(InsertPoint,
5085 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005086 }
5087 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005088 if (PHI->getType()->isPointerTy()) {
5089 // OpPhi on pointers requires variable pointers.
5090 setVariablePointersCapabilities(
5091 PHI->getType()->getPointerAddressSpace());
5092 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5093 setVariablePointers(true);
5094 }
5095 }
5096
David Neto22f144c2017-06-12 14:26:21 -04005097 //
5098 // Generate OpPhi.
5099 //
5100 // Ops[0] = Result Type ID
5101 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5102 SPIRVOperandList Ops;
5103
David Neto257c3892018-04-11 13:19:45 -04005104 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005105
David Neto22f144c2017-06-12 14:26:21 -04005106 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5107 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005108 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005109 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005110 }
5111
5112 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005113 InsertPoint,
5114 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005115 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5116 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005117 auto callee_name = Callee->getName();
5118 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005119
5120 if (EInst) {
5121 uint32_t &ExtInstImportID = getOpExtInstImportID();
5122
5123 //
5124 // Generate OpExtInst.
5125 //
5126
5127 // Ops[0] = Result Type ID
5128 // Ops[1] = Set ID (OpExtInstImport ID)
5129 // Ops[2] = Instruction Number (Literal Number)
5130 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5131 SPIRVOperandList Ops;
5132
David Neto862b7d82018-06-14 18:48:37 -04005133 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5134 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005135
David Neto22f144c2017-06-12 14:26:21 -04005136 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5137 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005138 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005139 }
5140
David Neto87846742018-04-11 17:36:22 -04005141 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5142 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005143 SPIRVInstList.insert(InsertPoint, ExtInst);
5144
David Neto3fbb4072017-10-16 11:28:14 -04005145 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5146 if (IndirectExtInst != kGlslExtInstBad) {
5147 // Generate one more instruction that uses the result of the extended
5148 // instruction. Its result id is one more than the id of the
5149 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005150 LLVMContext &Context =
5151 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005152
David Neto3fbb4072017-10-16 11:28:14 -04005153 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5154 &VMap, &SPIRVInstList, &InsertPoint](
5155 spv::Op opcode, Constant *constant) {
5156 //
5157 // Generate instruction like:
5158 // result = opcode constant <extinst-result>
5159 //
5160 // Ops[0] = Result Type ID
5161 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5162 // Ops[2] = Operand 1 ;; the result of the extended instruction
5163 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005164
David Neto3fbb4072017-10-16 11:28:14 -04005165 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005166 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005167
5168 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5169 constant = ConstantVector::getSplat(
5170 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5171 }
David Neto257c3892018-04-11 13:19:45 -04005172 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005173
5174 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005175 InsertPoint, new SPIRVInstruction(
5176 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005177 };
5178
5179 switch (IndirectExtInst) {
5180 case glsl::ExtInstFindUMsb: // Implementing clz
5181 generate_extra_inst(
5182 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5183 break;
5184 case glsl::ExtInstAcos: // Implementing acospi
5185 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005186 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005187 case glsl::ExtInstAtan2: // Implementing atan2pi
5188 generate_extra_inst(
5189 spv::OpFMul,
5190 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5191 break;
5192
5193 default:
5194 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005195 }
David Neto22f144c2017-06-12 14:26:21 -04005196 }
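        // For example (illustrative): clz(x) on a 32-bit value becomes
        // FindUMsb(x) followed by an OpISub computing 31 - msb, while
        // acospi/asinpi/atanpi/atan2pi become the base GLSL instruction
        // followed by an OpFMul with 1/pi (kOneOverPi), splatted for vectors.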
David Neto3fbb4072017-10-16 11:28:14 -04005197
alan-bakerb39c8262019-03-08 14:03:37 -05005198 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005199 //
5200 // Generate OpBitCount
5201 //
5202 // Ops[0] = Result Type ID
5203 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005204 SPIRVOperandList Ops;
5205 Ops << MkId(lookupType(Call->getType()))
5206 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005207
5208 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005209 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005210 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005211
David Neto862b7d82018-06-14 18:48:37 -04005212 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005213
5214 // Generate an OpCompositeConstruct
5215 SPIRVOperandList Ops;
5216
5217 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005218 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005219
5220 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005221 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005222 }
5223
5224 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005225 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5226 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005227
Alan Baker202c8c72018-08-13 13:47:44 -04005228 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5229
5230 // We have already mapped the call's result value to an ID.
5231 // Don't generate any code now.
5232
5233 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005234
5235 // We have already mapped the call's result value to an ID.
5236 // Don't generate any code now.
5237
David Neto22f144c2017-06-12 14:26:21 -04005238 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005239 if (Call->getType()->isPointerTy()) {
5240 // Functions returning pointers require variable pointers.
5241 setVariablePointersCapabilities(
5242 Call->getType()->getPointerAddressSpace());
5243 }
5244
David Neto22f144c2017-06-12 14:26:21 -04005245 //
5246 // Generate OpFunctionCall.
5247 //
5248
5249 // Ops[0] = Result Type ID
5250 // Ops[1] = Callee Function ID
5251 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5252 SPIRVOperandList Ops;
5253
David Neto862b7d82018-06-14 18:48:37 -04005254 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005255
5256 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005257 if (CalleeID == 0) {
5258 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005259 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005260 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5261 // causes an infinite loop. Instead, go ahead and generate
5262 // the bad function call. A validator will catch the 0-Id.
5263 // llvm_unreachable("Can't translate function call");
5264 }
David Neto22f144c2017-06-12 14:26:21 -04005265
David Neto257c3892018-04-11 13:19:45 -04005266 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005267
David Neto22f144c2017-06-12 14:26:21 -04005268 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5269 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005270 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005271 auto *operand_type = operand->getType();
5272 // Images and samplers can be passed as function parameters without
5273 // variable pointers.
5274 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5275 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005276 auto sc =
5277 GetStorageClass(operand->getType()->getPointerAddressSpace());
5278 if (sc == spv::StorageClassStorageBuffer) {
5279 // Passing SSBO by reference requires variable pointers storage
5280 // buffer.
5281 setVariablePointersStorageBuffer(true);
5282 } else if (sc == spv::StorageClassWorkgroup) {
5283 // Workgroup references require variable pointers if they are not
5284 // memory object declarations.
5285 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5286 // Workgroup accessor represents a variable reference.
5287 if (!operand_call->getCalledFunction()->getName().startswith(
5288 clspv::WorkgroupAccessorFunction()))
5289 setVariablePointers(true);
5290 } else {
5291 // Arguments are function parameters.
5292 if (!isa<Argument>(operand))
5293 setVariablePointers(true);
5294 }
5295 }
5296 }
5297 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005298 }
5299
David Neto87846742018-04-11 17:36:22 -04005300 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5301 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005302 SPIRVInstList.insert(InsertPoint, CallInst);
5303 }
5304 }
5305 }
5306}
5307
David Neto1a1a0582017-07-07 12:01:44 -04005308void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005309 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005310 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005311 }
David Neto1a1a0582017-07-07 12:01:44 -04005312
5313 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005314
5315 // Find an iterator pointing just past the last decoration.
5316 bool seen_decorations = false;
5317 auto DecoInsertPoint =
5318 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5319 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5320 const bool is_decoration =
5321 Inst->getOpcode() == spv::OpDecorate ||
5322 Inst->getOpcode() == spv::OpMemberDecorate;
5323 if (is_decoration) {
5324 seen_decorations = true;
5325 return false;
5326 } else {
5327 return seen_decorations;
5328 }
5329 });
5330
David Netoc6f3ab22018-04-06 18:02:31 -04005331 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5332 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005333 for (auto *type : getTypesNeedingArrayStride()) {
5334 Type *elemTy = nullptr;
5335 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5336 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005337 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005338 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005339 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005340 elemTy = seqTy->getSequentialElementType();
5341 } else {
5342 errs() << "Unhandled strided type " << *type << "\n";
5343 llvm_unreachable("Unhandled strided type");
5344 }
David Neto1a1a0582017-07-07 12:01:44 -04005345
5346 // Ops[0] = Target ID
5347 // Ops[1] = Decoration (ArrayStride)
5348 // Ops[2] = Stride number (Literal Number)
5349 SPIRVOperandList Ops;
5350
David Neto85082642018-03-24 06:55:20 -07005351 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005352 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005353
5354 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5355 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005356
David Neto87846742018-04-11 17:36:22 -04005357 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005358 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5359 }
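  // For instance (illustrative): a pointer to float receives
  //   OpDecorate %ptr ArrayStride 4
  // and a pointer to a 4-element float vector receives ArrayStride 16,
  // mirroring GetTypeAllocSize of the pointee type.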
David Netoc6f3ab22018-04-06 18:02:31 -04005360
5361 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005362 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5363 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005364 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005365 SPIRVOperandList Ops;
5366 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5367 << MkNum(arg_info.spec_id);
5368 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005369 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005370 }
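  // Illustrative output (assumed id and spec id): "OpDecorate %array_size
  // SpecId 3" lets the runtime specialize the workgroup-scope array length
  // when the pipeline is created.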
David Neto1a1a0582017-07-07 12:01:44 -04005371}
5372
David Neto22f144c2017-06-12 14:26:21 -04005373glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
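  // Reading aid (added comment; follows the Itanium C++ ABI mangling used for
  // OpenCL builtins): "c" is char, "h" is uchar, "s" is short, "t" is ushort,
  // "i" is int, "j" is uint, "l" is long, "m" is ulong, "f" is float, "Dh" is
  // half, and "Dv<N>_" is an N-element vector, so "_Z5clampDv4_fS_S_" is
  // clamp(float4, float4, float4).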
5374 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005375 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5376 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5377 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5378 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005379 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5380 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5381 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5382 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005383 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5384 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5385 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5386 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005387 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5388 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5389 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5390 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005391 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5392 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5393 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5394 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5395 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5396 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5397 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5398 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005399 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5400 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5401 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5402 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5403 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5404 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5405 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5406 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005407 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5408 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5409 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5410 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5411 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5412 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5413 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5414 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005415 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5416 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5417 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5418 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5419 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5420 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5421 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5422 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005423 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5424 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5425 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5426 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-baker49bb5fb2020-01-15 08:22:13 -05005427 .Case("_Z5clampDhDhDh", glsl::ExtInst::ExtInstFClamp)
5428 .Case("_Z5clampDv2_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5429 .Case("_Z5clampDv3_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5430 .Case("_Z5clampDv4_DhS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005431 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5432 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5433 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5434 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5435 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5436 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5437 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5438 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005439 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5440 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5441 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5442 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5443 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5444 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5445 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5446 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005447 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5448 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5449 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5450 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5451 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5452 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5453 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5454 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005455 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5456 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5457 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5458 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5459 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5460 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5461 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5462 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005463 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5464 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5465 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5466 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
alan-baker49bb5fb2020-01-15 08:22:13 -05005467 .Case("_Z3maxDhDh", glsl::ExtInst::ExtInstFMax)
5468 .Case("_Z3maxDv2_DhS_", glsl::ExtInst::ExtInstFMax)
5469 .Case("_Z3maxDv3_DhS_", glsl::ExtInst::ExtInstFMax)
5470 .Case("_Z3maxDv4_DhS_", glsl::ExtInst::ExtInstFMax)
David Neto22f144c2017-06-12 14:26:21 -04005471 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005472 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5473 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5474 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5475 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5476 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5477 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5478 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5479 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005480 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5481 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5482 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5483 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5484 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5485 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5486 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5487 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005488 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5489 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5490 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5491 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5492 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5493 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5494 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5495 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005496 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5497 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5498 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5499 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5500 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5501 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5502 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5503 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005504 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5505 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5506 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5507 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
alan-baker49bb5fb2020-01-15 08:22:13 -05005508 .Case("_Z3minDhDh", glsl::ExtInst::ExtInstFMin)
5509 .Case("_Z3minDv2_DhS_", glsl::ExtInst::ExtInstFMin)
5510 .Case("_Z3minDv3_DhS_", glsl::ExtInst::ExtInstFMin)
5511 .Case("_Z3minDv4_DhS_", glsl::ExtInst::ExtInstFMin)
David Neto22f144c2017-06-12 14:26:21 -04005512 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5513 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5514 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5515 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5516 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5517 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5518 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5519 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5520 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5521 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5522 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5523 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5524 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5525 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5526 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5527 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5528 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5529 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5530 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5531 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5532 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5533 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5534 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5535 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5536 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5537 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5538 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5539 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5540 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5541 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5542 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5543 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5544 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5545 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5546 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5547 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5548 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005549 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005550 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5551 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5552 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5553 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5554 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5555 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5556 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5557 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5558 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5559 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5560 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5561 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5562 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5563 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5564 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5565 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5566 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005567 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005568 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005569 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005570 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005571 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005572 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5573 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005574 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005575 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5576 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5577 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005578 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5579 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5580 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5581 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005582 .Default(kGlslExtInstBad);
5583}
5584
5585glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5586 // Check indirect cases.
5587 return StringSwitch<glsl::ExtInst>(Name)
5588 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5589 // Use exact match on float arg because these need a multiply
5590 // of a constant of the right floating point type.
5591 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5592 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5593 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5594 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5595 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5596 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5597 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5598 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005599 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5600 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5601 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5602 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005603 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5604 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5605 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5606 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5607 .Default(kGlslExtInstBad);
5608}
5609
alan-bakerb6b09dc2018-11-08 16:59:28 -05005610glsl::ExtInst
5611SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005612 auto direct = getExtInstEnum(Name);
5613 if (direct != kGlslExtInstBad)
5614 return direct;
5615 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005616}
5617
David Neto22f144c2017-06-12 14:26:21 -04005618void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005619 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005620}
5621
5622void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5623 WriteOneWord(Inst->getResultID());
5624}
5625
5626void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5627 // High 16 bit : Word Count
5628 // Low 16 bit : Opcode
5629 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005630 const uint32_t count = Inst->getWordCount();
5631 if (count > 65535) {
5632 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5633 llvm_unreachable("Word count too high");
5634 }
David Neto22f144c2017-06-12 14:26:21 -04005635 Word |= Inst->getWordCount() << 16;
5636 WriteOneWord(Word);
5637}
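// Worked example (illustrative, not produced by the code above): an
// OpTypeVoid instruction occupies 2 words and OpTypeVoid is opcode 19 (0x13),
// so its first word is encoded as 0x00020013.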
5638
David Netoef5ba2b2019-12-20 08:35:54 -05005639void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005640 SPIRVOperandType OpTy = Op->getType();
5641 switch (OpTy) {
5642 default: {
5643 llvm_unreachable("Unsupported SPIRV Operand Type???");
5644 break;
5645 }
5646 case SPIRVOperandType::NUMBERID: {
5647 WriteOneWord(Op->getNumID());
5648 break;
5649 }
5650 case SPIRVOperandType::LITERAL_STRING: {
5651 std::string Str = Op->getLiteralStr();
5652 const char *Data = Str.c_str();
5653 size_t WordSize = Str.size() / 4;
5654 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5655 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5656 }
5657
5658 uint32_t Remainder = Str.size() % 4;
5659 uint32_t LastWord = 0;
5660 if (Remainder) {
5661 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5662 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5663 }
5664 }
5665
5666 WriteOneWord(LastWord);
5667 break;
5668 }
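  // Worked example (illustrative, little-endian host assumed): the literal
  // string "kernel" (6 bytes) is written as 0x6E72656B ("kern") followed by
  // 0x00006C65 ("el" plus NUL padding); when the length is an exact multiple
  // of 4, the unconditional WriteOneWord(LastWord) still emits the required
  // all-zero terminator word.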
5669 case SPIRVOperandType::LITERAL_INTEGER:
5670 case SPIRVOperandType::LITERAL_FLOAT: {
5671 auto LiteralNum = Op->getLiteralNum();
5672 // TODO: Handle LiteralNum carefully.
5673 for (auto Word : LiteralNum) {
5674 WriteOneWord(Word);
5675 }
5676 break;
5677 }
5678 }
5679}
5680
5681void SPIRVProducerPass::WriteSPIRVBinary() {
5682 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5683
5684 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005685 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005686 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5687
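    // Emission layouts used below (summary comment): instructions without a
    // result id write every operand after the opcode word; OpType*/OpLabel
    // style instructions write the result id first and then their operands;
    // value-producing instructions write Ops[0] (the result type), then the
    // result id, then the remaining operands.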
5688 switch (Opcode) {
5689 default: {
David Neto5c22a252018-03-15 16:07:41 -04005690 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005691 llvm_unreachable("Unsupported SPIRV instruction");
5692 break;
5693 }
5694 case spv::OpCapability:
5695 case spv::OpExtension:
5696 case spv::OpMemoryModel:
5697 case spv::OpEntryPoint:
5698 case spv::OpExecutionMode:
5699 case spv::OpSource:
5700 case spv::OpDecorate:
5701 case spv::OpMemberDecorate:
5702 case spv::OpBranch:
5703 case spv::OpBranchConditional:
5704 case spv::OpSelectionMerge:
5705 case spv::OpLoopMerge:
5706 case spv::OpStore:
5707 case spv::OpImageWrite:
5708 case spv::OpReturnValue:
5709 case spv::OpControlBarrier:
5710 case spv::OpMemoryBarrier:
5711 case spv::OpReturn:
5712 case spv::OpFunctionEnd:
5713 case spv::OpCopyMemory: {
5714 WriteWordCountAndOpcode(Inst);
5715 for (uint32_t i = 0; i < Ops.size(); i++) {
5716 WriteOperand(Ops[i]);
5717 }
5718 break;
5719 }
5720 case spv::OpTypeBool:
5721 case spv::OpTypeVoid:
5722 case spv::OpTypeSampler:
5723 case spv::OpLabel:
5724 case spv::OpExtInstImport:
5725 case spv::OpTypePointer:
5726 case spv::OpTypeRuntimeArray:
5727 case spv::OpTypeStruct:
5728 case spv::OpTypeImage:
5729 case spv::OpTypeSampledImage:
5730 case spv::OpTypeInt:
5731 case spv::OpTypeFloat:
5732 case spv::OpTypeArray:
5733 case spv::OpTypeVector:
5734 case spv::OpTypeFunction: {
5735 WriteWordCountAndOpcode(Inst);
5736 WriteResultID(Inst);
5737 for (uint32_t i = 0; i < Ops.size(); i++) {
5738 WriteOperand(Ops[i]);
5739 }
5740 break;
5741 }
5742 case spv::OpFunction:
5743 case spv::OpFunctionParameter:
5744 case spv::OpAccessChain:
5745 case spv::OpPtrAccessChain:
5746 case spv::OpInBoundsAccessChain:
5747 case spv::OpUConvert:
5748 case spv::OpSConvert:
5749 case spv::OpConvertFToU:
5750 case spv::OpConvertFToS:
5751 case spv::OpConvertUToF:
5752 case spv::OpConvertSToF:
5753 case spv::OpFConvert:
5754 case spv::OpConvertPtrToU:
5755 case spv::OpConvertUToPtr:
5756 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005757 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005758 case spv::OpIAdd:
5759 case spv::OpFAdd:
5760 case spv::OpISub:
5761 case spv::OpFSub:
5762 case spv::OpIMul:
5763 case spv::OpFMul:
5764 case spv::OpUDiv:
5765 case spv::OpSDiv:
5766 case spv::OpFDiv:
5767 case spv::OpUMod:
5768 case spv::OpSRem:
5769 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005770 case spv::OpUMulExtended:
5771 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005772 case spv::OpBitwiseOr:
5773 case spv::OpBitwiseXor:
5774 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005775 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005776 case spv::OpShiftLeftLogical:
5777 case spv::OpShiftRightLogical:
5778 case spv::OpShiftRightArithmetic:
5779 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005780 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005781 case spv::OpCompositeExtract:
5782 case spv::OpVectorExtractDynamic:
5783 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005784 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005785 case spv::OpVectorInsertDynamic:
5786 case spv::OpVectorShuffle:
5787 case spv::OpIEqual:
5788 case spv::OpINotEqual:
5789 case spv::OpUGreaterThan:
5790 case spv::OpUGreaterThanEqual:
5791 case spv::OpULessThan:
5792 case spv::OpULessThanEqual:
5793 case spv::OpSGreaterThan:
5794 case spv::OpSGreaterThanEqual:
5795 case spv::OpSLessThan:
5796 case spv::OpSLessThanEqual:
5797 case spv::OpFOrdEqual:
5798 case spv::OpFOrdGreaterThan:
5799 case spv::OpFOrdGreaterThanEqual:
5800 case spv::OpFOrdLessThan:
5801 case spv::OpFOrdLessThanEqual:
5802 case spv::OpFOrdNotEqual:
5803 case spv::OpFUnordEqual:
5804 case spv::OpFUnordGreaterThan:
5805 case spv::OpFUnordGreaterThanEqual:
5806 case spv::OpFUnordLessThan:
5807 case spv::OpFUnordLessThanEqual:
5808 case spv::OpFUnordNotEqual:
5809 case spv::OpExtInst:
5810 case spv::OpIsInf:
5811 case spv::OpIsNan:
5812 case spv::OpAny:
5813 case spv::OpAll:
5814 case spv::OpUndef:
5815 case spv::OpConstantNull:
5816 case spv::OpLogicalOr:
5817 case spv::OpLogicalAnd:
5818 case spv::OpLogicalNot:
5819 case spv::OpLogicalNotEqual:
5820 case spv::OpConstantComposite:
5821 case spv::OpSpecConstantComposite:
5822 case spv::OpConstantTrue:
5823 case spv::OpConstantFalse:
5824 case spv::OpConstant:
5825 case spv::OpSpecConstant:
5826 case spv::OpVariable:
5827 case spv::OpFunctionCall:
5828 case spv::OpSampledImage:
5829 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005830 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005831 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005832 case spv::OpSelect:
5833 case spv::OpPhi:
5834 case spv::OpLoad:
5835 case spv::OpAtomicIAdd:
5836 case spv::OpAtomicISub:
5837 case spv::OpAtomicExchange:
5838 case spv::OpAtomicIIncrement:
5839 case spv::OpAtomicIDecrement:
5840 case spv::OpAtomicCompareExchange:
5841 case spv::OpAtomicUMin:
5842 case spv::OpAtomicSMin:
5843 case spv::OpAtomicUMax:
5844 case spv::OpAtomicSMax:
5845 case spv::OpAtomicAnd:
5846 case spv::OpAtomicOr:
5847 case spv::OpAtomicXor:
5848 case spv::OpDot: {
5849 WriteWordCountAndOpcode(Inst);
5850 WriteOperand(Ops[0]);
5851 WriteResultID(Inst);
5852 for (uint32_t i = 1; i < Ops.size(); i++) {
5853 WriteOperand(Ops[i]);
5854 }
5855 break;
5856 }
5857 }
5858 }
5859}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005860
alan-bakerb6b09dc2018-11-08 16:59:28 -05005861bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005862 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005863 case Type::HalfTyID:
5864 case Type::FloatTyID:
5865 case Type::DoubleTyID:
5866 case Type::IntegerTyID:
5867 case Type::VectorTyID:
5868 return true;
5869 case Type::PointerTyID: {
5870 const PointerType *pointer_type = cast<PointerType>(type);
5871 if (pointer_type->getPointerAddressSpace() !=
5872 AddressSpace::UniformConstant) {
5873 auto pointee_type = pointer_type->getPointerElementType();
5874 if (pointee_type->isStructTy() &&
5875 cast<StructType>(pointee_type)->isOpaque()) {
5876 // Images and samplers are not nullable.
5877 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005878 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005879 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005880 return true;
5881 }
5882 case Type::ArrayTyID:
5883 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5884 case Type::StructTyID: {
5885 const StructType *struct_type = cast<StructType>(type);
5886 // Images and samplers are not nullable.
5887 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005888 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005889 for (const auto element : struct_type->elements()) {
5890 if (!IsTypeNullable(element))
5891 return false;
5892 }
5893 return true;
5894 }
5895 default:
5896 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005897 }
5898}
Alan Bakerfcda9482018-10-02 17:09:59 -04005899
5900void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5901 if (auto *offsets_md =
5902 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5903 // Metadata is stored as key-value pair operands. The first element of each
5904 // operand is the type and the second is a vector of offsets.
5905 for (const auto *operand : offsets_md->operands()) {
5906 const auto *pair = cast<MDTuple>(operand);
5907 auto *type =
5908 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5909 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5910 std::vector<uint32_t> offsets;
5911 for (const Metadata *offset_md : offset_vector->operands()) {
5912 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005913 offsets.push_back(static_cast<uint32_t>(
5914 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005915 }
5916 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5917 }
5918 }
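  // Illustrative shape of one offsets operand (assumed, derived from the
  // casts above): { <constant of the remapped type>, !{i32 0, i32 16, ...} },
  // i.e. the key is the operand's type and the value is the list of byte
  // offsets of its members.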
5919
5920 if (auto *sizes_md =
5921 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5922 // Metadata is stored as key-value pair operands. The first element of each
5923 // operand is the type and the second is a triple of sizes: type size in
5924 // bits, store size and alloc size.
5925 for (const auto *operand : sizes_md->operands()) {
5926 const auto *pair = cast<MDTuple>(operand);
5927 auto *type =
5928 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5929 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5930 uint64_t type_size_in_bits =
5931 cast<ConstantInt>(
5932 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5933 ->getZExtValue();
5934 uint64_t type_store_size =
5935 cast<ConstantInt>(
5936 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5937 ->getZExtValue();
5938 uint64_t type_alloc_size =
5939 cast<ConstantInt>(
5940 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5941 ->getZExtValue();
5942 RemappedUBOTypeSizes.insert(std::make_pair(
5943 type, std::make_tuple(type_size_in_bits, type_store_size,
5944 type_alloc_size)));
5945 }
5946 }
5947}
5948
5949uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5950 const DataLayout &DL) {
5951 auto iter = RemappedUBOTypeSizes.find(type);
5952 if (iter != RemappedUBOTypeSizes.end()) {
5953 return std::get<0>(iter->second);
5954 }
5955
5956 return DL.getTypeSizeInBits(type);
5957}
5958
5959uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5960 auto iter = RemappedUBOTypeSizes.find(type);
5961 if (iter != RemappedUBOTypeSizes.end()) {
5962 return std::get<1>(iter->second);
5963 }
5964
5965 return DL.getTypeStoreSize(type);
5966}
5967
5968uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5969 auto iter = RemappedUBOTypeSizes.find(type);
5970 if (iter != RemappedUBOTypeSizes.end()) {
5971 return std::get<2>(iter->second);
5972 }
5973
5974 return DL.getTypeAllocSize(type);
5975}
alan-baker5b86ed72019-02-15 08:26:50 -05005976
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005977void SPIRVProducerPass::setVariablePointersCapabilities(
5978 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005979 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5980 setVariablePointersStorageBuffer(true);
5981 } else {
5982 setVariablePointers(true);
5983 }
5984}
5985
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005986Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005987 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5988 return GetBasePointer(gep->getPointerOperand());
5989 }
5990
5991 // Conservatively return |v|.
5992 return v;
5993}
5994
5995bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5996 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5997 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5998 if (lhs_call->getCalledFunction()->getName().startswith(
5999 clspv::ResourceAccessorFunction()) &&
6000 rhs_call->getCalledFunction()->getName().startswith(
6001 clspv::ResourceAccessorFunction())) {
6002 // For resource accessors, match descriptor set and binding.
6003 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6004 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6005 return true;
6006 } else if (lhs_call->getCalledFunction()->getName().startswith(
6007 clspv::WorkgroupAccessorFunction()) &&
6008 rhs_call->getCalledFunction()->getName().startswith(
6009 clspv::WorkgroupAccessorFunction())) {
6010 // For workgroup resources, match spec id.
6011 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6012 return true;
6013 }
6014 }
6015 }
6016
6017 return false;
6018}
6019
6020bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6021 assert(inst->getType()->isPointerTy());
6022 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6023 spv::StorageClassStorageBuffer);
6024 const bool hack_undef = clspv::Option::HackUndef();
6025 if (auto *select = dyn_cast<SelectInst>(inst)) {
6026 auto *true_base = GetBasePointer(select->getTrueValue());
6027 auto *false_base = GetBasePointer(select->getFalseValue());
6028
6029 if (true_base == false_base)
6030 return true;
6031
6032 // If either the true or false operand is a null, then we satisfy the same
6033 // object constraint.
6034 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6035 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6036 return true;
6037 }
6038
6039 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6040 if (false_cst->isNullValue() ||
6041 (hack_undef && isa<UndefValue>(false_base)))
6042 return true;
6043 }
6044
6045 if (sameResource(true_base, false_base))
6046 return true;
6047 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6048 Value *value = nullptr;
6049 bool ok = true;
6050 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6051 auto *base = GetBasePointer(phi->getIncomingValue(i));
6052 // Null values satisfy the constraint of selecting from the same
6053 // object.
6054 if (!value) {
6055 if (auto *cst = dyn_cast<Constant>(base)) {
6056 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6057 value = base;
6058 } else {
6059 value = base;
6060 }
6061 } else if (base != value) {
6062 if (auto *base_cst = dyn_cast<Constant>(base)) {
6063 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6064 continue;
6065 }
6066
6067 if (sameResource(value, base))
6068 continue;
6069
6070 // Values don't represent the same base.
6071 ok = false;
6072 }
6073 }
6074
6075 return ok;
6076 }
6077
6078 // Conservatively return false.
6079 return false;
6080}
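// Summary (added comment): a select or phi over storage-buffer pointers
// avoids requiring the VariablePointers capability only when every incoming
// pointer is based on the same object, is null, or (with the HackUndef
// option) is undef; otherwise the caller in HandleDeferredInstruction turns
// the capability on.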
alan-bakere9308012019-03-15 10:25:13 -04006081
6082bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6083 if (!Arg.getType()->isPointerTy() ||
6084 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6085 // Only SSBOs need to be annotated as coherent.
6086 return false;
6087 }
6088
6089 DenseSet<Value *> visited;
6090 std::vector<Value *> stack;
6091 for (auto *U : Arg.getParent()->users()) {
6092 if (auto *call = dyn_cast<CallInst>(U)) {
6093 stack.push_back(call->getOperand(Arg.getArgNo()));
6094 }
6095 }
6096
6097 while (!stack.empty()) {
6098 Value *v = stack.back();
6099 stack.pop_back();
6100
6101 if (!visited.insert(v).second)
6102 continue;
6103
6104 auto *resource_call = dyn_cast<CallInst>(v);
6105 if (resource_call &&
6106 resource_call->getCalledFunction()->getName().startswith(
6107 clspv::ResourceAccessorFunction())) {
6108 // If this is a resource accessor function, check if the coherent operand
6109 // is set.
6110 const auto coherent =
6111 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6112 ->getZExtValue());
6113 if (coherent == 1)
6114 return true;
6115 } else if (auto *arg = dyn_cast<Argument>(v)) {
6116 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006117 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006118 if (auto *call = dyn_cast<CallInst>(U)) {
6119 stack.push_back(call->getOperand(arg->getArgNo()));
6120 }
6121 }
6122 } else if (auto *user = dyn_cast<User>(v)) {
6123 // If this is a user, traverse all operands that could lead to resource
6124 // variables.
6125 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6126 Value *operand = user->getOperand(i);
6127 if (operand->getType()->isPointerTy() &&
6128 operand->getType()->getPointerAddressSpace() ==
6129 clspv::AddressSpace::Global) {
6130 stack.push_back(operand);
6131 }
6132 }
6133 }
6134 }
6135
6136 // No coherent resource variables encountered.
6137 return false;
6138}
alan-baker06cad652019-12-03 17:56:47 -05006139
6140void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6141 // First, track loop merges and continues.
6142 DenseSet<BasicBlock *> LoopMergesAndContinues;
6143 for (auto &F : module) {
6144 if (F.isDeclaration())
6145 continue;
6146
6147 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6148 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6149 std::deque<BasicBlock *> order;
6150 DenseSet<BasicBlock *> visited;
6151 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6152
6153 for (auto BB : order) {
6154 auto terminator = BB->getTerminator();
6155 auto branch = dyn_cast<BranchInst>(terminator);
6156 if (LI.isLoopHeader(BB)) {
6157 auto L = LI.getLoopFor(BB);
6158 BasicBlock *ContinueBB = nullptr;
6159 BasicBlock *MergeBB = nullptr;
6160
6161 MergeBB = L->getExitBlock();
6162 if (!MergeBB) {
6163 // The StructurizeCFG pass converts the CFG into a triangle shape
6164 // where every region has a single entry and exit. As a result, a
6165 // loop should not have multiple exits.
6166 llvm_unreachable("Loop has multiple exits???");
6167 }
6168
6169 if (L->isLoopLatch(BB)) {
6170 ContinueBB = BB;
6171 } else {
6172 // From SPIR-V spec section 2.11, the Continue Target must dominate
6173 // the back-edge block.
6174 BasicBlock *Header = L->getHeader();
6175 BasicBlock *Latch = L->getLoopLatch();
6176 for (auto *loop_block : L->blocks()) {
6177 if (loop_block == Header) {
6178 continue;
6179 }
6180
6181 // Check whether this block dominates the block with the back-edge.
6182 // The loop latch is the single block with a back-edge. If it was
6183 // possible, StructurizeCFG made the loop conform to this
6184 // requirement; otherwise |Latch| is a nullptr.
6185 if (DT.dominates(loop_block, Latch)) {
6186 ContinueBB = loop_block;
6187 }
6188 }
6189
6190 if (!ContinueBB) {
6191 llvm_unreachable("Wrong continue block from loop");
6192 }
6193 }
6194
6195 // Record the continue and merge blocks.
6196 MergeBlocks[BB] = MergeBB;
6197 ContinueBlocks[BB] = ContinueBB;
6198 LoopMergesAndContinues.insert(MergeBB);
6199 LoopMergesAndContinues.insert(ContinueBB);
6200 } else if (branch && branch->isConditional()) {
6201 auto L = LI.getLoopFor(BB);
6202 bool HasBackedge = false;
6203 while (L && !HasBackedge) {
6204 if (L->isLoopLatch(BB)) {
6205 HasBackedge = true;
6206 }
6207 L = L->getParentLoop();
6208 }
6209
6210 if (!HasBackedge) {
6211 // Only need a merge if the branch doesn't include a loop break or
6212 // continue.
6213 auto true_bb = branch->getSuccessor(0);
6214 auto false_bb = branch->getSuccessor(1);
6215 if (!LoopMergesAndContinues.count(true_bb) &&
6216 !LoopMergesAndContinues.count(false_bb)) {
6217 // The StructurizeCFG pass has already manipulated the CFG. Just use
6218 // the false successor of the branch as the merge block.
6219 MergeBlocks[BB] = false_bb;
6220 }
6221 }
6222 }
6223 }
6224 }
6225}
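// Worked example (illustrative): for a simple counted loop after
// StructurizeCFG, the header's merge block is the unique loop exit and its
// continue target is the latch (or the header itself when the header is the
// latch); a conditional branch that is not a latch and whose successors are
// not loop merges or continues records its false successor as its selection
// merge block.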