// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
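      // The byte length plus one NUL byte is rounded up to a whole number of
      // 32-bit words: (size + 1 + 3) / 4 == (size + 4) / 4.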
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}

struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the single
  // operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
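  // The word count is 1 for the combined word-count/opcode word, plus 1 more
  // when the instruction has a result ID, plus the word count of each operand,
  // as accumulated by the constructors above.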
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
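    // A pointer outside UniformConstant whose pointee is an opaque struct is
    // keyed in TypeMap by the pointee struct type, so look that up instead.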
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_")) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
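    // Repack each group of four bytes from the temporary byte stream into a
    // 32-bit word, treating the first byte as the least significant.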
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
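  // The SPIR-V header is five words: magic number, version, generator magic
  // (vendor/tool ID), ID bound, and the reserved schema word.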
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
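  // The registered tool ID occupies the upper 16 bits of the generator word;
  // the lower 16 bits (the tool version) are left at zero here.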
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates extra LLVM IR for each function, such as global
  // variables for arguments, constants, and pointer types for argument
  // access. This information is artificial, existing only because we need
  // Vulkan SPIR-V output. It is executed ahead of FindType and FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (clspv::IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components = dim;
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact that the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
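          // The call's leading arguments encode the descriptor set, binding,
          // argument kind, and argument index; operand 5 carries the coherent
          // flag.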
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : M) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

bool SPIRVProducerPass::FindExtInst(Module &M) {
  LLVMContext &Context = M.getContext();
  bool HasExtInst = false;

  for (Function &F : M) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
1132 // Check whether this call is to an extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001133 auto callee_name = Callee->getName();
1134 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1135 const glsl::ExtInst IndirectEInst =
1136 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001137
David Neto3fbb4072017-10-16 11:28:14 -04001138 HasExtInst |=
1139 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1140
1141 if (IndirectEInst) {
1142 // Register extra constants if needed.
1143
1144 // Registers a type and constant for computing the result of the
1145 // given instruction. If the result of the instruction is a vector,
1146 // then make a splat vector constant with the same number of
1147 // elements.
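            // For example, if the instruction produces a float4, both the
            // scalar constant and its <4 x float> splat are registered.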
1148 auto register_constant = [this, &I](Constant *constant) {
1149 FindType(constant->getType());
1150 FindConstant(constant);
1151 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1152 // Register the splat vector of the value with the same
1153 // width as the result of the instruction.
1154 auto *vec_constant = ConstantVector::getSplat(
1155 static_cast<unsigned>(vectorTy->getNumElements()),
1156 constant);
1157 FindConstant(vec_constant);
1158 FindType(vec_constant->getType());
1159 }
1160 };
1161 switch (IndirectEInst) {
1162 case glsl::ExtInstFindUMsb:
1163 // clz needs OpExtInst and OpISub with constant 31, or splat
1164 // vector of 31. Add it to the constant list here.
1165 register_constant(
1166 ConstantInt::get(Type::getInt32Ty(Context), 31));
1167 break;
1168 case glsl::ExtInstAcos:
1169 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001170 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001171 case glsl::ExtInstAtan2:
1172 // We need 1/pi for acospi, asinpi, atanpi, and atan2pi.
1173 register_constant(
1174 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1175 break;
1176 default:
1177 assert(false && "internally inconsistent");
1178 }
David Neto22f144c2017-06-12 14:26:21 -04001179 }
1180 }
1181 }
1182 }
1183 }
1184
1185 return HasExtInst;
1186}
1187
1188void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1189 // Investigate global variable's type.
1190 FindType(GV.getType());
1191}
1192
1193void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1194 // Investigate function's type.
1195 FunctionType *FTy = F.getFunctionType();
1196
1197 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1198 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001199 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001200 if (GlobalConstFuncTyMap.count(FTy)) {
1201 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1202 SmallVector<Type *, 4> NewFuncParamTys;
1203 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1204 Type *ParamTy = FTy->getParamType(i);
1205 if (i == GVCstArgIdx) {
1206 Type *EleTy = ParamTy->getPointerElementType();
1207 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1208 }
1209
1210 NewFuncParamTys.push_back(ParamTy);
1211 }
1212
1213 FunctionType *NewFTy =
1214 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1215 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1216 FTy = NewFTy;
1217 }
1218
1219 FindType(FTy);
1220 } else {
1221 // Kernel functions take no parameters in the generated SPIR-V, so create a
1222 // new parameterless function type and add it to the type map.
1223 SmallVector<Type *, 4> NewFuncParamTys;
1224 FunctionType *NewFTy =
1225 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1226 FindType(NewFTy);
1227 }
1228
1229 // Investigate instructions' type in function body.
1230 for (BasicBlock &BB : F) {
1231 for (Instruction &I : BB) {
1232 if (isa<ShuffleVectorInst>(I)) {
1233 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1234 // Ignore type for mask of shuffle vector instruction.
1235 if (i == 2) {
1236 continue;
1237 }
1238
1239 Value *Op = I.getOperand(i);
1240 if (!isa<MetadataAsValue>(Op)) {
1241 FindType(Op->getType());
1242 }
1243 }
1244
1245 FindType(I.getType());
1246 continue;
1247 }
1248
David Neto862b7d82018-06-14 18:48:37 -04001249 CallInst *Call = dyn_cast<CallInst>(&I);
1250
1251 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001252 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001253 // This is a fake call representing access to a resource variable.
1254 // We handle that elsewhere.
1255 continue;
1256 }
1257
Alan Baker202c8c72018-08-13 13:47:44 -04001258 if (Call && Call->getCalledFunction()->getName().startswith(
1259 clspv::WorkgroupAccessorFunction())) {
1260 // This is a fake call representing access to a workgroup variable.
1261 // We handle that elsewhere.
1262 continue;
1263 }
1264
alan-bakerf083bed2020-01-29 08:15:42 -05001265 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1266 // OpCompositeExtract, which take literal values for indices. As a result,
1267 // don't map the types of the indices.
1268 if (I.getOpcode() == Instruction::ExtractValue) {
1269 FindType(I.getOperand(0)->getType());
1270 continue;
1271 }
1272 if (I.getOpcode() == Instruction::InsertValue) {
1273 FindType(I.getOperand(0)->getType());
1274 FindType(I.getOperand(1)->getType());
1275 continue;
1276 }
1277
1278 // #497: InsertElement and ExtractElement map to OpCompositeInsert and
1279 // OpCompositeExtract when the index is constant; don't map the index type.
1280 if (I.getOpcode() == Instruction::ExtractElement) {
1281 FindType(I.getOperand(0)->getType());
1282 Value *op1 = I.getOperand(1);
1283 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1284 FindType(op1->getType());
1285 }
1286 continue;
1287 }
1288 if (I.getOpcode() == Instruction::InsertElement) {
1289 FindType(I.getOperand(0)->getType());
1290 FindType(I.getOperand(1)->getType());
1291 Value *op2 = I.getOperand(2);
1292 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1293 FindType(op2->getType());
1294 }
1295 continue;
1296 }
1297
David Neto22f144c2017-06-12 14:26:21 -04001298 // Work through the operands of the instruction.
1299 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1300 Value *const Op = I.getOperand(i);
1301 // If any of the operands is a constant, find the type!
1302 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1303 FindType(Op->getType());
1304 }
1305 }
1306
1307 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001308 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001309 // Don't check the call instruction's type.
1310 break;
1311 }
Alan Baker202c8c72018-08-13 13:47:44 -04001312 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1313 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1314 clspv::WorkgroupAccessorFunction())) {
1315 // This is a fake call representing access to a workgroup variable.
1316 // We handle that elsewhere.
1317 continue;
1318 }
1319 }
David Neto22f144c2017-06-12 14:26:21 -04001320 if (!isa<MetadataAsValue>(&Op)) {
1321 FindType(Op->getType());
1322 continue;
1323 }
1324 }
1325
David Neto22f144c2017-06-12 14:26:21 -04001326 // We don't want to track the type of this call as we are going to replace
1327 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001328 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001329 Call->getCalledFunction()->getName())) {
1330 continue;
1331 }
1332
1333 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1334 // If gep's base operand has ModuleScopePrivate address space, make gep
1335 // return ModuleScopePrivate address space.
1336 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1337 // Add pointer type with private address space for global constant to
1338 // type list.
1339 Type *EleTy = I.getType()->getPointerElementType();
1340 Type *NewPTy =
1341 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1342
1343 FindType(NewPTy);
1344 continue;
1345 }
1346 }
1347
1348 FindType(I.getType());
1349 }
1350 }
1351}
1352
David Neto862b7d82018-06-14 18:48:37 -04001353void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1354 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001355 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001356 0 < getSamplerMap().size()) {
1357 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1358 if (!SamplerStructTy) {
1359 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1360 }
1361
1362 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1363
1364 FindType(SamplerTy);
1365 }
1366}
1367
1368void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1369 // Record types so they are generated.
1370 TypesNeedingLayout.reset();
1371 StructTypesNeedingBlock.reset();
1372
1373 // To match older clspv codegen, generate the float type first if required
1374 // for images.
1375 for (const auto *info : ModuleOrderedResourceVars) {
1376 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1377 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001378 if (IsIntImageType(info->var_fn->getReturnType())) {
1379 // Nothing for now...
1380 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1381 FindType(Type::getInt32Ty(M.getContext()));
1382 }
1383
1384 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001385 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001386 }
1387 }
1388
1389 for (const auto *info : ModuleOrderedResourceVars) {
1390 Type *type = info->var_fn->getReturnType();
1391
1392 switch (info->arg_kind) {
1393 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001394 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001395 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1396 StructTypesNeedingBlock.insert(sty);
1397 } else {
1398 errs() << *type << "\n";
1399 llvm_unreachable("Buffer arguments must map to structures!");
1400 }
1401 break;
1402 case clspv::ArgKind::Pod:
1403 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1404 StructTypesNeedingBlock.insert(sty);
1405 } else {
1406 errs() << *type << "\n";
1407 llvm_unreachable("POD arguments must map to structures!");
1408 }
1409 break;
1410 case clspv::ArgKind::ReadOnlyImage:
1411 case clspv::ArgKind::WriteOnlyImage:
1412 case clspv::ArgKind::Sampler:
1413 // Sampler and image types map to the pointee type but
1414 // in the uniform constant address space.
1415 type = PointerType::get(type->getPointerElementType(),
1416 clspv::AddressSpace::UniformConstant);
1417 break;
1418 default:
1419 break;
1420 }
1421
1422 // The converted type is the type of the OpVariable we will generate.
1423 // If the pointee type is an array of size zero, FindType will convert it
1424 // to a runtime array.
1425 FindType(type);
1426 }
1427
alan-bakerdcd97412019-09-16 15:32:30 -04001428 // If module constants are clustered in a storage buffer then that struct
1429 // needs layout decorations.
1430 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1431 for (GlobalVariable &GV : M.globals()) {
1432 PointerType *PTy = cast<PointerType>(GV.getType());
1433 const auto AS = PTy->getAddressSpace();
1434 const bool module_scope_constant_external_init =
1435 (AS == AddressSpace::Constant) && GV.hasInitializer();
1436 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1437 if (module_scope_constant_external_init &&
1438 spv::BuiltInMax == BuiltinType) {
1439 StructTypesNeedingBlock.insert(
1440 cast<StructType>(PTy->getPointerElementType()));
1441 }
1442 }
1443 }
1444
David Neto862b7d82018-06-14 18:48:37 -04001445 // Traverse the arrays and structures underneath each Block, and
1446 // mark them as needing layout.
1447 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1448 StructTypesNeedingBlock.end());
1449 while (!work_list.empty()) {
1450 Type *type = work_list.back();
1451 work_list.pop_back();
1452 TypesNeedingLayout.insert(type);
1453 switch (type->getTypeID()) {
1454 case Type::ArrayTyID:
1455 work_list.push_back(type->getArrayElementType());
1456 if (!Hack_generate_runtime_array_stride_early) {
1457 // Remember this array type for deferred decoration.
1458 TypesNeedingArrayStride.insert(type);
1459 }
1460 break;
1461 case Type::StructTyID:
1462 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1463 work_list.push_back(elem_ty);
1464 }
1465 default:
1466 // This type and its contained types don't get layout.
1467 break;
1468 }
1469 }
1470}
1471
Alan Baker202c8c72018-08-13 13:47:44 -04001472void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1473 // The SpecId assignment for pointer-to-local arguments is recorded in
1474 // module-level metadata. Translate that information into local argument
1475 // information.
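  // Each operand is assumed to be a tuple of the rough form
  //   !{<kernel function>, i32 <arg index>, i32 <spec id>}
  // (an illustrative sketch; only the three operand positions read below
  // are relied upon).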
1476 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001477 if (!nmd)
1478 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001479 for (auto operand : nmd->operands()) {
1480 MDTuple *tuple = cast<MDTuple>(operand);
1481 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1482 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001483 ConstantAsMetadata *arg_index_md =
1484 cast<ConstantAsMetadata>(tuple->getOperand(1));
1485 int arg_index = static_cast<int>(
1486 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1487 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001488
1489 ConstantAsMetadata *spec_id_md =
1490 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001491 int spec_id = static_cast<int>(
1492 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001493
1494 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1495 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001496 if (LocalSpecIdInfoMap.count(spec_id))
1497 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001498
1499 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1500 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1501 nextID + 1, nextID + 2,
1502 nextID + 3, spec_id};
1503 LocalSpecIdInfoMap[spec_id] = info;
1504 nextID += 4;
1505
1506 // Ensure the types necessary for this argument get generated.
1507 Type *IdxTy = Type::getInt32Ty(M.getContext());
1508 FindConstant(ConstantInt::get(IdxTy, 0));
1509 FindType(IdxTy);
1510 FindType(arg->getType());
1511 }
1512}
1513
David Neto22f144c2017-06-12 14:26:21 -04001514void SPIRVProducerPass::FindType(Type *Ty) {
1515 TypeList &TyList = getTypeList();
1516
1517 if (0 != TyList.idFor(Ty)) {
1518 return;
1519 }
1520
1521 if (Ty->isPointerTy()) {
1522 auto AddrSpace = Ty->getPointerAddressSpace();
1523 if ((AddressSpace::Constant == AddrSpace) ||
1524 (AddressSpace::Global == AddrSpace)) {
1525 auto PointeeTy = Ty->getPointerElementType();
1526
1527 if (PointeeTy->isStructTy() &&
1528 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1529 FindType(PointeeTy);
1530 auto ActualPointerTy =
1531 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1532 FindType(ActualPointerTy);
1533 return;
1534 }
1535 }
1536 }
1537
David Neto862b7d82018-06-14 18:48:37 -04001538 // By convention, LLVM array type with 0 elements will map to
1539 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1540 // has a constant number of elements. We also need the type of that
1541 // length constant.
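  // For example, [0 x float] is treated as an OpTypeRuntimeArray of float,
  // while [16 x float] becomes an OpTypeArray whose length operand is an
  // i32 constant, which is why the i32 type is registered below.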
1542 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1543 if (arrayTy->getNumElements() > 0) {
1544 LLVMContext &Context = Ty->getContext();
1545 FindType(Type::getInt32Ty(Context));
1546 }
David Neto22f144c2017-06-12 14:26:21 -04001547 }
1548
1549 for (Type *SubTy : Ty->subtypes()) {
1550 FindType(SubTy);
1551 }
1552
1553 TyList.insert(Ty);
1554}
1555
1556void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1557 // If the global variable has a (non undef) initializer.
1558 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001559 // Generate the constant if it's not the initializer to a module scope
1560 // constant that we will expect in a storage buffer.
1561 const bool module_scope_constant_external_init =
1562 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1563 clspv::Option::ModuleConstantsInStorageBuffer();
1564 if (!module_scope_constant_external_init) {
1565 FindConstant(GV.getInitializer());
1566 }
David Neto22f144c2017-06-12 14:26:21 -04001567 }
1568}
1569
1570void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1571 // Investigate constants in function body.
1572 for (BasicBlock &BB : F) {
1573 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001574 if (auto *call = dyn_cast<CallInst>(&I)) {
1575 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001576 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001577 // We've handled these constants elsewhere, so skip it.
1578 continue;
1579 }
Alan Baker202c8c72018-08-13 13:47:44 -04001580 if (name.startswith(clspv::ResourceAccessorFunction())) {
1581 continue;
1582 }
1583 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001584 continue;
1585 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001586 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1587 // Skip the first operand that has the SPIR-V Opcode
1588 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1589 if (isa<Constant>(I.getOperand(i)) &&
1590 !isa<GlobalValue>(I.getOperand(i))) {
1591 FindConstant(I.getOperand(i));
1592 }
1593 }
1594 continue;
1595 }
David Neto22f144c2017-06-12 14:26:21 -04001596 }
1597
1598 if (isa<AllocaInst>(I)) {
1599 // An alloca instruction has a constant for the number of elements; ignore it.
1600 continue;
1601 } else if (isa<ShuffleVectorInst>(I)) {
1602 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1603 // Ignore constant for mask of shuffle vector instruction.
1604 if (i == 2) {
1605 continue;
1606 }
1607
1608 if (isa<Constant>(I.getOperand(i)) &&
1609 !isa<GlobalValue>(I.getOperand(i))) {
1610 FindConstant(I.getOperand(i));
1611 }
1612 }
1613
1614 continue;
1615 } else if (isa<InsertElementInst>(I)) {
1616 // Handle InsertElement with <4 x i8> specially.
1617 Type *CompositeTy = I.getOperand(0)->getType();
1618 if (is4xi8vec(CompositeTy)) {
1619 LLVMContext &Context = CompositeTy->getContext();
1620 if (isa<Constant>(I.getOperand(0))) {
1621 FindConstant(I.getOperand(0));
1622 }
1623
1624 if (isa<Constant>(I.getOperand(1))) {
1625 FindConstant(I.getOperand(1));
1626 }
1627
1628 // Add mask constant 0xFF.
1629 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1630 FindConstant(CstFF);
1631
1632 // Add shift amount constant.
1633 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1634 uint64_t Idx = CI->getZExtValue();
1635 Constant *CstShiftAmount =
1636 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1637 FindConstant(CstShiftAmount);
1638 }
1639
1640 continue;
1641 }
1642
1643 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1644 // Ignore constant for index of InsertElement instruction.
1645 if (i == 2) {
1646 continue;
1647 }
1648
1649 if (isa<Constant>(I.getOperand(i)) &&
1650 !isa<GlobalValue>(I.getOperand(i))) {
1651 FindConstant(I.getOperand(i));
1652 }
1653 }
1654
1655 continue;
1656 } else if (isa<ExtractElementInst>(I)) {
1657 // Handle ExtractElement with <4 x i8> specially.
1658 Type *CompositeTy = I.getOperand(0)->getType();
1659 if (is4xi8vec(CompositeTy)) {
1660 LLVMContext &Context = CompositeTy->getContext();
1661 if (isa<Constant>(I.getOperand(0))) {
1662 FindConstant(I.getOperand(0));
1663 }
1664
1665 // Add mask constant 0xFF.
1666 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1667 FindConstant(CstFF);
1668
1669 // Add shift amount constant.
1670 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1671 uint64_t Idx = CI->getZExtValue();
1672 Constant *CstShiftAmount =
1673 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1674 FindConstant(CstShiftAmount);
1675 } else {
1676 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1677 FindConstant(Cst8);
1678 }
1679
1680 continue;
1681 }
1682
1683 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1684 // Ignore constant for index of ExtractElement instruction.
1685 if (i == 1) {
1686 continue;
1687 }
1688
1689 if (isa<Constant>(I.getOperand(i)) &&
1690 !isa<GlobalValue>(I.getOperand(i))) {
1691 FindConstant(I.getOperand(i));
1692 }
1693 }
1694
1695 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001696 } else if ((Instruction::Xor == I.getOpcode()) &&
1697 I.getType()->isIntegerTy(1)) {
1698 // Special-case Xor where the type is i1 and one of the arguments is a
1699 // constant 1 (true): this becomes OpLogicalNot in SPIR-V, so we don't
1700 // need the constant.
David Neto22f144c2017-06-12 14:26:21 -04001701 bool foundConstantTrue = false;
1702 for (Use &Op : I.operands()) {
1703 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1704 auto CI = cast<ConstantInt>(Op);
1705
1706 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001707 // If we already found the true constant, we might (probably only
1708 // on -O0) have an OpLogicalNot which is taking a constant
1709 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001710 FindConstant(Op);
1711 } else {
1712 foundConstantTrue = true;
1713 }
1714 }
1715 }
1716
1717 continue;
David Netod2de94a2017-08-28 17:27:47 -04001718 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001719 // Special case if i8 is not generally handled.
1720 if (!clspv::Option::Int8Support()) {
1721 // For truncation to i8 we mask against 255.
1722 Type *ToTy = I.getType();
1723 if (8u == ToTy->getPrimitiveSizeInBits()) {
1724 LLVMContext &Context = ToTy->getContext();
1725 Constant *Cst255 =
1726 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1727 FindConstant(Cst255);
1728 }
David Netod2de94a2017-08-28 17:27:47 -04001729 }
Neil Henning39672102017-09-29 14:33:13 +01001730 } else if (isa<AtomicRMWInst>(I)) {
1731 LLVMContext &Context = I.getContext();
1732
1733 FindConstant(
1734 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1735 FindConstant(ConstantInt::get(
1736 Type::getInt32Ty(Context),
1737 spv::MemorySemanticsUniformMemoryMask |
1738 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001739 }
1740
1741 for (Use &Op : I.operands()) {
1742 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1743 FindConstant(Op);
1744 }
1745 }
1746 }
1747 }
1748}
1749
1750void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001751 ValueList &CstList = getConstantList();
1752
David Netofb9a7972017-08-25 17:08:24 -04001753 // If V is already tracked, ignore it.
1754 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001755 return;
1756 }
1757
David Neto862b7d82018-06-14 18:48:37 -04001758 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1759 return;
1760 }
1761
David Neto22f144c2017-06-12 14:26:21 -04001762 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001763 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001764
1765 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001766 if (is4xi8vec(CstTy)) {
1767 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001768 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001769 }
1770 }
1771
1772 if (Cst->getNumOperands()) {
1773 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1774 ++I) {
1775 FindConstant(*I);
1776 }
1777
David Netofb9a7972017-08-25 17:08:24 -04001778 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001779 return;
1780 } else if (const ConstantDataSequential *CDS =
1781 dyn_cast<ConstantDataSequential>(Cst)) {
1782 // Add constants for each element to constant list.
1783 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1784 Constant *EleCst = CDS->getElementAsConstant(i);
1785 FindConstant(EleCst);
1786 }
1787 }
1788
1789 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001790 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001791 }
1792}
1793
1794spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1795 switch (AddrSpace) {
1796 default:
1797 llvm_unreachable("Unsupported OpenCL address space");
1798 case AddressSpace::Private:
1799 return spv::StorageClassFunction;
1800 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001801 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001802 case AddressSpace::Constant:
1803 return clspv::Option::ConstantArgsInUniformBuffer()
1804 ? spv::StorageClassUniform
1805 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001806 case AddressSpace::Input:
1807 return spv::StorageClassInput;
1808 case AddressSpace::Local:
1809 return spv::StorageClassWorkgroup;
1810 case AddressSpace::UniformConstant:
1811 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001812 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001813 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001814 case AddressSpace::ModuleScopePrivate:
1815 return spv::StorageClassPrivate;
1816 }
1817}
1818
David Neto862b7d82018-06-14 18:48:37 -04001819spv::StorageClass
1820SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1821 switch (arg_kind) {
1822 case clspv::ArgKind::Buffer:
1823 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001824 case clspv::ArgKind::BufferUBO:
1825 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001826 case clspv::ArgKind::Pod:
1827 return clspv::Option::PodArgsInUniformBuffer()
1828 ? spv::StorageClassUniform
1829 : spv::StorageClassStorageBuffer;
1830 case clspv::ArgKind::Local:
1831 return spv::StorageClassWorkgroup;
1832 case clspv::ArgKind::ReadOnlyImage:
1833 case clspv::ArgKind::WriteOnlyImage:
1834 case clspv::ArgKind::Sampler:
1835 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001836 default:
1837 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001838 }
1839}
1840
David Neto22f144c2017-06-12 14:26:21 -04001841spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1842 return StringSwitch<spv::BuiltIn>(Name)
1843 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1844 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1845 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1846 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1847 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1848 .Default(spv::BuiltInMax);
1849}
1850
1851void SPIRVProducerPass::GenerateExtInstImport() {
1852 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1853 uint32_t &ExtInstImportID = getOpExtInstImportID();
1854
1855 //
1856 // Generate OpExtInstImport.
1857 //
1858 // Ops[0] ... Ops[n] = Name (Literal String)
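  // In disassembled form this is, roughly:
  //   %ExtInstImportID = OpExtInstImport "GLSL.std.450"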
David Neto22f144c2017-06-12 14:26:21 -04001859 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001860 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1861 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001862}
1863
alan-bakerb6b09dc2018-11-08 16:59:28 -05001864void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1865 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001866 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1867 ValueMapType &VMap = getValueMap();
1868 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001869 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001870
1871 // Map for OpTypeRuntimeArray. If an argument has a pointer type, two SPIR-V
1872 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1873 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1874
1875 for (Type *Ty : getTypeList()) {
1876 // Update TypeMap with nextID for reference later.
1877 TypeMap[Ty] = nextID;
1878
1879 switch (Ty->getTypeID()) {
1880 default: {
1881 Ty->print(errs());
1882 llvm_unreachable("Unsupported type???");
1883 break;
1884 }
1885 case Type::MetadataTyID:
1886 case Type::LabelTyID: {
1887 // Ignore these types.
1888 break;
1889 }
1890 case Type::PointerTyID: {
1891 PointerType *PTy = cast<PointerType>(Ty);
1892 unsigned AddrSpace = PTy->getAddressSpace();
1893
1894 // For the purposes of our Vulkan SPIR-V type system, constant and global
1895 // are conflated.
1896 bool UseExistingOpTypePointer = false;
1897 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001898 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1899 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001900 // Check to see if we already created this type (for instance, if we
1901 // had a constant <type>* and a global <type>*, the type would be
1902 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001903 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1904 if (0 < TypeMap.count(GlobalTy)) {
1905 TypeMap[PTy] = TypeMap[GlobalTy];
1906 UseExistingOpTypePointer = true;
1907 break;
1908 }
David Neto22f144c2017-06-12 14:26:21 -04001909 }
1910 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001911 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1912 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001913
alan-bakerb6b09dc2018-11-08 16:59:28 -05001914 // Check to see if we already created this type (for instance, if we
1915 // had a constant <type>* and a global <type>*, the type would be
1916 // created by one of these types, and shared by both).
1917 auto ConstantTy =
1918 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001919 if (0 < TypeMap.count(ConstantTy)) {
1920 TypeMap[PTy] = TypeMap[ConstantTy];
1921 UseExistingOpTypePointer = true;
1922 }
David Neto22f144c2017-06-12 14:26:21 -04001923 }
1924 }
1925
David Neto862b7d82018-06-14 18:48:37 -04001926 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001927
David Neto862b7d82018-06-14 18:48:37 -04001928 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001929 //
1930 // Generate OpTypePointer.
1931 //
1932
1933 // OpTypePointer
1934 // Ops[0] = Storage Class
1935 // Ops[1] = Element Type ID
1936 SPIRVOperandList Ops;
1937
David Neto257c3892018-04-11 13:19:45 -04001938 Ops << MkNum(GetStorageClass(AddrSpace))
1939 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001940
David Neto87846742018-04-11 17:36:22 -04001941 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001942 SPIRVInstList.push_back(Inst);
1943 }
David Neto22f144c2017-06-12 14:26:21 -04001944 break;
1945 }
1946 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001947 StructType *STy = cast<StructType>(Ty);
1948
1949 // Handle sampler type.
1950 if (STy->isOpaque()) {
1951 if (STy->getName().equals("opencl.sampler_t")) {
1952 //
1953 // Generate OpTypeSampler
1954 //
1955 // Empty Ops.
1956 SPIRVOperandList Ops;
1957
David Neto87846742018-04-11 17:36:22 -04001958 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001959 SPIRVInstList.push_back(Inst);
1960 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05001961 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1962 STy->getName().startswith("opencl.image1d_wo_t") ||
1963 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05001964 STy->getName().startswith("opencl.image2d_wo_t") ||
1965 STy->getName().startswith("opencl.image3d_ro_t") ||
1966 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04001967 //
1968 // Generate OpTypeImage
1969 //
1970 // Ops[0] = Sampled Type ID
1971 // Ops[1] = Dim ID
1972 // Ops[2] = Depth (Literal Number)
1973 // Ops[3] = Arrayed (Literal Number)
1974 // Ops[4] = MS (Literal Number)
1975 // Ops[5] = Sampled (Literal Number)
1976 // Ops[6] = Image Format ID
1977 //
1978 SPIRVOperandList Ops;
1979
alan-bakerf67468c2019-11-25 15:51:49 -05001980 uint32_t ImageTyID = nextID++;
1981 uint32_t SampledTyID = 0;
1982 if (STy->getName().contains(".float")) {
1983 SampledTyID = lookupType(Type::getFloatTy(Context));
1984 } else if (STy->getName().contains(".uint")) {
1985 SampledTyID = lookupType(Type::getInt32Ty(Context));
1986 } else if (STy->getName().contains(".int")) {
1987 // Generate a signed 32-bit integer if necessary.
1988 if (int32ID == 0) {
1989 int32ID = nextID++;
1990 SPIRVOperandList intOps;
1991 intOps << MkNum(32);
1992 intOps << MkNum(1);
1993 auto signed_int =
1994 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
1995 SPIRVInstList.push_back(signed_int);
1996 }
1997 SampledTyID = int32ID;
1998
1999 // Generate a vec4 of the signed int if necessary.
2000 if (v4int32ID == 0) {
2001 v4int32ID = nextID++;
2002 SPIRVOperandList vecOps;
2003 vecOps << MkId(int32ID);
2004 vecOps << MkNum(4);
2005 auto int_vec =
2006 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2007 SPIRVInstList.push_back(int_vec);
2008 }
2009 } else {
2010 // This was likely an UndefValue.
2011 SampledTyID = lookupType(Type::getFloatTy(Context));
2012 }
David Neto257c3892018-04-11 13:19:45 -04002013 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002014
2015 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002016 if (STy->getName().startswith("opencl.image1d_ro_t") ||
2017 STy->getName().startswith("opencl.image1d_wo_t")) {
2018 DimID = spv::Dim1D;
2019 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2020 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002021 DimID = spv::Dim3D;
2022 }
David Neto257c3892018-04-11 13:19:45 -04002023 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002024
2025 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002026 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002027
2028 // TODO: Set up Arrayed.
David Neto257c3892018-04-11 13:19:45 -04002029 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002030
2031 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002032 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002033
2034 // TODO: Set up Sampled.
2035 //
2036 // From Spec
2037 //
2038 // 0 indicates this is only known at run time, not at compile time
2039 // 1 indicates will be used with sampler
2040 // 2 indicates will be used without a sampler (a storage image)
2041 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002042 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002043 Sampled = 2;
2044 }
David Neto257c3892018-04-11 13:19:45 -04002045 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002046
2047 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002048 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002049
alan-bakerf67468c2019-11-25 15:51:49 -05002050 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002051 SPIRVInstList.push_back(Inst);
2052 break;
2053 }
2054 }
2055
2056 //
2057 // Generate OpTypeStruct
2058 //
2059 // Ops[0] ... Ops[n] = Member IDs
2060 SPIRVOperandList Ops;
2061
2062 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002063 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002064 }
2065
David Neto22f144c2017-06-12 14:26:21 -04002066 uint32_t STyID = nextID;
2067
alan-bakerb6b09dc2018-11-08 16:59:28 -05002068 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002069 SPIRVInstList.push_back(Inst);
2070
2071 // Generate OpMemberDecorate.
2072 auto DecoInsertPoint =
2073 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2074 [](SPIRVInstruction *Inst) -> bool {
2075 return Inst->getOpcode() != spv::OpDecorate &&
2076 Inst->getOpcode() != spv::OpMemberDecorate &&
2077 Inst->getOpcode() != spv::OpExtInstImport;
2078 });
2079
David Netoc463b372017-08-10 15:32:21 -04002080 const auto StructLayout = DL.getStructLayout(STy);
Alan Bakerfcda9482018-10-02 17:09:59 -04002081 // Search for the correct offsets if this type was remapped.
2082 std::vector<uint32_t> *offsets = nullptr;
2083 auto iter = RemappedUBOTypeOffsets.find(STy);
2084 if (iter != RemappedUBOTypeOffsets.end()) {
2085 offsets = &iter->second;
2086 }
David Netoc463b372017-08-10 15:32:21 -04002087
David Neto862b7d82018-06-14 18:48:37 -04002088 // #error TODO(dneto): Only do this if in TypesNeedingLayout.
David Neto22f144c2017-06-12 14:26:21 -04002089 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2090 MemberIdx++) {
2091 // Ops[0] = Structure Type ID
2092 // Ops[1] = Member Index(Literal Number)
2093 // Ops[2] = Decoration (Offset)
2094 // Ops[3] = Byte Offset (Literal Number)
2095 Ops.clear();
2096
David Neto257c3892018-04-11 13:19:45 -04002097 Ops << MkId(STyID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002098
alan-bakerb6b09dc2018-11-08 16:59:28 -05002099 auto ByteOffset =
2100 static_cast<uint32_t>(StructLayout->getElementOffset(MemberIdx));
Alan Bakerfcda9482018-10-02 17:09:59 -04002101 if (offsets) {
2102 ByteOffset = (*offsets)[MemberIdx];
2103 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05002104 // const auto ByteOffset =
Alan Bakerfcda9482018-10-02 17:09:59 -04002105 // uint32_t(StructLayout->getElementOffset(MemberIdx));
David Neto257c3892018-04-11 13:19:45 -04002106 Ops << MkNum(ByteOffset);
David Neto22f144c2017-06-12 14:26:21 -04002107
David Neto87846742018-04-11 17:36:22 -04002108 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002109 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002110 }
2111
2112 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002113 if (StructTypesNeedingBlock.idFor(STy)) {
2114 Ops.clear();
2115 // Use Block decorations with StorageBuffer storage class.
2116 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002117
David Neto862b7d82018-06-14 18:48:37 -04002118 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2119 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002120 }
2121 break;
2122 }
2123 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002124 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002125
2126 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002127 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002128 SPIRVInstList.push_back(Inst);
2129 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002130 if (!clspv::Option::Int8Support()) {
2131 // i8 is added to TypeMap as i32.
2132 // No matter what LLVM type is requested first, always alias the
2133 // second one's SPIR-V type to be the same as the one we generated
2134 // first.
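        // For instance, if i8 is seen first it is emitted as OpTypeInt 32 0
        // and a later i32 reuses that id; if i32 comes first, a later i8
        // simply maps to the existing 32-bit type.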
2135 unsigned aliasToWidth = 0;
2136 if (BitWidth == 8) {
2137 aliasToWidth = 32;
2138 BitWidth = 32;
2139 } else if (BitWidth == 32) {
2140 aliasToWidth = 8;
2141 }
2142 if (aliasToWidth) {
2143 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2144 auto where = TypeMap.find(otherType);
2145 if (where == TypeMap.end()) {
2146 // Go ahead and make it, but also map the other type to it.
2147 TypeMap[otherType] = nextID;
2148 } else {
2149 // Alias this SPIR-V type the existing type.
2150 TypeMap[Ty] = where->second;
2151 break;
2152 }
David Neto391aeb12017-08-26 15:51:58 -04002153 }
David Neto22f144c2017-06-12 14:26:21 -04002154 }
2155
David Neto257c3892018-04-11 13:19:45 -04002156 SPIRVOperandList Ops;
2157 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002158
2159 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002160 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002161 }
2162 break;
2163 }
2164 case Type::HalfTyID:
2165 case Type::FloatTyID:
2166 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002167 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002168 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002169
2170 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002171 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002172 break;
2173 }
2174 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002175 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002176 const uint64_t Length = ArrTy->getArrayNumElements();
2177 if (Length == 0) {
2178 // By convention, map it to a RuntimeArray.
David Neto22f144c2017-06-12 14:26:21 -04002179
David Neto862b7d82018-06-14 18:48:37 -04002180 // Only generate the type once.
2181 // TODO(dneto): Can it ever be generated more than once?
2182 // Doesn't LLVM type uniqueness guarantee we'll only see this
2183 // once?
2184 Type *EleTy = ArrTy->getArrayElementType();
2185 if (OpRuntimeTyMap.count(EleTy) == 0) {
2186 uint32_t OpTypeRuntimeArrayID = nextID;
2187 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002188
David Neto862b7d82018-06-14 18:48:37 -04002189 //
2190 // Generate OpTypeRuntimeArray.
2191 //
David Neto22f144c2017-06-12 14:26:21 -04002192
David Neto862b7d82018-06-14 18:48:37 -04002193 // OpTypeRuntimeArray
2194 // Ops[0] = Element Type ID
2195 SPIRVOperandList Ops;
2196 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002197
David Neto862b7d82018-06-14 18:48:37 -04002198 SPIRVInstList.push_back(
2199 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002200
David Neto862b7d82018-06-14 18:48:37 -04002201 if (Hack_generate_runtime_array_stride_early) {
2202 // Generate OpDecorate.
2203 auto DecoInsertPoint = std::find_if(
2204 SPIRVInstList.begin(), SPIRVInstList.end(),
2205 [](SPIRVInstruction *Inst) -> bool {
2206 return Inst->getOpcode() != spv::OpDecorate &&
2207 Inst->getOpcode() != spv::OpMemberDecorate &&
2208 Inst->getOpcode() != spv::OpExtInstImport;
2209 });
David Neto22f144c2017-06-12 14:26:21 -04002210
David Neto862b7d82018-06-14 18:48:37 -04002211 // Ops[0] = Target ID
2212 // Ops[1] = Decoration (ArrayStride)
2213 // Ops[2] = Stride Number(Literal Number)
2214 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002215
David Neto862b7d82018-06-14 18:48:37 -04002216 Ops << MkId(OpTypeRuntimeArrayID)
2217 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002218 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002219
David Neto862b7d82018-06-14 18:48:37 -04002220 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2221 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2222 }
2223 }
David Neto22f144c2017-06-12 14:26:21 -04002224
David Neto862b7d82018-06-14 18:48:37 -04002225 } else {
David Neto22f144c2017-06-12 14:26:21 -04002226
David Neto862b7d82018-06-14 18:48:37 -04002227 //
2228 // Generate OpConstant and OpTypeArray.
2229 //
2230
2231 //
2232 // Generate OpConstant for array length.
2233 //
2234 // Ops[0] = Result Type ID
2235 // Ops[1] .. Ops[n] = Values LiteralNumber
2236 SPIRVOperandList Ops;
2237
2238 Type *LengthTy = Type::getInt32Ty(Context);
2239 uint32_t ResTyID = lookupType(LengthTy);
2240 Ops << MkId(ResTyID);
2241
2242 assert(Length < UINT32_MAX);
2243 Ops << MkNum(static_cast<uint32_t>(Length));
2244
2245 // Add constant for length to constant list.
2246 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2247 AllocatedVMap[CstLength] = nextID;
2248 VMap[CstLength] = nextID;
2249 uint32_t LengthID = nextID;
2250
2251 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2252 SPIRVInstList.push_back(CstInst);
2253
2254 // Remember to generate ArrayStride later
2255 getTypesNeedingArrayStride().insert(Ty);
2256
2257 //
2258 // Generate OpTypeArray.
2259 //
2260 // Ops[0] = Element Type ID
2261 // Ops[1] = Array Length Constant ID
2262 Ops.clear();
2263
2264 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2265 Ops << MkId(EleTyID) << MkId(LengthID);
2266
2267 // Update TypeMap with nextID.
2268 TypeMap[Ty] = nextID;
2269
2270 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2271 SPIRVInstList.push_back(ArrayInst);
2272 }
David Neto22f144c2017-06-12 14:26:21 -04002273 break;
2274 }
2275 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002276 // <4 x i8> is changed to i32 if i8 is not generally supported.
2277 if (!clspv::Option::Int8Support() &&
2278 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002279 if (Ty->getVectorNumElements() == 4) {
2280 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2281 break;
2282 } else {
2283 Ty->print(errs());
2284 llvm_unreachable("Support above i8 vector type");
2285 }
2286 }
2287
2288 // Ops[0] = Component Type ID
2289 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002290 SPIRVOperandList Ops;
2291 Ops << MkId(lookupType(Ty->getVectorElementType()))
2292 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002293
alan-bakerb6b09dc2018-11-08 16:59:28 -05002294 SPIRVInstruction *inst =
2295 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002296 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002297 break;
2298 }
2299 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002300 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002301 SPIRVInstList.push_back(Inst);
2302 break;
2303 }
2304 case Type::FunctionTyID: {
2305 // Generate SPIRV instruction for function type.
2306 FunctionType *FTy = cast<FunctionType>(Ty);
2307
2308 // Ops[0] = Return Type ID
2309 // Ops[1] ... Ops[n] = Parameter Type IDs
2310 SPIRVOperandList Ops;
2311
2312 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002313 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002314
2315 // Find SPIRV instructions for parameter types
2316 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2317 // Find SPIRV instruction for parameter type.
2318 auto ParamTy = FTy->getParamType(k);
2319 if (ParamTy->isPointerTy()) {
2320 auto PointeeTy = ParamTy->getPointerElementType();
2321 if (PointeeTy->isStructTy() &&
2322 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2323 ParamTy = PointeeTy;
2324 }
2325 }
2326
David Netoc6f3ab22018-04-06 18:02:31 -04002327 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002328 }
2329
David Neto87846742018-04-11 17:36:22 -04002330 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002331 SPIRVInstList.push_back(Inst);
2332 break;
2333 }
2334 }
2335 }
2336
2337 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002338 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002339 //
2340 // Generate OpTypeSampledImage.
2341 //
2342 // Ops[0] = Image Type ID
2343 //
2344 SPIRVOperandList Ops;
2345
David Netoc6f3ab22018-04-06 18:02:31 -04002346 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002347
alan-bakerabd82722019-12-03 17:14:51 -05002348 // Update the image type map.
2349 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002350
David Neto87846742018-04-11 17:36:22 -04002351 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002352 SPIRVInstList.push_back(Inst);
2353 }
David Netoc6f3ab22018-04-06 18:02:31 -04002354
2355 // Generate types for pointer-to-local arguments.
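  // For each spec id this emits, roughly, the following SPIR-V pattern
  // (shown with the LocalArgInfo field names for illustration; the SpecId
  // decoration itself is not emitted in this loop):
  //   %array_size_id     = OpSpecConstant %uint 1
  //   %array_type_id     = OpTypeArray %elem_type %array_size_id
  //   %ptr_array_type_id = OpTypePointer Workgroup %array_type_id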
Alan Baker202c8c72018-08-13 13:47:44 -04002356 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2357 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002358 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002359
2360 // Generate the spec constant.
2361 SPIRVOperandList Ops;
2362 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002363 SPIRVInstList.push_back(
2364 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002365
2366 // Generate the array type.
2367 Ops.clear();
2368 // The element type must have been created.
2369 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2370 assert(elem_ty_id);
2371 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2372
2373 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002374 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002375
2376 Ops.clear();
2377 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002378 SPIRVInstList.push_back(new SPIRVInstruction(
2379 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002380 }
David Neto22f144c2017-06-12 14:26:21 -04002381}
2382
2383void SPIRVProducerPass::GenerateSPIRVConstants() {
2384 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2385 ValueMapType &VMap = getValueMap();
2386 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2387 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002388 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002389
2390 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002391 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002392 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002393
2394 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002395 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002396 continue;
2397 }
2398
David Netofb9a7972017-08-25 17:08:24 -04002399 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002400 VMap[Cst] = nextID;
2401
2402 //
2403 // Generate OpConstant.
2404 //
2405
2406 // Ops[0] = Result Type ID
2407 // Ops[1] .. Ops[n] = Values LiteralNumber
2408 SPIRVOperandList Ops;
2409
David Neto257c3892018-04-11 13:19:45 -04002410 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002411
2412 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002413 spv::Op Opcode = spv::OpNop;
2414
2415 if (isa<UndefValue>(Cst)) {
2416 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002417 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002418 if (hack_undef && IsTypeNullable(Cst->getType())) {
2419 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002420 }
David Neto22f144c2017-06-12 14:26:21 -04002421 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2422 unsigned BitWidth = CI->getBitWidth();
2423 if (BitWidth == 1) {
2424 // If the bitwidth of constant is 1, generate OpConstantTrue or
2425 // OpConstantFalse.
2426 if (CI->getZExtValue()) {
2427 // Ops[0] = Result Type ID
2428 Opcode = spv::OpConstantTrue;
2429 } else {
2430 // Ops[0] = Result Type ID
2431 Opcode = spv::OpConstantFalse;
2432 }
David Neto22f144c2017-06-12 14:26:21 -04002433 } else {
2434 auto V = CI->getZExtValue();
2435 LiteralNum.push_back(V & 0xFFFFFFFF);
2436
2437 if (BitWidth > 32) {
2438 LiteralNum.push_back(V >> 32);
2439 }
2440
2441 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002442
David Neto257c3892018-04-11 13:19:45 -04002443 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002444 }
2445 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2446 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2447 Type *CFPTy = CFP->getType();
2448 if (CFPTy->isFloatTy()) {
2449 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002450 } else if (CFPTy->isDoubleTy()) {
2451 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2452 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002453 } else if (CFPTy->isHalfTy()) {
2454 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002455 } else {
2456 CFPTy->print(errs());
2457 llvm_unreachable("Implement this ConstantFP Type");
2458 }
2459
2460 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002461
David Neto257c3892018-04-11 13:19:45 -04002462 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002463 } else if (isa<ConstantDataSequential>(Cst) &&
2464 cast<ConstantDataSequential>(Cst)->isString()) {
2465 Cst->print(errs());
2466 llvm_unreachable("Implement this Constant");
2467
2468 } else if (const ConstantDataSequential *CDS =
2469 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002470 // Let's convert <4 x i8> constant to int constant specially.
2471 // This case occurs when all the values are specified as constant
2472 // ints.
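      // Worked example (illustrative): <i8 1, i8 2, i8 3, i8 4> packs into
      // the i32 constant 0x01020304, since each element is shifted in from
      // the low end and element 0 ends up in the most significant byte.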
2473 Type *CstTy = Cst->getType();
2474 if (is4xi8vec(CstTy)) {
2475 LLVMContext &Context = CstTy->getContext();
2476
2477 //
2478 // Generate OpConstant with OpTypeInt 32 0.
2479 //
Neil Henning39672102017-09-29 14:33:13 +01002480 uint32_t IntValue = 0;
2481 for (unsigned k = 0; k < 4; k++) {
2482 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002483 IntValue = (IntValue << 8) | (Val & 0xffu);
2484 }
2485
2486 Type *i32 = Type::getInt32Ty(Context);
2487 Constant *CstInt = ConstantInt::get(i32, IntValue);
2488 // If this constant is already registered on VMap, use it.
2489 if (VMap.count(CstInt)) {
2490 uint32_t CstID = VMap[CstInt];
2491 VMap[Cst] = CstID;
2492 continue;
2493 }
2494
David Neto257c3892018-04-11 13:19:45 -04002495 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002496
David Neto87846742018-04-11 17:36:22 -04002497 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002498 SPIRVInstList.push_back(CstInst);
2499
2500 continue;
2501 }
2502
2503 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002504 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2505 Constant *EleCst = CDS->getElementAsConstant(k);
2506 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002507 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002508 }
2509
2510 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002511 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2512 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002513 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002514 Type *CstTy = Cst->getType();
2515 if (is4xi8vec(CstTy)) {
2516 LLVMContext &Context = CstTy->getContext();
2517
2518 //
2519 // Generate OpConstant with OpTypeInt 32 0.
2520 //
Neil Henning39672102017-09-29 14:33:13 +01002521 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002522 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2523 I != E; ++I) {
2524 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002525 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002526 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2527 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002528 }
David Neto49351ac2017-08-26 17:32:20 -04002529 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002530 }
2531
David Neto49351ac2017-08-26 17:32:20 -04002532 Type *i32 = Type::getInt32Ty(Context);
2533 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002534 // If this constant is already registered on VMap, use it.
2535 if (VMap.count(CstInt)) {
2536 uint32_t CstID = VMap[CstInt];
2537 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002538 continue;
David Neto22f144c2017-06-12 14:26:21 -04002539 }
2540
David Neto257c3892018-04-11 13:19:45 -04002541 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002542
David Neto87846742018-04-11 17:36:22 -04002543 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002544 SPIRVInstList.push_back(CstInst);
2545
David Neto19a1bad2017-08-25 15:01:41 -04002546 continue;
David Neto22f144c2017-06-12 14:26:21 -04002547 }
2548
2549 // We use a constant composite in SPIR-V for our constant aggregate in
2550 // LLVM.
2551 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002552
2553 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2554       // Look up the ID of the element of this aggregate (for which we
2555       // will already have created a constant).
2556 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2557
2558 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002559 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002560 }
2561 } else if (Cst->isNullValue()) {
2562 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002563 } else {
2564 Cst->print(errs());
2565 llvm_unreachable("Unsupported Constant???");
2566 }
2567
alan-baker5b86ed72019-02-15 08:26:50 -05002568 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2569 // Null pointer requires variable pointers.
2570 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2571 }
2572
David Neto87846742018-04-11 17:36:22 -04002573 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002574 SPIRVInstList.push_back(CstInst);
2575 }
2576}
2577
2578void SPIRVProducerPass::GenerateSamplers(Module &M) {
2579 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002580
alan-bakerb6b09dc2018-11-08 16:59:28 -05002581 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002582 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002583 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2584 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002585
David Neto862b7d82018-06-14 18:48:37 -04002586  // We might have samplers in the sampler map that are not used
2587  // in the translation unit. We still need to allocate variables
2588  // and bindings for them.
2589 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002590
Kévin Petitdf71de32019-04-09 14:09:50 +01002591 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002592 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002593 if (!var_fn)
2594 return;
alan-baker09cb9802019-12-10 13:16:27 -05002595
David Neto862b7d82018-06-14 18:48:37 -04002596 for (auto user : var_fn->users()) {
2597 // Populate SamplerLiteralToDescriptorSetMap and
2598 // SamplerLiteralToBindingMap.
2599 //
2600 // Look for calls like
2601 // call %opencl.sampler_t addrspace(2)*
2602 // @clspv.sampler.var.literal(
2603 // i32 descriptor,
2604 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002605 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002606 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002607 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002608 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002609 auto sampler_value = third_param;
2610 if (clspv::Option::UseSamplerMap()) {
2611 if (third_param >= sampler_map.size()) {
2612 errs() << "Out of bounds index to sampler map: " << third_param;
2613 llvm_unreachable("bad sampler init: out of bounds");
2614 }
2615 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002616 }
2617
David Neto862b7d82018-06-14 18:48:37 -04002618 const auto descriptor_set = static_cast<unsigned>(
2619 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2620 const auto binding = static_cast<unsigned>(
2621 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2622
2623 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2624 SamplerLiteralToBindingMap[sampler_value] = binding;
2625 used_bindings.insert(binding);
2626 }
2627 }
2628
alan-baker09cb9802019-12-10 13:16:27 -05002629 DenseSet<size_t> seen;
2630 for (auto user : var_fn->users()) {
2631 if (!isa<CallInst>(user))
2632 continue;
2633
2634 auto call = cast<CallInst>(user);
2635 const unsigned third_param = static_cast<unsigned>(
2636 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2637
2638 // Already allocated a variable for this value.
2639 if (!seen.insert(third_param).second)
2640 continue;
2641
2642 auto sampler_value = third_param;
2643 if (clspv::Option::UseSamplerMap()) {
2644 sampler_value = sampler_map[third_param].first;
2645 }
2646
David Neto22f144c2017-06-12 14:26:21 -04002647 // Generate OpVariable.
2648 //
2649     // Ops[0] : Result Type ID
2650     // Ops[1] : Storage Class
2651 SPIRVOperandList Ops;
2652
David Neto257c3892018-04-11 13:19:45 -04002653 Ops << MkId(lookupType(SamplerTy))
2654 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002655
David Neto862b7d82018-06-14 18:48:37 -04002656 auto sampler_var_id = nextID++;
2657 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002658 SPIRVInstList.push_back(Inst);
2659
alan-baker09cb9802019-12-10 13:16:27 -05002660 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002661
2662 // Find Insert Point for OpDecorate.
2663 auto DecoInsertPoint =
2664 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2665 [](SPIRVInstruction *Inst) -> bool {
2666 return Inst->getOpcode() != spv::OpDecorate &&
2667 Inst->getOpcode() != spv::OpMemberDecorate &&
2668 Inst->getOpcode() != spv::OpExtInstImport;
2669 });
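    // SPIR-V requires all decorations to live in the module's annotation
    // section, so insert new ones just before the first instruction that is
    // not already a decoration (or an OpExtInstImport).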
2670
2671 // Ops[0] = Target ID
2672 // Ops[1] = Decoration (DescriptorSet)
2673 // Ops[2] = LiteralNumber according to Decoration
2674 Ops.clear();
2675
David Neto862b7d82018-06-14 18:48:37 -04002676 unsigned descriptor_set;
2677 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002678 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002679 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002680 // This sampler is not actually used. Find the next one.
2681 for (binding = 0; used_bindings.count(binding); binding++)
2682 ;
2683 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2684 used_bindings.insert(binding);
2685 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002686 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2687 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002688
alan-baker09cb9802019-12-10 13:16:27 -05002689 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002690 descriptorMapEntries->emplace_back(std::move(sampler_data),
2691 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002692 }
2693
2694 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2695 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002696
David Neto87846742018-04-11 17:36:22 -04002697 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002698 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2699
2700 // Ops[0] = Target ID
2701 // Ops[1] = Decoration (Binding)
2702 // Ops[2] = LiteralNumber according to Decoration
2703 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002704 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2705 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002706
David Neto87846742018-04-11 17:36:22 -04002707 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002708 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2709 }
David Neto862b7d82018-06-14 18:48:37 -04002710}
David Neto22f144c2017-06-12 14:26:21 -04002711
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002712void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002713 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2714 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002715
David Neto862b7d82018-06-14 18:48:37 -04002716 // Generate variables. Make one for each of resource var info object.
2717 for (auto *info : ModuleOrderedResourceVars) {
2718 Type *type = info->var_fn->getReturnType();
2719 // Remap the address space for opaque types.
2720 switch (info->arg_kind) {
2721 case clspv::ArgKind::Sampler:
2722 case clspv::ArgKind::ReadOnlyImage:
2723 case clspv::ArgKind::WriteOnlyImage:
2724 type = PointerType::get(type->getPointerElementType(),
2725 clspv::AddressSpace::UniformConstant);
2726 break;
2727 default:
2728 break;
2729 }
David Neto22f144c2017-06-12 14:26:21 -04002730
David Neto862b7d82018-06-14 18:48:37 -04002731 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002732
David Neto862b7d82018-06-14 18:48:37 -04002733 const auto type_id = lookupType(type);
2734 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2735 SPIRVOperandList Ops;
2736 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002737
David Neto862b7d82018-06-14 18:48:37 -04002738 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2739 SPIRVInstList.push_back(Inst);
2740
2741 // Map calls to the variable-builtin-function.
2742 for (auto &U : info->var_fn->uses()) {
2743 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2744 const auto set = unsigned(
2745 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2746 const auto binding = unsigned(
2747 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2748 if (set == info->descriptor_set && binding == info->binding) {
2749 switch (info->arg_kind) {
2750 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002751 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002752 case clspv::ArgKind::Pod:
2753 // The call maps to the variable directly.
2754 VMap[call] = info->var_id;
2755 break;
2756 case clspv::ArgKind::Sampler:
2757 case clspv::ArgKind::ReadOnlyImage:
2758 case clspv::ArgKind::WriteOnlyImage:
2759 // The call maps to a load we generate later.
2760 ResourceVarDeferredLoadCalls[call] = info->var_id;
2761 break;
2762 default:
2763 llvm_unreachable("Unhandled arg kind");
2764 }
2765 }
David Neto22f144c2017-06-12 14:26:21 -04002766 }
David Neto862b7d82018-06-14 18:48:37 -04002767 }
2768 }
David Neto22f144c2017-06-12 14:26:21 -04002769
David Neto862b7d82018-06-14 18:48:37 -04002770 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002771
David Neto862b7d82018-06-14 18:48:37 -04002772 // Find Insert Point for OpDecorate.
2773 auto DecoInsertPoint =
2774 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2775 [](SPIRVInstruction *Inst) -> bool {
2776 return Inst->getOpcode() != spv::OpDecorate &&
2777 Inst->getOpcode() != spv::OpMemberDecorate &&
2778 Inst->getOpcode() != spv::OpExtInstImport;
2779 });
2780
2781 SPIRVOperandList Ops;
2782 for (auto *info : ModuleOrderedResourceVars) {
2783 // Decorate with DescriptorSet and Binding.
2784 Ops.clear();
2785 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2786 << MkNum(info->descriptor_set);
2787 SPIRVInstList.insert(DecoInsertPoint,
2788 new SPIRVInstruction(spv::OpDecorate, Ops));
2789
2790 Ops.clear();
2791 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2792 << MkNum(info->binding);
2793 SPIRVInstList.insert(DecoInsertPoint,
2794 new SPIRVInstruction(spv::OpDecorate, Ops));
2795
alan-bakere9308012019-03-15 10:25:13 -04002796 if (info->coherent) {
2797 // Decorate with Coherent if required for the variable.
2798 Ops.clear();
2799 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2800 SPIRVInstList.insert(DecoInsertPoint,
2801 new SPIRVInstruction(spv::OpDecorate, Ops));
2802 }
2803
David Neto862b7d82018-06-14 18:48:37 -04002804 // Generate NonWritable and NonReadable
2805 switch (info->arg_kind) {
2806 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002807 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002808 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2809 clspv::AddressSpace::Constant) {
2810 Ops.clear();
2811 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2812 SPIRVInstList.insert(DecoInsertPoint,
2813 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002814 }
David Neto862b7d82018-06-14 18:48:37 -04002815 break;
David Neto862b7d82018-06-14 18:48:37 -04002816 case clspv::ArgKind::WriteOnlyImage:
2817 Ops.clear();
2818 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2819 SPIRVInstList.insert(DecoInsertPoint,
2820 new SPIRVInstruction(spv::OpDecorate, Ops));
2821 break;
2822 default:
2823 break;
David Neto22f144c2017-06-12 14:26:21 -04002824 }
2825 }
2826}
2827
2828void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002829 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04002830 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2831 ValueMapType &VMap = getValueMap();
2832 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002833 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002834
2835 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2836 Type *Ty = GV.getType();
2837 PointerType *PTy = cast<PointerType>(Ty);
2838
2839 uint32_t InitializerID = 0;
2840
2841 // Workgroup size is handled differently (it goes into a constant)
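  // When reqd_work_group_size metadata is present, the value becomes an
  // OpConstantComposite of the three fixed dimensions; otherwise three
  // OpSpecConstants (decorated with SpecId 0/1/2 in GenerateModuleInfo) feed
  // an OpSpecConstantComposite. Either way it is that composite, not the
  // variable, that later receives the BuiltIn WorkgroupSize decoration.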
2842 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2843 std::vector<bool> HasMDVec;
2844 uint32_t PrevXDimCst = 0xFFFFFFFF;
2845 uint32_t PrevYDimCst = 0xFFFFFFFF;
2846 uint32_t PrevZDimCst = 0xFFFFFFFF;
2847 for (Function &Func : *GV.getParent()) {
2848 if (Func.isDeclaration()) {
2849 continue;
2850 }
2851
2852 // We only need to check kernels.
2853 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2854 continue;
2855 }
2856
2857 if (const MDNode *MD =
2858 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2859 uint32_t CurXDimCst = static_cast<uint32_t>(
2860 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2861 uint32_t CurYDimCst = static_cast<uint32_t>(
2862 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2863 uint32_t CurZDimCst = static_cast<uint32_t>(
2864 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2865
2866 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2867 PrevZDimCst == 0xFFFFFFFF) {
2868 PrevXDimCst = CurXDimCst;
2869 PrevYDimCst = CurYDimCst;
2870 PrevZDimCst = CurZDimCst;
2871 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2872 CurZDimCst != PrevZDimCst) {
2873 llvm_unreachable(
2874 "reqd_work_group_size must be the same across all kernels");
2875 } else {
2876 continue;
2877 }
2878
2879 //
2880 // Generate OpConstantComposite.
2881 //
2882 // Ops[0] : Result Type ID
2883 // Ops[1] : Constant size for x dimension.
2884 // Ops[2] : Constant size for y dimension.
2885 // Ops[3] : Constant size for z dimension.
2886 SPIRVOperandList Ops;
2887
2888 uint32_t XDimCstID =
2889 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
2890 uint32_t YDimCstID =
2891 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
2892 uint32_t ZDimCstID =
2893 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
2894
2895 InitializerID = nextID;
2896
David Neto257c3892018-04-11 13:19:45 -04002897 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2898 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002899
David Neto87846742018-04-11 17:36:22 -04002900 auto *Inst =
2901 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002902 SPIRVInstList.push_back(Inst);
2903
2904 HasMDVec.push_back(true);
2905 } else {
2906 HasMDVec.push_back(false);
2907 }
2908 }
2909
2910     // Check that kernels are consistent: either every kernel has a
         // reqd_work_group_size or none does.
2911 bool HasMD = false;
2912 if (!HasMDVec.empty()) {
2913 HasMD = HasMDVec[0];
2914 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2915 if (HasMD != HasMDVec[i]) {
2916 llvm_unreachable(
2917 "Kernels should have consistent work group size definition");
2918 }
2919 }
2920 }
2921
2922     // If no kernel has reqd_work_group_size metadata, generate
2923     // OpSpecConstants for the x/y/z dimensions.
2924 if (!HasMD) {
2925 //
2926 // Generate OpSpecConstants for x/y/z dimension.
2927 //
2928 // Ops[0] : Result Type ID
2929 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2930 uint32_t XDimCstID = 0;
2931 uint32_t YDimCstID = 0;
2932 uint32_t ZDimCstID = 0;
2933
David Neto22f144c2017-06-12 14:26:21 -04002934 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04002935 uint32_t result_type_id =
2936 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04002937
David Neto257c3892018-04-11 13:19:45 -04002938 // X Dimension
2939 Ops << MkId(result_type_id) << MkNum(1);
2940 XDimCstID = nextID++;
2941 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002942 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002943
2944 // Y Dimension
2945 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002946 Ops << MkId(result_type_id) << MkNum(1);
2947 YDimCstID = nextID++;
2948 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002949 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002950
2951 // Z Dimension
2952 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002953 Ops << MkId(result_type_id) << MkNum(1);
2954 ZDimCstID = nextID++;
2955 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002956 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002957
David Neto257c3892018-04-11 13:19:45 -04002958 BuiltinDimVec.push_back(XDimCstID);
2959 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002960 BuiltinDimVec.push_back(ZDimCstID);
2961
David Neto22f144c2017-06-12 14:26:21 -04002962 //
2963 // Generate OpSpecConstantComposite.
2964 //
2965 // Ops[0] : Result Type ID
2966 // Ops[1] : Constant size for x dimension.
2967 // Ops[2] : Constant size for y dimension.
2968 // Ops[3] : Constant size for z dimension.
2969 InitializerID = nextID;
2970
2971 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002972 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
2973 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002974
David Neto87846742018-04-11 17:36:22 -04002975 auto *Inst =
2976 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002977 SPIRVInstList.push_back(Inst);
2978 }
2979 }
2980
David Neto22f144c2017-06-12 14:26:21 -04002981 VMap[&GV] = nextID;
2982
2983 //
2984 // Generate OpVariable.
2985 //
2986   // Ops[0] : Result Type ID
2987   // Ops[1] : Storage Class
2988 SPIRVOperandList Ops;
2989
David Neto85082642018-03-24 06:55:20 -07002990 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04002991 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04002992
David Neto85082642018-03-24 06:55:20 -07002993 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002994 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002995 clspv::Option::ModuleConstantsInStorageBuffer();
2996
Kévin Petit23d5f182019-08-13 16:21:29 +01002997 if (GV.hasInitializer()) {
2998 auto GVInit = GV.getInitializer();
2999 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3000 assert(VMap.count(GVInit) == 1);
3001 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003002 }
3003 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003004
3005 if (0 != InitializerID) {
3006     // Emit the ID of the initializer as part of the variable definition.
3007 Ops << MkId(InitializerID);
3008 }
David Neto85082642018-03-24 06:55:20 -07003009 const uint32_t var_id = nextID++;
3010
David Neto87846742018-04-11 17:36:22 -04003011 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003012 SPIRVInstList.push_back(Inst);
3013
3014 // If we have a builtin.
3015 if (spv::BuiltInMax != BuiltinType) {
3016 // Find Insert Point for OpDecorate.
3017 auto DecoInsertPoint =
3018 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3019 [](SPIRVInstruction *Inst) -> bool {
3020 return Inst->getOpcode() != spv::OpDecorate &&
3021 Inst->getOpcode() != spv::OpMemberDecorate &&
3022 Inst->getOpcode() != spv::OpExtInstImport;
3023 });
3024 //
3025 // Generate OpDecorate.
3026 //
3027 // DOps[0] = Target ID
3028 // DOps[1] = Decoration (Builtin)
3029 // DOps[2] = BuiltIn ID
3030 uint32_t ResultID;
3031
3032     // WorkgroupSize is different: we decorate the constant composite that has
3033 // its value, rather than the variable that we use to access the value.
3034 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3035 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003036 // Save both the value and variable IDs for later.
3037 WorkgroupSizeValueID = InitializerID;
3038 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003039 } else {
3040 ResultID = VMap[&GV];
3041 }
3042
3043 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003044 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3045 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003046
David Neto87846742018-04-11 17:36:22 -04003047 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003048 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003049 } else if (module_scope_constant_external_init) {
3050 // This module scope constant is initialized from a storage buffer with data
3051 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003052 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003053
David Neto862b7d82018-06-14 18:48:37 -04003054    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003055 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3056 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003057 std::string hexbytes;
3058 llvm::raw_string_ostream str(hexbytes);
3059 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003060 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3061 str.str()};
3062 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3063 0);
David Neto85082642018-03-24 06:55:20 -07003064
3065 // Find Insert Point for OpDecorate.
3066 auto DecoInsertPoint =
3067 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3068 [](SPIRVInstruction *Inst) -> bool {
3069 return Inst->getOpcode() != spv::OpDecorate &&
3070 Inst->getOpcode() != spv::OpMemberDecorate &&
3071 Inst->getOpcode() != spv::OpExtInstImport;
3072 });
3073
David Neto257c3892018-04-11 13:19:45 -04003074 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003075 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003076 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3077 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003078 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003079
3080 // OpDecorate %var DescriptorSet <descriptor_set>
3081 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003082 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3083 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003084 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003085 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003086 }
3087}
3088
David Netoc6f3ab22018-04-06 18:02:31 -04003089void SPIRVProducerPass::GenerateWorkgroupVars() {
3090 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003091 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3092 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003093 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
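    // Each pointer-to-local kernel argument is backed by a Workgroup-storage
    // array variable; its element count is associated with this spec id so
    // the array can be sized at specialization time.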
David Netoc6f3ab22018-04-06 18:02:31 -04003094
3095 // Generate OpVariable.
3096 //
3097     // Ops[0] : Result Type ID
3098     // Ops[1] : Storage Class
3099 SPIRVOperandList Ops;
3100 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3101
3102 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003103 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003104 }
3105}
3106
David Neto862b7d82018-06-14 18:48:37 -04003107void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3108 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003109 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3110 return;
3111 }
David Neto862b7d82018-06-14 18:48:37 -04003112 // Gather the list of resources that are used by this function's arguments.
3113 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3114
alan-bakerf5e5f692018-11-27 08:33:24 -05003115 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3116 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003117 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003118 std::string kind =
3119 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3120 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003121 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003122 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003123 };
3124
3125 auto *fty = F.getType()->getPointerElementType();
3126 auto *func_ty = dyn_cast<FunctionType>(fty);
3127
alan-baker038e9242019-04-19 22:14:41 -04003128 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003129 // If an argument maps to a resource variable, then get descriptor set and
3130  // binding from the resource variable. Other info comes from the metadata.
3131 const auto *arg_map = F.getMetadata("kernel_arg_map");
3132 if (arg_map) {
3133 for (const auto &arg : arg_map->operands()) {
3134 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003135 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003136 const auto name =
3137 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3138 const auto old_index =
3139 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3140 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003141 const size_t new_index = static_cast<size_t>(
3142 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003143 const auto offset =
3144 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003145 const auto arg_size =
3146 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003147 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003148 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003149 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003150 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003151
3152 uint32_t descriptor_set = 0;
3153 uint32_t binding = 0;
3154 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003155 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3156 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003157 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003158 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003159 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003160 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3161 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3162 DL));
David Neto862b7d82018-06-14 18:48:37 -04003163 } else {
3164 auto *info = resource_var_at_index[new_index];
3165 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003166 descriptor_set = info->descriptor_set;
3167 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003168 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003169 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3170 binding);
David Neto862b7d82018-06-14 18:48:37 -04003171 }
3172 } else {
3173 // There is no argument map.
3174 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003175 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003176
3177 SmallVector<Argument *, 4> arguments;
3178 for (auto &arg : F.args()) {
3179 arguments.push_back(&arg);
3180 }
3181
3182 unsigned arg_index = 0;
3183 for (auto *info : resource_var_at_index) {
3184 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003185 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003186 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003187 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003188 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003189 }
3190
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003191 // Local pointer arguments are unused in this case. Offset is always
3192 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003193 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003194 F.getName().str(),
3195 arg->getName().str(),
3196 arg_index,
3197 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3198 0,
3199 0,
3200 0,
3201 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003202 descriptorMapEntries->emplace_back(std::move(kernel_data),
3203 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003204 }
3205 arg_index++;
3206 }
3207 // Generate mappings for pointer-to-local arguments.
3208 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3209 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003210 auto where = LocalArgSpecIds.find(arg);
3211 if (where != LocalArgSpecIds.end()) {
3212 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003213        // Pod argument fields are unused in this case.
3214 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003215 F.getName().str(),
3216 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003217 arg_index,
3218 ArgKind::Local,
3219 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003220 static_cast<uint32_t>(
3221 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003222 0,
3223 0};
3224 // Pointer-to-local arguments do not utilize descriptor set and binding.
3225 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003226 }
3227 }
3228 }
3229}
3230
David Neto22f144c2017-06-12 14:26:21 -04003231void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3232 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3233 ValueMapType &VMap = getValueMap();
3234 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003235 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3236 auto &GlobalConstArgSet = getGlobalConstArgSet();
3237
3238 FunctionType *FTy = F.getFunctionType();
3239
3240 //
David Neto22f144c2017-06-12 14:26:21 -04003241  // Generate OpFunction.
3242 //
3243
3244 // FOps[0] : Result Type ID
3245 // FOps[1] : Function Control
3246 // FOps[2] : Function Type ID
3247 SPIRVOperandList FOps;
3248
3249 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003250 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003251
3252 // Check function attributes for SPIRV Function Control.
3253 uint32_t FuncControl = spv::FunctionControlMaskNone;
3254 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3255 FuncControl |= spv::FunctionControlInlineMask;
3256 }
3257 if (F.hasFnAttribute(Attribute::NoInline)) {
3258 FuncControl |= spv::FunctionControlDontInlineMask;
3259 }
3260 // TODO: Check llvm attribute for Function Control Pure.
3261 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3262 FuncControl |= spv::FunctionControlPureMask;
3263 }
3264 // TODO: Check llvm attribute for Function Control Const.
3265 if (F.hasFnAttribute(Attribute::ReadNone)) {
3266 FuncControl |= spv::FunctionControlConstMask;
3267 }
3268
David Neto257c3892018-04-11 13:19:45 -04003269 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003270
3271 uint32_t FTyID;
3272 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3273 SmallVector<Type *, 4> NewFuncParamTys;
3274 FunctionType *NewFTy =
3275 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3276 FTyID = lookupType(NewFTy);
3277 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003278 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003279 if (GlobalConstFuncTyMap.count(FTy)) {
3280 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3281 } else {
3282 FTyID = lookupType(FTy);
3283 }
3284 }
3285
David Neto257c3892018-04-11 13:19:45 -04003286 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003287
3288 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3289 EntryPoints.push_back(std::make_pair(&F, nextID));
3290 }
3291
3292 VMap[&F] = nextID;
3293
David Neto482550a2018-03-24 05:21:07 -07003294 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003295 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3296 }
David Neto22f144c2017-06-12 14:26:21 -04003297 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003298 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003299 SPIRVInstList.push_back(FuncInst);
3300
3301 //
3302 // Generate OpFunctionParameter for Normal function.
3303 //
3304
3305 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003306
3307 // Find Insert Point for OpDecorate.
3308 auto DecoInsertPoint =
3309 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3310 [](SPIRVInstruction *Inst) -> bool {
3311 return Inst->getOpcode() != spv::OpDecorate &&
3312 Inst->getOpcode() != spv::OpMemberDecorate &&
3313 Inst->getOpcode() != spv::OpExtInstImport;
3314 });
3315
David Neto22f144c2017-06-12 14:26:21 -04003316 // Iterate Argument for name instead of param type from function type.
3317 unsigned ArgIdx = 0;
3318 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003319 uint32_t param_id = nextID++;
3320 VMap[&Arg] = param_id;
3321
3322 if (CalledWithCoherentResource(Arg)) {
3323 // If the arg is passed a coherent resource ever, then decorate this
3324 // parameter with Coherent too.
3325 SPIRVOperandList decoration_ops;
3326 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003327 SPIRVInstList.insert(
3328 DecoInsertPoint,
3329 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003330 }
David Neto22f144c2017-06-12 14:26:21 -04003331
3332 // ParamOps[0] : Result Type ID
3333 SPIRVOperandList ParamOps;
3334
3335 // Find SPIRV instruction for parameter type.
3336 uint32_t ParamTyID = lookupType(Arg.getType());
3337 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3338 if (GlobalConstFuncTyMap.count(FTy)) {
3339 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3340 Type *EleTy = PTy->getPointerElementType();
3341 Type *ArgTy =
3342 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3343 ParamTyID = lookupType(ArgTy);
3344 GlobalConstArgSet.insert(&Arg);
3345 }
3346 }
3347 }
David Neto257c3892018-04-11 13:19:45 -04003348 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003349
3350 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003351 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003352 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003353 SPIRVInstList.push_back(ParamInst);
3354
3355 ArgIdx++;
3356 }
3357 }
3358}
3359
alan-bakerb6b09dc2018-11-08 16:59:28 -05003360void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003361 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3362 EntryPointVecType &EntryPoints = getEntryPointVec();
3363 ValueMapType &VMap = getValueMap();
3364 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3365 uint32_t &ExtInstImportID = getOpExtInstImportID();
3366 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3367
3368 // Set up insert point.
3369 auto InsertPoint = SPIRVInstList.begin();
3370
3371 //
3372 // Generate OpCapability
3373 //
3374   // TODO: Which llvm information is mapped to SPIRV Capability?
3375
3376 // Ops[0] = Capability
3377 SPIRVOperandList Ops;
3378
David Neto87846742018-04-11 17:36:22 -04003379 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003380 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003381 SPIRVInstList.insert(InsertPoint, CapInst);
3382
alan-bakerf906d2b2019-12-10 11:26:23 -05003383 bool write_without_format = false;
3384 bool sampled_1d = false;
3385 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003386 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003387 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3388 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003389 SPIRVInstList.insert(
3390 InsertPoint,
3391 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003392 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003393 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003394 SPIRVInstList.insert(
3395 InsertPoint,
3396 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003397 } else if (Ty->isIntegerTy(64)) {
3398 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003399 SPIRVInstList.insert(
3400 InsertPoint,
3401 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003402 } else if (Ty->isHalfTy()) {
3403 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003404 SPIRVInstList.insert(InsertPoint,
3405 new SPIRVInstruction(spv::OpCapability,
3406 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003407 } else if (Ty->isDoubleTy()) {
3408 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003409 SPIRVInstList.insert(InsertPoint,
3410 new SPIRVInstruction(spv::OpCapability,
3411 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003412 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3413 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003414 if (STy->getName().startswith("opencl.image1d_wo_t") ||
3415 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003416 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003417 write_without_format = true;
3418 }
3419 if (STy->getName().startswith("opencl.image1d_ro_t") ||
3420 STy->getName().startswith("opencl.image1d_wo_t")) {
3421 if (STy->getName().contains(".sampled"))
3422 sampled_1d = true;
3423 else
3424 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003425 }
3426 }
3427 }
3428 }
3429
alan-bakerf906d2b2019-12-10 11:26:23 -05003430 if (write_without_format) {
3431 // Generate OpCapability for write only image type.
3432 SPIRVInstList.insert(
3433 InsertPoint,
3434 new SPIRVInstruction(
3435 spv::OpCapability,
3436 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3437 }
3438 if (image_1d) {
3439 // Generate OpCapability for unsampled 1D image type.
3440 SPIRVInstList.insert(InsertPoint,
3441 new SPIRVInstruction(spv::OpCapability,
3442 {MkNum(spv::CapabilityImage1D)}));
3443 } else if (sampled_1d) {
3444 // Generate OpCapability for sampled 1D image type.
3445 SPIRVInstList.insert(
3446 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3447 {MkNum(spv::CapabilitySampled1D)}));
3448 }
3449
David Neto5c22a252018-03-15 16:07:41 -04003450 { // OpCapability ImageQuery
3451 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003452 for (const auto &SymVal : module.getValueSymbolTable()) {
3453 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003454 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003455 hasImageQuery = true;
3456 break;
3457 }
David Neto5c22a252018-03-15 16:07:41 -04003458 }
3459 }
alan-bakerf67468c2019-11-25 15:51:49 -05003460
David Neto5c22a252018-03-15 16:07:41 -04003461 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003462 auto *ImageQueryCapInst = new SPIRVInstruction(
3463 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003464 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3465 }
3466 }
3467
David Neto22f144c2017-06-12 14:26:21 -04003468 if (hasVariablePointers()) {
3469 //
David Neto22f144c2017-06-12 14:26:21 -04003470 // Generate OpCapability.
3471 //
3472 // Ops[0] = Capability
3473 //
3474 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003475 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003476
David Neto87846742018-04-11 17:36:22 -04003477 SPIRVInstList.insert(InsertPoint,
3478 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003479 } else if (hasVariablePointersStorageBuffer()) {
3480 //
3481 // Generate OpCapability.
3482 //
3483 // Ops[0] = Capability
3484 //
3485 Ops.clear();
3486 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003487
alan-baker5b86ed72019-02-15 08:26:50 -05003488 SPIRVInstList.insert(InsertPoint,
3489 new SPIRVInstruction(spv::OpCapability, Ops));
3490 }
3491
3492 // Always add the storage buffer extension
3493 {
David Neto22f144c2017-06-12 14:26:21 -04003494 //
3495 // Generate OpExtension.
3496 //
3497 // Ops[0] = Name (Literal String)
3498 //
alan-baker5b86ed72019-02-15 08:26:50 -05003499 auto *ExtensionInst = new SPIRVInstruction(
3500 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3501 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3502 }
David Neto22f144c2017-06-12 14:26:21 -04003503
alan-baker5b86ed72019-02-15 08:26:50 -05003504 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3505 //
3506 // Generate OpExtension.
3507 //
3508 // Ops[0] = Name (Literal String)
3509 //
3510 auto *ExtensionInst = new SPIRVInstruction(
3511 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3512 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003513 }
3514
3515 if (ExtInstImportID) {
3516 ++InsertPoint;
3517 }
3518
3519 //
3520 // Generate OpMemoryModel
3521 //
3522 // Memory model for Vulkan will always be GLSL450.
3523
3524 // Ops[0] = Addressing Model
3525 // Ops[1] = Memory Model
3526 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003527 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003528
David Neto87846742018-04-11 17:36:22 -04003529 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003530 SPIRVInstList.insert(InsertPoint, MemModelInst);
3531
3532 //
3533 // Generate OpEntryPoint
3534 //
3535 for (auto EntryPoint : EntryPoints) {
3536 // Ops[0] = Execution Model
3537 // Ops[1] = EntryPoint ID
3538 // Ops[2] = Name (Literal String)
3539 // ...
3540 //
3541 // TODO: Do we need to consider Interface ID for forward references???
3542 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003543 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003544 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3545 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003546
David Neto22f144c2017-06-12 14:26:21 -04003547 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003548 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003549 }
3550
David Neto87846742018-04-11 17:36:22 -04003551 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003552 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3553 }
3554
3555 for (auto EntryPoint : EntryPoints) {
3556 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3557 ->getMetadata("reqd_work_group_size")) {
3558
3559 if (!BuiltinDimVec.empty()) {
3560 llvm_unreachable(
3561 "Kernels should have consistent work group size definition");
3562 }
3563
3564 //
3565 // Generate OpExecutionMode
3566 //
3567
3568 // Ops[0] = Entry Point ID
3569 // Ops[1] = Execution Mode
3570 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3571 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003572 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003573
3574 uint32_t XDim = static_cast<uint32_t>(
3575 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3576 uint32_t YDim = static_cast<uint32_t>(
3577 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3578 uint32_t ZDim = static_cast<uint32_t>(
3579 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3580
David Neto257c3892018-04-11 13:19:45 -04003581 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003582
David Neto87846742018-04-11 17:36:22 -04003583 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003584 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3585 }
3586 }
3587
3588 //
3589 // Generate OpSource.
3590 //
3591 // Ops[0] = SourceLanguage ID
3592 // Ops[1] = Version (LiteralNum)
3593 //
3594 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003595 switch (clspv::Option::Language()) {
3596 case clspv::Option::SourceLanguage::OpenCL_C_10:
3597 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3598 break;
3599 case clspv::Option::SourceLanguage::OpenCL_C_11:
3600 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3601 break;
3602 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003603 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003604 break;
3605 case clspv::Option::SourceLanguage::OpenCL_C_20:
3606 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3607 break;
3608 case clspv::Option::SourceLanguage::OpenCL_CPP:
3609 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3610 break;
3611 default:
3612 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3613 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003614 }
David Neto22f144c2017-06-12 14:26:21 -04003615
David Neto87846742018-04-11 17:36:22 -04003616 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003617 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3618
3619 if (!BuiltinDimVec.empty()) {
3620 //
3621 // Generate OpDecorates for x/y/z dimension.
3622 //
3623 // Ops[0] = Target ID
3624 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003625 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003626
3627 // X Dimension
3628 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003629 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003630 SPIRVInstList.insert(InsertPoint,
3631 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003632
3633 // Y Dimension
3634 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003635 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003636 SPIRVInstList.insert(InsertPoint,
3637 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003638
3639 // Z Dimension
3640 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003641 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003642 SPIRVInstList.insert(InsertPoint,
3643 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003644 }
3645}
3646
David Netob6e2e062018-04-25 10:32:06 -04003647void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3648 // Work around a driver bug. Initializers on Private variables might not
3649 // work. So the start of the kernel should store the initializer value to the
3650 // variables. Yes, *every* entry point pays this cost if *any* entry point
3651 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3652 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003653 // TODO(dneto): Remove this at some point once fixed drivers are widely
3654 // available.
David Netob6e2e062018-04-25 10:32:06 -04003655 if (WorkgroupSizeVarID) {
3656 assert(WorkgroupSizeValueID);
3657
3658 SPIRVOperandList Ops;
3659 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3660
3661 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3662 getSPIRVInstList().push_back(Inst);
3663 }
3664}
3665
David Neto22f144c2017-06-12 14:26:21 -04003666void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3667 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3668 ValueMapType &VMap = getValueMap();
3669
David Netob6e2e062018-04-25 10:32:06 -04003670 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003671
3672 for (BasicBlock &BB : F) {
3673 // Register BasicBlock to ValueMap.
3674 VMap[&BB] = nextID;
3675
3676 //
3677 // Generate OpLabel for Basic Block.
3678 //
3679 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003680 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003681 SPIRVInstList.push_back(Inst);
3682
David Neto6dcd4712017-06-23 11:06:47 -04003683 // OpVariable instructions must come first.
3684 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003685 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3686 // Allocating a pointer requires variable pointers.
3687 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003688 setVariablePointersCapabilities(
3689 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003690 }
David Neto6dcd4712017-06-23 11:06:47 -04003691 GenerateInstruction(I);
3692 }
3693 }
3694
David Neto22f144c2017-06-12 14:26:21 -04003695 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003696 if (clspv::Option::HackInitializers()) {
3697 GenerateEntryPointInitialStores();
3698 }
David Neto22f144c2017-06-12 14:26:21 -04003699 }
3700
3701 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003702 if (!isa<AllocaInst>(I)) {
3703 GenerateInstruction(I);
3704 }
David Neto22f144c2017-06-12 14:26:21 -04003705 }
3706 }
3707}
3708
3709spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3710 const std::map<CmpInst::Predicate, spv::Op> Map = {
3711 {CmpInst::ICMP_EQ, spv::OpIEqual},
3712 {CmpInst::ICMP_NE, spv::OpINotEqual},
3713 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3714 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3715 {CmpInst::ICMP_ULT, spv::OpULessThan},
3716 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3717 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3718 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3719 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3720 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3721 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3722 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3723 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3724 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3725 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3726 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3727 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3728 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3729 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3730 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3731 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3732 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3733
3734 assert(0 != Map.count(I->getPredicate()));
3735
3736 return Map.at(I->getPredicate());
3737}
3738
3739spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3740 const std::map<unsigned, spv::Op> Map{
3741 {Instruction::Trunc, spv::OpUConvert},
3742 {Instruction::ZExt, spv::OpUConvert},
3743 {Instruction::SExt, spv::OpSConvert},
3744 {Instruction::FPToUI, spv::OpConvertFToU},
3745 {Instruction::FPToSI, spv::OpConvertFToS},
3746 {Instruction::UIToFP, spv::OpConvertUToF},
3747 {Instruction::SIToFP, spv::OpConvertSToF},
3748 {Instruction::FPTrunc, spv::OpFConvert},
3749 {Instruction::FPExt, spv::OpFConvert},
3750 {Instruction::BitCast, spv::OpBitcast}};
3751
3752 assert(0 != Map.count(I.getOpcode()));
3753
3754 return Map.at(I.getOpcode());
3755}
3756
3757spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003758 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003759 switch (I.getOpcode()) {
3760 default:
3761 break;
3762 case Instruction::Or:
3763 return spv::OpLogicalOr;
3764 case Instruction::And:
3765 return spv::OpLogicalAnd;
3766 case Instruction::Xor:
3767 return spv::OpLogicalNotEqual;
3768 }
3769 }
3770
alan-bakerb6b09dc2018-11-08 16:59:28 -05003771 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003772 {Instruction::Add, spv::OpIAdd},
3773 {Instruction::FAdd, spv::OpFAdd},
3774 {Instruction::Sub, spv::OpISub},
3775 {Instruction::FSub, spv::OpFSub},
3776 {Instruction::Mul, spv::OpIMul},
3777 {Instruction::FMul, spv::OpFMul},
3778 {Instruction::UDiv, spv::OpUDiv},
3779 {Instruction::SDiv, spv::OpSDiv},
3780 {Instruction::FDiv, spv::OpFDiv},
3781 {Instruction::URem, spv::OpUMod},
3782 {Instruction::SRem, spv::OpSRem},
3783 {Instruction::FRem, spv::OpFRem},
3784 {Instruction::Or, spv::OpBitwiseOr},
3785 {Instruction::Xor, spv::OpBitwiseXor},
3786 {Instruction::And, spv::OpBitwiseAnd},
3787 {Instruction::Shl, spv::OpShiftLeftLogical},
3788 {Instruction::LShr, spv::OpShiftRightLogical},
3789 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3790
3791 assert(0 != Map.count(I.getOpcode()));
3792
3793 return Map.at(I.getOpcode());
3794}
3795
3796void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
3797 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3798 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003799 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3800 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3801
3802 // Register Instruction to ValueMap.
3803 if (0 == VMap[&I]) {
3804 VMap[&I] = nextID;
3805 }
3806
3807 switch (I.getOpcode()) {
3808 default: {
3809 if (Instruction::isCast(I.getOpcode())) {
3810 //
3811 // Generate SPIRV instructions for cast operators.
3812 //
3813
David Netod2de94a2017-08-28 17:27:47 -04003814 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003815 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003816 auto toI8 = Ty == Type::getInt8Ty(Context);
3817 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003818 // Handle zext, sext and uitofp with i1 type specially.
3819 if ((I.getOpcode() == Instruction::ZExt ||
3820 I.getOpcode() == Instruction::SExt ||
3821 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003822 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003823 //
3824 // Generate OpSelect.
3825 //
3826
3827 // Ops[0] = Result Type ID
3828 // Ops[1] = Condition ID
3829 // Ops[2] = True Constant ID
3830 // Ops[3] = False Constant ID
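        // Illustrative sketch (names are placeholders):
        //   %r = zext i1 %c to i32
        // becomes
        //   %r = OpSelect %uint %c %uint_1 %uint_0
        // and %r = uitofp i1 %c to float becomes
        //   %r = OpSelect %float %c %float_1 %float_0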
3831 SPIRVOperandList Ops;
3832
David Neto257c3892018-04-11 13:19:45 -04003833 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003834
David Neto22f144c2017-06-12 14:26:21 -04003835 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04003836 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003837
3838 uint32_t TrueID = 0;
3839 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003840 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04003841 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00003842 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04003843 } else {
3844 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
3845 }
David Neto257c3892018-04-11 13:19:45 -04003846 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003847
3848 uint32_t FalseID = 0;
3849 if (I.getOpcode() == Instruction::ZExt) {
3850 FalseID = VMap[Constant::getNullValue(I.getType())];
3851 } else if (I.getOpcode() == Instruction::SExt) {
3852 FalseID = VMap[Constant::getNullValue(I.getType())];
3853 } else {
3854 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
3855 }
David Neto257c3892018-04-11 13:19:45 -04003856 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003857
David Neto87846742018-04-11 17:36:22 -04003858 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003859 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05003860 } else if (!clspv::Option::Int8Support() &&
3861 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003862 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3863 // 8 bits.
3864 // Before:
3865 // %result = trunc i32 %a to i8
3866 // After:
3867 // %result = OpBitwiseAnd %uint %a %uint_255
3868
3869 SPIRVOperandList Ops;
3870
David Neto257c3892018-04-11 13:19:45 -04003871 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04003872
3873 Type *UintTy = Type::getInt32Ty(Context);
3874 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04003875 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003876
David Neto87846742018-04-11 17:36:22 -04003877 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04003878 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003879 } else {
3880 // Ops[0] = Result Type ID
3881 // Ops[1] = Source Value ID
3882 SPIRVOperandList Ops;
3883
David Neto257c3892018-04-11 13:19:45 -04003884 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04003885
David Neto87846742018-04-11 17:36:22 -04003886 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003887 SPIRVInstList.push_back(Inst);
3888 }
3889 } else if (isa<BinaryOperator>(I)) {
3890 //
3891 // Generate SPIRV instructions for binary operators.
3892 //
3893
3894 // Handle xor with i1 type specially.
3895 if (I.getOpcode() == Instruction::Xor &&
3896 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003897 ((isa<ConstantInt>(I.getOperand(0)) &&
3898 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3899 (isa<ConstantInt>(I.getOperand(1)) &&
3900 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003901 //
3902 // Generate OpLogicalNot.
3903 //
3904 // Ops[0] = Result Type ID
3905 // Ops[1] = Operand
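          // Illustrative sketch (names are placeholders):
          //   %r = xor i1 %c, true
          // becomes
          //   %r = OpLogicalNot %bool %c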
3906 SPIRVOperandList Ops;
3907
David Neto257c3892018-04-11 13:19:45 -04003908 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003909
3910 Value *CondV = I.getOperand(0);
3911 if (isa<Constant>(I.getOperand(0))) {
3912 CondV = I.getOperand(1);
3913 }
David Neto257c3892018-04-11 13:19:45 -04003914 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04003915
David Neto87846742018-04-11 17:36:22 -04003916 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003917 SPIRVInstList.push_back(Inst);
3918 } else {
3919 // Ops[0] = Result Type ID
3920 // Ops[1] = Operand 0
3921 // Ops[2] = Operand 1
3922 SPIRVOperandList Ops;
3923
David Neto257c3892018-04-11 13:19:45 -04003924 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
3925 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04003926
David Neto87846742018-04-11 17:36:22 -04003927 auto *Inst =
3928 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003929 SPIRVInstList.push_back(Inst);
3930 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003931 } else if (I.getOpcode() == Instruction::FNeg) {
3932 // The only unary operator.
3933 //
3934 // Ops[0] = Result Type ID
3935 // Ops[1] = Operand 0
3936 SPIRVOperandList ops;
3937
3938 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
3939 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
3940 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04003941 } else {
3942 I.print(errs());
3943 llvm_unreachable("Unsupported instruction???");
3944 }
3945 break;
3946 }
3947 case Instruction::GetElementPtr: {
3948 auto &GlobalConstArgSet = getGlobalConstArgSet();
3949
3953 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3954
3955 //
3956 // Generate OpAccessChain.
3957 //
3958
3959 // Ops[0] = Result Type ID
3960 // Ops[1] = Base ID
3961 // Ops[2] ... Ops[n] = Indexes ID
3962 SPIRVOperandList Ops;
3963
alan-bakerb6b09dc2018-11-08 16:59:28 -05003964 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003965 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3966 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3967 // Use pointer type with private address space for global constant.
3968 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003969 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003970 }
David Neto257c3892018-04-11 13:19:45 -04003971
3972 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003973
David Neto862b7d82018-06-14 18:48:37 -04003974 // Generate the base pointer.
3975 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04003976
David Neto862b7d82018-06-14 18:48:37 -04003977 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003978
3979 //
3980 // Follows below rules for gep.
3981 //
David Neto862b7d82018-06-14 18:48:37 -04003982 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
3983 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003984 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3985 // first index.
3986 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3987 // use gep's first index.
3988 // 4. If it is not one of cases 1, 2, or 3 above, generate OpAccessChain and use
3989 // gep's first index.
3990 //
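    // Illustrative sketch of the rules above (types and names are placeholders):
    //   getelementptr %struct.S, %struct.S* %p, i32 0, i32 2
    //     -> OpAccessChain %result_ptr_ty %p %uint_2     (rule 1: the leading 0 is dropped)
    //   getelementptr float, float* %p, i32 %n
    //     -> OpPtrAccessChain %result_ptr_ty %p %n       (rule 3: the first index is kept)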
3991 spv::Op Opcode = spv::OpAccessChain;
3992 unsigned offset = 0;
3993 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003994 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003995 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003996 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003997 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003998 }
David Neto862b7d82018-06-14 18:48:37 -04003999 } else {
David Neto22f144c2017-06-12 14:26:21 -04004000 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004001 }
4002
4003 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004004 // Do we need to generate ArrayStride? Check against the GEP result type
4005 // rather than the pointer type of the base because when indexing into
4006 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4007 // for something else in the SPIR-V.
4008 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004009 auto address_space = ResultType->getAddressSpace();
4010 setVariablePointersCapabilities(address_space);
4011 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004012 case spv::StorageClassStorageBuffer:
4013 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004014 // Save the need to generate an ArrayStride decoration. But defer
4015 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004016 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004017 break;
4018 default:
4019 break;
David Neto1a1a0582017-07-07 12:01:44 -04004020 }
David Neto22f144c2017-06-12 14:26:21 -04004021 }
4022
4023 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004024 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004025 }
4026
David Neto87846742018-04-11 17:36:22 -04004027 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004028 SPIRVInstList.push_back(Inst);
4029 break;
4030 }
4031 case Instruction::ExtractValue: {
4032 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4033 // Ops[0] = Result Type ID
4034 // Ops[1] = Composite ID
4035 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4036 SPIRVOperandList Ops;
4037
David Neto257c3892018-04-11 13:19:45 -04004038 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004039
4040 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004041 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004042
4043 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004044 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004045 }
4046
David Neto87846742018-04-11 17:36:22 -04004047 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004048 SPIRVInstList.push_back(Inst);
4049 break;
4050 }
4051 case Instruction::InsertValue: {
4052 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4053 // Ops[0] = Result Type ID
4054 // Ops[1] = Object ID
4055 // Ops[2] = Composite ID
4056 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4057 SPIRVOperandList Ops;
4058
4059 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004060 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004061
4062 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004063 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004064
4065 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004066 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004067
4068 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004069 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004070 }
4071
David Neto87846742018-04-11 17:36:22 -04004072 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004073 SPIRVInstList.push_back(Inst);
4074 break;
4075 }
4076 case Instruction::Select: {
4077 //
4078 // Generate OpSelect.
4079 //
4080
4081 // Ops[0] = Result Type ID
4082 // Ops[1] = Condition ID
4083 // Ops[2] = True Constant ID
4084 // Ops[3] = False Constant ID
4085 SPIRVOperandList Ops;
4086
4087 // Find SPIRV instruction for parameter type.
4088 auto Ty = I.getType();
4089 if (Ty->isPointerTy()) {
4090 auto PointeeTy = Ty->getPointerElementType();
4091 if (PointeeTy->isStructTy() &&
4092 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4093 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004094 } else {
4095 // Selecting between pointers requires variable pointers.
4096 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4097 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4098 setVariablePointers(true);
4099 }
David Neto22f144c2017-06-12 14:26:21 -04004100 }
4101 }
4102
David Neto257c3892018-04-11 13:19:45 -04004103 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4104 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004105
David Neto87846742018-04-11 17:36:22 -04004106 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004107 SPIRVInstList.push_back(Inst);
4108 break;
4109 }
4110 case Instruction::ExtractElement: {
4111 // Handle <4 x i8> type manually.
4112 Type *CompositeTy = I.getOperand(0)->getType();
4113 if (is4xi8vec(CompositeTy)) {
4114 //
4115 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4116 // <4 x i8>.
4117 //
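      // Illustrative sketch (the <4 x i8> lives in a single 32-bit word;
      // names are placeholders):
      //   %b = extractelement <4 x i8> %v, i32 2
      // becomes
      //   %shift = OpShiftRightLogical %uint %v %uint_16
      //   %b     = OpBitwiseAnd        %uint %shift %uint_255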
4118
4119 //
4120 // Generate OpShiftRightLogical
4121 //
4122 // Ops[0] = Result Type ID
4123 // Ops[1] = Operand 0
4124 // Ops[2] = Operand 1
4125 //
4126 SPIRVOperandList Ops;
4127
David Neto257c3892018-04-11 13:19:45 -04004128 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004129
4130 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004131 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004132
4133 uint32_t Op1ID = 0;
4134 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4135 // Handle constant index.
4136 uint64_t Idx = CI->getZExtValue();
4137 Value *ShiftAmount =
4138 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4139 Op1ID = VMap[ShiftAmount];
4140 } else {
4141 // Handle variable index.
4142 SPIRVOperandList TmpOps;
4143
David Neto257c3892018-04-11 13:19:45 -04004144 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4145 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004146
4147 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004148 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004149
4150 Op1ID = nextID;
4151
David Neto87846742018-04-11 17:36:22 -04004152 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004153 SPIRVInstList.push_back(TmpInst);
4154 }
David Neto257c3892018-04-11 13:19:45 -04004155 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004156
4157 uint32_t ShiftID = nextID;
4158
David Neto87846742018-04-11 17:36:22 -04004159 auto *Inst =
4160 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004161 SPIRVInstList.push_back(Inst);
4162
4163 //
4164 // Generate OpBitwiseAnd
4165 //
4166 // Ops[0] = Result Type ID
4167 // Ops[1] = Operand 0
4168 // Ops[2] = Operand 1
4169 //
4170 Ops.clear();
4171
David Neto257c3892018-04-11 13:19:45 -04004172 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004173
4174 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004175 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004176
David Neto9b2d6252017-09-06 15:47:37 -04004177 // Reset mapping for this value to the result of the bitwise and.
4178 VMap[&I] = nextID;
4179
David Neto87846742018-04-11 17:36:22 -04004180 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004181 SPIRVInstList.push_back(Inst);
4182 break;
4183 }
4184
4185 // Ops[0] = Result Type ID
4186 // Ops[1] = Composite ID
4187 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4188 SPIRVOperandList Ops;
4189
David Neto257c3892018-04-11 13:19:45 -04004190 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004191
4192 spv::Op Opcode = spv::OpCompositeExtract;
4193 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004194 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004195 } else {
David Neto257c3892018-04-11 13:19:45 -04004196 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004197 Opcode = spv::OpVectorExtractDynamic;
4198 }
4199
David Neto87846742018-04-11 17:36:22 -04004200 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004201 SPIRVInstList.push_back(Inst);
4202 break;
4203 }
4204 case Instruction::InsertElement: {
4205 // Handle <4 x i8> type manually.
4206 Type *CompositeTy = I.getOperand(0)->getType();
4207 if (is4xi8vec(CompositeTy)) {
4208 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4209 uint32_t CstFFID = VMap[CstFF];
4210
4211 uint32_t ShiftAmountID = 0;
4212 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4213 // Handle constant index.
4214 uint64_t Idx = CI->getZExtValue();
4215 Value *ShiftAmount =
4216 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4217 ShiftAmountID = VMap[ShiftAmount];
4218 } else {
4219 // Handle variable index.
4220 SPIRVOperandList TmpOps;
4221
David Neto257c3892018-04-11 13:19:45 -04004222 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4223 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004224
4225 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004226 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004227
4228 ShiftAmountID = nextID;
4229
David Neto87846742018-04-11 17:36:22 -04004230 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004231 SPIRVInstList.push_back(TmpInst);
4232 }
4233
4234 //
4235 // Generate mask operations.
4236 //
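      // Illustrative sketch for %r = insertelement <4 x i8> %v, i8 %b, i32 1
      // (names are placeholders):
      //   %mask    = OpShiftLeftLogical %uint %uint_255 %uint_8
      //   %invmask = OpNot              %uint %mask
      //   %cleared = OpBitwiseAnd       %uint %v %invmask
      //   %newbyte = OpShiftLeftLogical %uint %b %uint_8
      //   %r       = OpBitwiseOr        %uint %cleared %newbyte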
4237
4238 // ShiftLeft mask according to index of insertelement.
4239 SPIRVOperandList Ops;
4240
David Neto257c3892018-04-11 13:19:45 -04004241 const uint32_t ResTyID = lookupType(CompositeTy);
4242 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004243
4244 uint32_t MaskID = nextID;
4245
David Neto87846742018-04-11 17:36:22 -04004246 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004247 SPIRVInstList.push_back(Inst);
4248
4249 // Inverse mask.
4250 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004251 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004252
4253 uint32_t InvMaskID = nextID;
4254
David Neto87846742018-04-11 17:36:22 -04004255 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004256 SPIRVInstList.push_back(Inst);
4257
4258 // Apply mask.
4259 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004260 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004261
4262 uint32_t OrgValID = nextID;
4263
David Neto87846742018-04-11 17:36:22 -04004264 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004265 SPIRVInstList.push_back(Inst);
4266
4267 // Create correct value according to index of insertelement.
4268 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004269 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4270 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004271
4272 uint32_t InsertValID = nextID;
4273
David Neto87846742018-04-11 17:36:22 -04004274 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004275 SPIRVInstList.push_back(Inst);
4276
4277 // Insert value to original value.
4278 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004279 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004280
David Netoa394f392017-08-26 20:45:29 -04004281 VMap[&I] = nextID;
4282
David Neto87846742018-04-11 17:36:22 -04004283 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004284 SPIRVInstList.push_back(Inst);
4285
4286 break;
4287 }
4288
David Neto22f144c2017-06-12 14:26:21 -04004289 SPIRVOperandList Ops;
4290
James Priced26efea2018-06-09 23:28:32 +01004291 // Ops[0] = Result Type ID
4292 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004293
4294 spv::Op Opcode = spv::OpCompositeInsert;
4295 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004296 const auto value = CI->getZExtValue();
4297 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004298 // Ops[1] = Object ID
4299 // Ops[2] = Composite ID
4300 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004301 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004302 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004303 } else {
James Priced26efea2018-06-09 23:28:32 +01004304 // Ops[1] = Composite ID
4305 // Ops[2] = Object ID
4306 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004307 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004308 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004309 Opcode = spv::OpVectorInsertDynamic;
4310 }
4311
David Neto87846742018-04-11 17:36:22 -04004312 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004313 SPIRVInstList.push_back(Inst);
4314 break;
4315 }
4316 case Instruction::ShuffleVector: {
4317 // Ops[0] = Result Type ID
4318 // Ops[1] = Vector 1 ID
4319 // Ops[2] = Vector 2 ID
4320 // Ops[3] ... Ops[n] = Components (Literal Number)
4321 SPIRVOperandList Ops;
4322
David Neto257c3892018-04-11 13:19:45 -04004323 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4324 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004325
4326 uint64_t NumElements = 0;
4327 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4328 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4329
4330 if (Cst->isNullValue()) {
4331 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004332 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004333 }
4334 } else if (const ConstantDataSequential *CDS =
4335 dyn_cast<ConstantDataSequential>(Cst)) {
4336 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4337 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004338 const auto value = CDS->getElementAsInteger(i);
4339 assert(value <= UINT32_MAX);
4340 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004341 }
4342 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4343 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4344 auto Op = CV->getOperand(i);
4345
4346 uint32_t literal = 0;
4347
4348 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4349 literal = static_cast<uint32_t>(CI->getZExtValue());
4350 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4351 literal = 0xFFFFFFFFu;
4352 } else {
4353 Op->print(errs());
4354 llvm_unreachable("Unsupported element in ConstantVector!");
4355 }
4356
David Neto257c3892018-04-11 13:19:45 -04004357 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004358 }
4359 } else {
4360 Cst->print(errs());
4361 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4362 }
4363 }
4364
David Neto87846742018-04-11 17:36:22 -04004365 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004366 SPIRVInstList.push_back(Inst);
4367 break;
4368 }
4369 case Instruction::ICmp:
4370 case Instruction::FCmp: {
4371 CmpInst *CmpI = cast<CmpInst>(&I);
4372
David Netod4ca2e62017-07-06 18:47:35 -04004373 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004374 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004375 if (isa<PointerType>(ArgTy)) {
4376 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004377 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004378 errs()
4379 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4380 << "in function " << name << "\n";
4381 llvm_unreachable("Pointer equality check is invalid");
4382 break;
4383 }
4384
David Neto257c3892018-04-11 13:19:45 -04004385 // Ops[0] = Result Type ID
4386 // Ops[1] = Operand 1 ID
4387 // Ops[2] = Operand 2 ID
4388 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004389
David Neto257c3892018-04-11 13:19:45 -04004390 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4391 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004392
4393 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004394 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004395 SPIRVInstList.push_back(Inst);
4396 break;
4397 }
4398 case Instruction::Br: {
4399 // Branch instrucion is deferred because it needs label's ID. Record slot's
4400 // location on SPIRVInstructionList.
4401 DeferredInsts.push_back(
4402 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4403 break;
4404 }
4405 case Instruction::Switch: {
4406 I.print(errs());
4407 llvm_unreachable("Unsupported instruction???");
4408 break;
4409 }
4410 case Instruction::IndirectBr: {
4411 I.print(errs());
4412 llvm_unreachable("Unsupported instruction???");
4413 break;
4414 }
4415 case Instruction::PHI: {
4416 // The PHI instruction is deferred because it needs the IDs of its incoming
4417 // values and labels. Record the slot's location in the SPIRVInstructionList.
4418 DeferredInsts.push_back(
4419 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4420 break;
4421 }
4422 case Instruction::Alloca: {
4423 //
4424 // Generate OpVariable.
4425 //
4426 // Ops[0] : Result Type ID
4427 // Ops[1] : Storage Class
4428 SPIRVOperandList Ops;
4429
David Neto257c3892018-04-11 13:19:45 -04004430 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004431
David Neto87846742018-04-11 17:36:22 -04004432 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004433 SPIRVInstList.push_back(Inst);
4434 break;
4435 }
4436 case Instruction::Load: {
4437 LoadInst *LD = cast<LoadInst>(&I);
4438 //
4439 // Generate OpLoad.
4440 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004441
alan-baker5b86ed72019-02-15 08:26:50 -05004442 if (LD->getType()->isPointerTy()) {
4443 // Loading a pointer requires variable pointers.
4444 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4445 }
David Neto22f144c2017-06-12 14:26:21 -04004446
David Neto0a2f98d2017-09-15 19:38:40 -04004447 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004448 uint32_t PointerID = VMap[LD->getPointerOperand()];
4449
4450 // This is a hack to work around what looks like a driver bug.
4451 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004452 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4453 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004454 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004455 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004456 // Generate a bitwise-and of the original value with itself.
4457 // We should have been able to get away with just an OpCopyObject,
4458 // but we need something more complex to get past certain driver bugs.
4459 // This is ridiculous, but necessary.
4460 // TODO(dneto): Revisit this once drivers fix their bugs.
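    // Illustrative sketch (names are placeholders): instead of
    //   %size = OpLoad %v3uint %workgroup_size_var
    // this emits
    //   %size = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value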
4461
4462 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004463 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4464 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004465
David Neto87846742018-04-11 17:36:22 -04004466 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004467 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004468 break;
4469 }
4470
4471 // This is the normal path. Generate a load.
4472
David Neto22f144c2017-06-12 14:26:21 -04004473 // Ops[0] = Result Type ID
4474 // Ops[1] = Pointer ID
4475 // Ops[2] ... Ops[n] = Optional Memory Access
4476 //
4477 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004478
David Neto22f144c2017-06-12 14:26:21 -04004479 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004480 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004481
David Neto87846742018-04-11 17:36:22 -04004482 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004483 SPIRVInstList.push_back(Inst);
4484 break;
4485 }
4486 case Instruction::Store: {
4487 StoreInst *ST = cast<StoreInst>(&I);
4488 //
4489 // Generate OpStore.
4490 //
4491
alan-baker5b86ed72019-02-15 08:26:50 -05004492 if (ST->getValueOperand()->getType()->isPointerTy()) {
4493 // Storing a pointer requires variable pointers.
4494 setVariablePointersCapabilities(
4495 ST->getValueOperand()->getType()->getPointerAddressSpace());
4496 }
4497
David Neto22f144c2017-06-12 14:26:21 -04004498 // Ops[0] = Pointer ID
4499 // Ops[1] = Object ID
4500 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4501 //
4502 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004503 SPIRVOperandList Ops;
4504 Ops << MkId(VMap[ST->getPointerOperand()])
4505 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004506
David Neto87846742018-04-11 17:36:22 -04004507 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004508 SPIRVInstList.push_back(Inst);
4509 break;
4510 }
4511 case Instruction::AtomicCmpXchg: {
4512 I.print(errs());
4513 llvm_unreachable("Unsupported instruction???");
4514 break;
4515 }
4516 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004517 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4518
4519 spv::Op opcode;
4520
4521 switch (AtomicRMW->getOperation()) {
4522 default:
4523 I.print(errs());
4524 llvm_unreachable("Unsupported instruction???");
4525 case llvm::AtomicRMWInst::Add:
4526 opcode = spv::OpAtomicIAdd;
4527 break;
4528 case llvm::AtomicRMWInst::Sub:
4529 opcode = spv::OpAtomicISub;
4530 break;
4531 case llvm::AtomicRMWInst::Xchg:
4532 opcode = spv::OpAtomicExchange;
4533 break;
4534 case llvm::AtomicRMWInst::Min:
4535 opcode = spv::OpAtomicSMin;
4536 break;
4537 case llvm::AtomicRMWInst::Max:
4538 opcode = spv::OpAtomicSMax;
4539 break;
4540 case llvm::AtomicRMWInst::UMin:
4541 opcode = spv::OpAtomicUMin;
4542 break;
4543 case llvm::AtomicRMWInst::UMax:
4544 opcode = spv::OpAtomicUMax;
4545 break;
4546 case llvm::AtomicRMWInst::And:
4547 opcode = spv::OpAtomicAnd;
4548 break;
4549 case llvm::AtomicRMWInst::Or:
4550 opcode = spv::OpAtomicOr;
4551 break;
4552 case llvm::AtomicRMWInst::Xor:
4553 opcode = spv::OpAtomicXor;
4554 break;
4555 }
4556
4557 //
4558 // Generate OpAtomic*.
4559 //
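    // Illustrative sketch (names are placeholders): for
    //   %old = atomicrmw add i32* %p, i32 %v seq_cst
    // this emits roughly
    //   %old = OpAtomicIAdd %uint %p %scope_device %semantics %v
    // where %scope_device and %semantics are the integer constants referenced
    // just below (Device scope; UniformMemory | SequentiallyConsistent).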
4560 SPIRVOperandList Ops;
4561
David Neto257c3892018-04-11 13:19:45 -04004562 Ops << MkId(lookupType(I.getType()))
4563 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004564
4565 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004566 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004567 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004568
4569 const auto ConstantMemorySemantics = ConstantInt::get(
4570 IntTy, spv::MemorySemanticsUniformMemoryMask |
4571 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004572 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004573
David Neto257c3892018-04-11 13:19:45 -04004574 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004575
4576 VMap[&I] = nextID;
4577
David Neto87846742018-04-11 17:36:22 -04004578 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004579 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004580 break;
4581 }
4582 case Instruction::Fence: {
4583 I.print(errs());
4584 llvm_unreachable("Unsupported instruction???");
4585 break;
4586 }
4587 case Instruction::Call: {
4588 CallInst *Call = dyn_cast<CallInst>(&I);
4589 Function *Callee = Call->getCalledFunction();
4590
Alan Baker202c8c72018-08-13 13:47:44 -04004591 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004592 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4593 // Generate an OpLoad
4594 SPIRVOperandList Ops;
4595 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004596
David Neto862b7d82018-06-14 18:48:37 -04004597 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4598 << MkId(ResourceVarDeferredLoadCalls[Call]);
4599
4600 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4601 SPIRVInstList.push_back(Inst);
4602 VMap[Call] = load_id;
4603 break;
4604
4605 } else {
4606 // This maps to an OpVariable we've already generated.
4607 // No code is generated for the call.
4608 }
4609 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004610 } else if (Callee->getName().startswith(
4611 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004612 // Don't codegen an instruction here, but instead map this call directly
4613 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004614 int spec_id = static_cast<int>(
4615 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004616 const auto &info = LocalSpecIdInfoMap[spec_id];
4617 VMap[Call] = info.variable_id;
4618 break;
David Neto862b7d82018-06-14 18:48:37 -04004619 }
4620
4621 // Sampler initializers become a load of the corresponding sampler.
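    // Illustrative sketch (names are placeholders): a literal such as
    //   const sampler_t s = CLK_ADDRESS_CLAMP | CLK_FILTER_NEAREST;
    // reaches this point as a call to the clspv literal-sampler helper and is
    // lowered to a load from the corresponding module-scope sampler variable:
    //   %s = OpLoad %sampler %literal_sampler_var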
4622
Kévin Petitdf71de32019-04-09 14:09:50 +01004623 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004624 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004625 const auto third_param = static_cast<unsigned>(
4626 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4627 auto sampler_value = third_param;
4628 if (clspv::Option::UseSamplerMap()) {
4629 sampler_value = getSamplerMap()[third_param].first;
4630 }
David Neto862b7d82018-06-14 18:48:37 -04004631
4632 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004633 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004634 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004635
David Neto257c3892018-04-11 13:19:45 -04004636 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004637 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004638
David Neto862b7d82018-06-14 18:48:37 -04004639 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004640 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004641 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004642 break;
4643 }
4644
Kévin Petit349c9502019-03-28 17:24:14 +00004645 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004646 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4647 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4648 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004649
Kévin Petit617a76d2019-04-04 13:54:16 +01004650 // If the switch above didn't have an entry, maybe the intrinsic
4651 // is using the name mangling logic.
4652 bool usesMangler = false;
4653 if (opcode == spv::OpNop) {
4654 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4655 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4656 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4657 usesMangler = true;
4658 }
4659 }
4660
Kévin Petit349c9502019-03-28 17:24:14 +00004661 if (opcode != spv::OpNop) {
4662
David Neto22f144c2017-06-12 14:26:21 -04004663 SPIRVOperandList Ops;
4664
Kévin Petit349c9502019-03-28 17:24:14 +00004665 if (!I.getType()->isVoidTy()) {
4666 Ops << MkId(lookupType(I.getType()));
4667 }
David Neto22f144c2017-06-12 14:26:21 -04004668
Kévin Petit617a76d2019-04-04 13:54:16 +01004669 unsigned firstOperand = usesMangler ? 1 : 0;
4670 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004671 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004672 }
4673
Kévin Petit349c9502019-03-28 17:24:14 +00004674 if (!I.getType()->isVoidTy()) {
4675 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004676 }
4677
Kévin Petit349c9502019-03-28 17:24:14 +00004678 SPIRVInstruction *Inst;
4679 if (!I.getType()->isVoidTy()) {
4680 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4681 } else {
4682 Inst = new SPIRVInstruction(opcode, Ops);
4683 }
Kévin Petit8a560882019-03-21 15:24:34 +00004684 SPIRVInstList.push_back(Inst);
4685 break;
4686 }
4687
David Neto22f144c2017-06-12 14:26:21 -04004688 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4689 if (Callee->getName().startswith("spirv.copy_memory")) {
4690 //
4691 // Generate OpCopyMemory.
4692 //
4693
4694 // Ops[0] = Dst ID
4695 // Ops[1] = Src ID
4696 // Ops[2] = Memory Access
4697 // Ops[3] = Alignment
4698
4699 auto IsVolatile =
4700 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4701
4702 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4703 : spv::MemoryAccessMaskNone;
4704
4705 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4706
4707 auto Alignment =
4708 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4709
David Neto257c3892018-04-11 13:19:45 -04004710 SPIRVOperandList Ops;
4711 Ops << MkId(VMap[Call->getArgOperand(0)])
4712 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4713 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004714
David Neto87846742018-04-11 17:36:22 -04004715 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004716
4717 SPIRVInstList.push_back(Inst);
4718
4719 break;
4720 }
4721
alan-baker75090e42020-02-20 11:21:04 -05004722 // read_image (with a sampler) is converted to OpSampledImage and
4723 // OpImageSampleExplicitLod. Additionally, OpTypeSampledImage is
4724 // generated.
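    // Illustrative sketch for a float sampled read (names are placeholders):
    //   float4 r = read_imagef(img, smp, coord);
    // becomes roughly
    //   %si = OpSampledImage %sampled_image_ty %img %smp
    //   %r  = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
    // Signed-int images sample as v4int and are then OpBitcast to the call's
    // result type, as handled below.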
alan-bakerf67468c2019-11-25 15:51:49 -05004725 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004726 //
4727 // Generate OpSampledImage.
4728 //
4729 // Ops[0] = Result Type ID
4730 // Ops[1] = Image ID
4731 // Ops[2] = Sampler ID
4732 //
4733 SPIRVOperandList Ops;
4734
4735 Value *Image = Call->getArgOperand(0);
4736 Value *Sampler = Call->getArgOperand(1);
4737 Value *Coordinate = Call->getArgOperand(2);
4738
4739 TypeMapType &OpImageTypeMap = getImageTypeMap();
4740 Type *ImageTy = Image->getType()->getPointerElementType();
4741 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004742 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004743 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04004744
4745 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004746
4747 uint32_t SampledImageID = nextID;
4748
David Neto87846742018-04-11 17:36:22 -04004749 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004750 SPIRVInstList.push_back(Inst);
4751
4752 //
4753 // Generate OpImageSampleExplicitLod.
4754 //
4755 // Ops[0] = Result Type ID
4756 // Ops[1] = Sampled Image ID
4757 // Ops[2] = Coordinate ID
4758 // Ops[3] = Image Operands Type ID
4759 // Ops[4] ... Ops[n] = Operands ID
4760 //
4761 Ops.clear();
4762
alan-bakerf67468c2019-11-25 15:51:49 -05004763 const bool is_int_image = IsIntImageType(Image->getType());
4764 uint32_t result_type = 0;
4765 if (is_int_image) {
4766 result_type = v4int32ID;
4767 } else {
4768 result_type = lookupType(Call->getType());
4769 }
4770
4771 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
4772 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004773
4774 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04004775 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04004776
alan-bakerf67468c2019-11-25 15:51:49 -05004777 uint32_t final_id = nextID++;
4778 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04004779
alan-bakerf67468c2019-11-25 15:51:49 -05004780 uint32_t image_id = final_id;
4781 if (is_int_image) {
4782 // Int image requires a bitcast from v4int to v4uint.
4783 image_id = nextID++;
4784 }
4785
4786 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004787 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05004788
4789 if (is_int_image) {
4790 // Generate the bitcast.
4791 Ops.clear();
4792 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4793 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4794 SPIRVInstList.push_back(Inst);
4795 }
David Neto22f144c2017-06-12 14:26:21 -04004796 break;
4797 }
4798
alan-baker75090e42020-02-20 11:21:04 -05004799 // read_image (without a sampler) is mapped to OpImageFetch.
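    // Illustrative sketch (names are placeholders):
    //   float4 r = read_imagef(img, coord);   // no sampler
    // becomes roughly
    //   %r = OpImageFetch %v4float %img %coord Lod %int_0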
4800 if (clspv::IsUnsampledImageRead(Callee)) {
4801 Value *Image = Call->getArgOperand(0);
4802 Value *Coordinate = Call->getArgOperand(1);
4803
4804 //
4805 // Generate OpImageFetch
4806 //
4807 // Ops[0] = Result Type ID
4808 // Ops[1] = Image ID
4809 // Ops[2] = Coordinate ID
4810 // Ops[3] = Lod
4811 // Ops[4] = 0
4812 //
4813 SPIRVOperandList Ops;
4814
4815 const bool is_int_image = IsIntImageType(Image->getType());
4816 uint32_t result_type = 0;
4817 if (is_int_image) {
4818 result_type = v4int32ID;
4819 } else {
4820 result_type = lookupType(Call->getType());
4821 }
4822
4823 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
4824 << MkNum(spv::ImageOperandsLodMask);
4825
4826 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4827 Ops << MkId(VMap[CstInt0]);
4828
4829 uint32_t final_id = nextID++;
4830 VMap[&I] = final_id;
4831
4832 uint32_t image_id = final_id;
4833 if (is_int_image) {
4834 // Int image requires a bitcast from v4int to v4uint.
4835 image_id = nextID++;
4836 }
4837
4838 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
4839 SPIRVInstList.push_back(Inst);
4840
4841 if (is_int_image) {
4842 // Generate the bitcast.
4843 Ops.clear();
4844 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
4845 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
4846 SPIRVInstList.push_back(Inst);
4847 }
4848 break;
4849 }
4850
alan-bakerf67468c2019-11-25 15:51:49 -05004851 // write_image is mapped to OpImageWrite.
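    // Illustrative sketch (names are placeholders):
    //   write_imagef(img, coord, texel);
    // becomes
    //   OpImageWrite %img %coord %texel
    // Signed-int images first OpBitcast the texel to v4int, as handled below.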
4852 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004853 //
4854 // Generate OpImageWrite.
4855 //
4856 // Ops[0] = Image ID
4857 // Ops[1] = Coordinate ID
4858 // Ops[2] = Texel ID
4859 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4860 // Ops[4] ... Ops[n] = (Optional) Operands ID
4861 //
4862 SPIRVOperandList Ops;
4863
4864 Value *Image = Call->getArgOperand(0);
4865 Value *Coordinate = Call->getArgOperand(1);
4866 Value *Texel = Call->getArgOperand(2);
4867
4868 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04004869 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04004870 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05004871
4872 const bool is_int_image = IsIntImageType(Image->getType());
4873 if (is_int_image) {
4874 // Generate a bitcast to v4int and use it as the texel value.
4875 uint32_t castID = nextID++;
4876 Ops << MkId(v4int32ID) << MkId(TexelID);
4877 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
4878 SPIRVInstList.push_back(cast);
4879 Ops.clear();
4880 TexelID = castID;
4881 }
David Neto257c3892018-04-11 13:19:45 -04004882 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004883
David Neto87846742018-04-11 17:36:22 -04004884 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004885 SPIRVInstList.push_back(Inst);
4886 break;
4887 }
4888
alan-bakerce179f12019-12-06 19:02:22 -05004889 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
4890 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004891 //
alan-bakerce179f12019-12-06 19:02:22 -05004892 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004893 //
4894 // Ops[0] = Image ID
4895 //
alan-bakerce179f12019-12-06 19:02:22 -05004896 // Result type has components equal to the dimensionality of the image,
4897 // plus 1 if the image is arrayed.
4898 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004899 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04004900 SPIRVOperandList Ops;
4901
4902 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004903 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4904 uint32_t SizesTypeID = 0;
4905
David Neto5c22a252018-03-15 16:07:41 -04004906 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004907 const uint32_t dim = ImageDimensionality(Image->getType());
alan-bakerf906d2b2019-12-10 11:26:23 -05004908 // TODO(alan-baker): fix component calculation when arrayed images are
4909 // supported.
alan-bakerce179f12019-12-06 19:02:22 -05004910 const uint32_t components = dim;
4911 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004912 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
4913 } else {
4914 SizesTypeID = TypeMap[VectorType::get(Type::getInt32Ty(Context), dim)];
4915 }
David Neto5c22a252018-03-15 16:07:41 -04004916 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04004917 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004918 spv::Op query_opcode = spv::OpImageQuerySize;
4919 if (clspv::IsSampledImageType(Image->getType())) {
4920 query_opcode = spv::OpImageQuerySizeLod;
4921 // Need explicit 0 for Lod operand.
4922 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4923 Ops << MkId(VMap[CstInt0]);
4924 }
David Neto5c22a252018-03-15 16:07:41 -04004925
4926 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05004927 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004928 SPIRVInstList.push_back(QueryInst);
4929
alan-bakerce179f12019-12-06 19:02:22 -05004930 // May require an extra instruction to create the appropriate result of
4931 // the builtin function.
4932 if (clspv::IsGetImageDim(Callee)) {
4933 if (dim == 3) {
4934 // get_image_dim returns an int4 for 3D images.
4935 //
4936 // Reset value map entry since we generated an intermediate
4937 // instruction.
4938 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04004939
alan-bakerce179f12019-12-06 19:02:22 -05004940 // Implement:
4941 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4942 Ops.clear();
4943 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
4944 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04004945
alan-bakerce179f12019-12-06 19:02:22 -05004946 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4947 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04004948
alan-bakerce179f12019-12-06 19:02:22 -05004949 auto *Inst =
4950 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
4951 SPIRVInstList.push_back(Inst);
4952 } else if (dim != components) {
4953 // get_image_dim returns an int2 regardless of the arrayedness of the
4954 // image. If the image is arrayed an element must be dropped from the
4955 // query result.
4956 //
4957 // Reset value map entry since we generated an intermediate
4958 // instruction.
4959 VMap[&I] = nextID;
4960
4961 // Implement:
4962 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4963 Ops.clear();
4964 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
4965 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
4966
4967 auto *Inst =
4968 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
4969 SPIRVInstList.push_back(Inst);
4970 }
4971 } else if (components > 1) {
4972 // Reset value map entry since we generated an intermediate instruction.
4973 VMap[&I] = nextID;
4974
4975 // Implement:
4976 // %result = OpCompositeExtract %uint %sizes <component number>
4977 Ops.clear();
4978 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
4979
4980 uint32_t component = 0;
4981 if (IsGetImageHeight(Callee))
4982 component = 1;
4983 else if (IsGetImageDepth(Callee))
4984 component = 2;
4985 Ops << MkNum(component);
4986
4987 auto *Inst =
4988 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
4989 SPIRVInstList.push_back(Inst);
4990 }
David Neto5c22a252018-03-15 16:07:41 -04004991 break;
4992 }
4993
David Neto22f144c2017-06-12 14:26:21 -04004994 // Call instruction is deferred because it needs the callee function's ID.
4995 // Record the slot's location in the SPIRVInstructionList.
4996 DeferredInsts.push_back(
4997 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4998
David Neto3fbb4072017-10-16 11:28:14 -04004999 // Check whether the implementation of this call uses an extended
5000 // instruction plus one more value-producing instruction. If so, then
5001 // reserve the id for the extra value-producing slot.
5002 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5003 if (EInst != kGlslExtInstBad) {
5004 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005005 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005006 VMap[&I] = nextID;
5007 nextID++;
5008 }
5009 break;
5010 }
5011 case Instruction::Ret: {
5012 unsigned NumOps = I.getNumOperands();
5013 if (NumOps == 0) {
5014 //
5015 // Generate OpReturn.
5016 //
David Netoef5ba2b2019-12-20 08:35:54 -05005017 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005018 } else {
5019 //
5020 // Generate OpReturnValue.
5021 //
5022
5023 // Ops[0] = Return Value ID
5024 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005025
5026 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005027
David Neto87846742018-04-11 17:36:22 -04005028 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005029 SPIRVInstList.push_back(Inst);
5030 break;
5031 }
5032 break;
5033 }
5034 }
5035}
5036
5037void SPIRVProducerPass::GenerateFuncEpilogue() {
5038 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5039
5040 //
5041 // Generate OpFunctionEnd
5042 //
5043
David Netoef5ba2b2019-12-20 08:35:54 -05005044 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005045 SPIRVInstList.push_back(Inst);
5046}
5047
5048bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005049 // Don't specialize <4 x i8> if i8 is generally supported.
5050 if (clspv::Option::Int8Support())
5051 return false;
5052
David Neto22f144c2017-06-12 14:26:21 -04005053 LLVMContext &Context = Ty->getContext();
5054 if (Ty->isVectorTy()) {
5055 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
5056 Ty->getVectorNumElements() == 4) {
5057 return true;
5058 }
5059 }
5060
5061 return false;
5062}
5063
5064void SPIRVProducerPass::HandleDeferredInstruction() {
5065 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5066 ValueMapType &VMap = getValueMap();
5067 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5068
5069 for (auto DeferredInst = DeferredInsts.rbegin();
5070 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5071 Value *Inst = std::get<0>(*DeferredInst);
5072 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
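    // Skip past any OpPhi instructions at the recorded position: SPIR-V
    // requires phis to be the first instructions in a block, so nothing may
    // be inserted ahead of them.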
5073 if (InsertPoint != SPIRVInstList.end()) {
5074 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5075 ++InsertPoint;
5076 }
5077 }
5078
5079 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005080 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005081 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005082 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005083 //
5084 // Generate OpLoopMerge.
5085 //
5086 // Ops[0] = Merge Block ID
5087 // Ops[1] = Continue Target ID
5088 // Ops[2] = Loop Control
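        //
        // Illustrative output for a loop header (ids are hypothetical):
        //   OpLoopMerge %merge_bb %continue_bb None
        // emitted just before the header's branch instruction.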
5089 SPIRVOperandList Ops;
5090
alan-baker06cad652019-12-03 17:56:47 -05005091 auto MergeBB = MergeBlocks[BrBB];
5092 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005093 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005094 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005095 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005096 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005097
David Neto87846742018-04-11 17:36:22 -04005098 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005099 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005100 } else if (MergeBlocks.count(BrBB)) {
5101 //
5102 // Generate OpSelectionMerge.
5103 //
5104 // Ops[0] = Merge Block ID
5105 // Ops[1] = Selection Control
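        //
        // Illustrative output for an if-statement (ids are hypothetical):
        //   OpSelectionMerge %merge_bb None
        // emitted just before the conditional branch.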
5106 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005107
alan-baker06cad652019-12-03 17:56:47 -05005108 auto MergeBB = MergeBlocks[BrBB];
5109 uint32_t MergeBBID = VMap[MergeBB];
5110 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005111
alan-baker06cad652019-12-03 17:56:47 -05005112 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5113 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005114 }
5115
5116 if (Br->isConditional()) {
5117 //
5118 // Generate OpBranchConditional.
5119 //
5120 // Ops[0] = Condition ID
5121 // Ops[1] = True Label ID
5122 // Ops[2] = False Label ID
5123 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5124 SPIRVOperandList Ops;
5125
5126 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005127 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005128 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005129
5130 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005131
David Neto87846742018-04-11 17:36:22 -04005132 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005133 SPIRVInstList.insert(InsertPoint, BrInst);
5134 } else {
5135 //
5136 // Generate OpBranch.
5137 //
5138 // Ops[0] = Target Label ID
5139 SPIRVOperandList Ops;
5140
5141 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005142 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005143
David Neto87846742018-04-11 17:36:22 -04005144 SPIRVInstList.insert(InsertPoint,
5145 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005146 }
5147 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005148 if (PHI->getType()->isPointerTy()) {
5149 // OpPhi on pointers requires variable pointers.
5150 setVariablePointersCapabilities(
5151 PHI->getType()->getPointerAddressSpace());
5152 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5153 setVariablePointers(true);
5154 }
5155 }
5156
David Neto22f144c2017-06-12 14:26:21 -04005157 //
5158 // Generate OpPhi.
5159 //
5160 // Ops[0] = Result Type ID
5161 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5162 SPIRVOperandList Ops;
5163
David Neto257c3892018-04-11 13:19:45 -04005164 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005165
David Neto22f144c2017-06-12 14:26:21 -04005166 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5167 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005168 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005169 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005170 }
5171
5172 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005173 InsertPoint,
5174 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005175 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5176 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005177 auto callee_name = Callee->getName();
5178 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005179
5180 if (EInst) {
5181 uint32_t &ExtInstImportID = getOpExtInstImportID();
5182
5183 //
5184 // Generate OpExtInst.
5185 //
5186
5187 // Ops[0] = Result Type ID
5188 // Ops[1] = Set ID (OpExtInstImport ID)
5189 // Ops[2] = Instruction Number (Literal Number)
5190 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
5191 SPIRVOperandList Ops;
5192
David Neto862b7d82018-06-14 18:48:37 -04005193 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5194 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005195
David Neto22f144c2017-06-12 14:26:21 -04005196 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5197 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005198 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005199 }
5200
David Neto87846742018-04-11 17:36:22 -04005201 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5202 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005203 SPIRVInstList.insert(InsertPoint, ExtInst);
5204
David Neto3fbb4072017-10-16 11:28:14 -04005205 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5206 if (IndirectExtInst != kGlslExtInstBad) {
5207 // Generate one more instruction that uses the result of the extended
5208 // instruction. Its result id is one more than the id of the
5209 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005210 LLVMContext &Context =
5211 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005212
David Neto3fbb4072017-10-16 11:28:14 -04005213 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5214 &VMap, &SPIRVInstList, &InsertPoint](
5215 spv::Op opcode, Constant *constant) {
5216 //
5217 // Generate instruction like:
5218 // result = opcode constant <extinst-result>
5219 //
5220 // Ops[0] = Result Type ID
5221 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5222 // Ops[2] = Operand 1 ;; the result of the extended instruction
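          // Illustrative pair for acospi(x) (ids are hypothetical):
          //   %ext = OpExtInst %float %glsl_set Acos %x
          //   %res = OpFMul %float %c_one_over_pi %ext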
5223 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005224
David Neto3fbb4072017-10-16 11:28:14 -04005225 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005226 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005227
5228 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5229 constant = ConstantVector::getSplat(
5230 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5231 }
David Neto257c3892018-04-11 13:19:45 -04005232 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005233
5234 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005235 InsertPoint, new SPIRVInstruction(
5236 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005237 };
5238
5239 switch (IndirectExtInst) {
5240 case glsl::ExtInstFindUMsb: // Implementing clz
5241 generate_extra_inst(
5242 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5243 break;
5244 case glsl::ExtInstAcos: // Implementing acospi
5245 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005246 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005247 case glsl::ExtInstAtan2: // Implementing atan2pi
5248 generate_extra_inst(
5249 spv::OpFMul,
5250 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5251 break;
5252
5253 default:
5254 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005255 }
David Neto22f144c2017-06-12 14:26:21 -04005256 }
David Neto3fbb4072017-10-16 11:28:14 -04005257
alan-bakerb39c8262019-03-08 14:03:37 -05005258 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005259 //
5260 // Generate OpBitCount
5261 //
5262 // Ops[0] = Result Type ID
5263 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005264 SPIRVOperandList Ops;
5265 Ops << MkId(lookupType(Call->getType()))
5266 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005267
5268 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005269 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005270 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005271
David Neto862b7d82018-06-14 18:48:37 -04005272 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005273
5274 // Generate an OpCompositeConstruct
5275 SPIRVOperandList Ops;
5276
5277 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005278 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005279
5280 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005281 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005282 }
5283
5284 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005285 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5286 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005287
Alan Baker202c8c72018-08-13 13:47:44 -04005288 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5289
5290 // We have already mapped the call's result value to an ID.
5291 // Don't generate any code now.
5292
5293 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005294
5295 // We have already mapped the call's result value to an ID.
5296 // Don't generate any code now.
5297
David Neto22f144c2017-06-12 14:26:21 -04005298 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005299 if (Call->getType()->isPointerTy()) {
5300 // Functions returning pointers require variable pointers.
5301 setVariablePointersCapabilities(
5302 Call->getType()->getPointerAddressSpace());
5303 }
5304
David Neto22f144c2017-06-12 14:26:21 -04005305 //
5306 // Generate OpFunctionCall.
5307 //
5308
5309 // Ops[0] = Result Type ID
5310 // Ops[1] = Callee Function ID
5311 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5312 SPIRVOperandList Ops;
5313
David Neto862b7d82018-06-14 18:48:37 -04005314 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005315
5316 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005317 if (CalleeID == 0) {
5318 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005319 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005320 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5321 // causes an infinite loop. Instead, go ahead and generate
5322 // the bad function call. A validator will catch the 0-Id.
5323 // llvm_unreachable("Can't translate function call");
5324 }
David Neto22f144c2017-06-12 14:26:21 -04005325
David Neto257c3892018-04-11 13:19:45 -04005326 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005327
David Neto22f144c2017-06-12 14:26:21 -04005328 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5329 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005330 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005331 auto *operand_type = operand->getType();
5332 // Images and samplers can be passed as function parameters without
5333 // variable pointers.
5334 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5335 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005336 auto sc =
5337 GetStorageClass(operand->getType()->getPointerAddressSpace());
5338 if (sc == spv::StorageClassStorageBuffer) {
5339 // Passing SSBO by reference requires variable pointers storage
5340 // buffer.
5341 setVariablePointersStorageBuffer(true);
5342 } else if (sc == spv::StorageClassWorkgroup) {
5343 // Workgroup references require variable pointers if they are not
5344 // memory object declarations.
5345 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5346 // Workgroup accessor represents a variable reference.
5347 if (!operand_call->getCalledFunction()->getName().startswith(
5348 clspv::WorkgroupAccessorFunction()))
5349 setVariablePointers(true);
5350 } else {
5351 // Arguments are function parameters.
5352 if (!isa<Argument>(operand))
5353 setVariablePointers(true);
5354 }
5355 }
5356 }
5357 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005358 }
5359
David Neto87846742018-04-11 17:36:22 -04005360 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5361 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005362 SPIRVInstList.insert(InsertPoint, CallInst);
5363 }
5364 }
5365 }
5366}
5367
David Neto1a1a0582017-07-07 12:01:44 -04005368void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005369 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005370 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005371 }
David Neto1a1a0582017-07-07 12:01:44 -04005372
5373 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005374
5375 // Find an iterator pointing just past the last decoration.
5376 bool seen_decorations = false;
5377 auto DecoInsertPoint =
5378 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5379 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5380 const bool is_decoration =
5381 Inst->getOpcode() == spv::OpDecorate ||
5382 Inst->getOpcode() == spv::OpMemberDecorate;
5383 if (is_decoration) {
5384 seen_decorations = true;
5385 return false;
5386 } else {
5387 return seen_decorations;
5388 }
5389 });
5390
David Netoc6f3ab22018-04-06 18:02:31 -04005391 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5392 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005393 for (auto *type : getTypesNeedingArrayStride()) {
5394 Type *elemTy = nullptr;
5395 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5396 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005397 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005398 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005399 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005400 elemTy = seqTy->getSequentialElementType();
5401 } else {
5402 errs() << "Unhandled strided type " << *type << "\n";
5403 llvm_unreachable("Unhandled strided type");
5404 }
David Neto1a1a0582017-07-07 12:01:44 -04005405
5406 // Ops[0] = Target ID
5407 // Ops[1] = Decoration (ArrayStride)
5408 // Ops[2] = Stride number (Literal Number)
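    //
    // Illustrative result (id name hypothetical): for a pointer to a
    // 16-byte element type such as float4, this emits
    //   OpDecorate %ptr_type ArrayStride 16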
5409 SPIRVOperandList Ops;
5410
David Neto85082642018-03-24 06:55:20 -07005411 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005412 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005413
5414 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5415 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005416
David Neto87846742018-04-11 17:36:22 -04005417 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005418 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5419 }
David Netoc6f3ab22018-04-06 18:02:31 -04005420
5421 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005422 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5423 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005424 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005425 SPIRVOperandList Ops;
5426 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5427 << MkNum(arg_info.spec_id);
5428 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005429 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005430 }
David Neto1a1a0582017-07-07 12:01:44 -04005431}
5432
David Neto22f144c2017-06-12 14:26:21 -04005433glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
5434 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005435 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5436 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5437 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5438 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005439 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5440 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5441 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5442 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005443 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5444 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5445 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5446 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005447 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5448 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5449 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5450 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005451 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5452 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5453 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5454 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5455 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5456 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5457 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5458 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005459 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5460 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5461 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5462 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5463 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5464 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5465 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5466 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005467 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5468 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5469 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5470 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5471 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5472 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5473 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5474 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005475 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5476 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5477 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5478 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5479 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5480 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5481 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5482 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005483 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5484 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5485 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5486 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-baker49bb5fb2020-01-15 08:22:13 -05005487 .Case("_Z5clampDhDhDh", glsl::ExtInst::ExtInstFClamp)
5488 .Case("_Z5clampDv2_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5489 .Case("_Z5clampDv3_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5490 .Case("_Z5clampDv4_DhS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005491 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5492 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5493 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5494 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5495 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5496 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5497 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5498 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005499 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5500 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5501 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5502 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5503 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5504 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5505 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5506 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005507 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5508 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5509 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5510 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5511 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5512 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5513 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5514 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005515 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5516 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5517 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5518 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5519 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5520 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5521 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5522 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005523 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5524 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5525 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5526 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
alan-baker49bb5fb2020-01-15 08:22:13 -05005527 .Case("_Z3maxDhDh", glsl::ExtInst::ExtInstFMax)
5528 .Case("_Z3maxDv2_DhS_", glsl::ExtInst::ExtInstFMax)
5529 .Case("_Z3maxDv3_DhS_", glsl::ExtInst::ExtInstFMax)
5530 .Case("_Z3maxDv4_DhS_", glsl::ExtInst::ExtInstFMax)
David Neto22f144c2017-06-12 14:26:21 -04005531 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005532 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5533 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5534 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5535 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5536 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5537 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5538 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5539 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005540 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5541 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5542 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5543 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5544 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5545 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5546 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5547 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005548 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5549 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5550 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5551 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5552 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5553 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5554 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5555 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005556 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5557 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5558 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5559 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5560 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5561 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5562 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5563 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005564 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5565 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5566 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5567 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
alan-baker49bb5fb2020-01-15 08:22:13 -05005568 .Case("_Z3minDhDh", glsl::ExtInst::ExtInstFMin)
5569 .Case("_Z3minDv2_DhS_", glsl::ExtInst::ExtInstFMin)
5570 .Case("_Z3minDv3_DhS_", glsl::ExtInst::ExtInstFMin)
5571 .Case("_Z3minDv4_DhS_", glsl::ExtInst::ExtInstFMin)
David Neto22f144c2017-06-12 14:26:21 -04005572 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5573 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5574 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5575 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5576 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5577 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5578 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5579 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5580 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5581 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5582 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5583 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5584 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5585 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5586 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5587 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5588 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5589 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5590 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5591 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5592 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5593 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5594 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5595 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5596 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5597 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5598 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5599 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5600 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5601 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5602 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5603 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5604 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5605 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5606 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5607 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5608 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005609 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005610 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5611 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5612 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5613 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5614 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5615 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5616 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5617 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5618 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5619 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5620 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5621 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5622 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5623 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5624 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5625 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5626 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005627 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005628 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005629 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005630 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005631 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005632 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5633 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005634 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005635 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5636 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5637 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005638 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5639 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5640 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5641 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005642 .Default(kGlslExtInstBad);
5643}
5644
5645glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5646 // Check indirect cases.
5647 return StringSwitch<glsl::ExtInst>(Name)
5648 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5649 // Use exact match on float arg because these need a multiply
5650 // of a constant of the right floating point type.
5651 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5652 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5653 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5654 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5655 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5656 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5657 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5658 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005659 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5660 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5661 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5662 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005663 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5664 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5665 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5666 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5667 .Default(kGlslExtInstBad);
5668}
5669
alan-bakerb6b09dc2018-11-08 16:59:28 -05005670glsl::ExtInst
5671SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005672 auto direct = getExtInstEnum(Name);
5673 if (direct != kGlslExtInstBad)
5674 return direct;
5675 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005676}
5677
David Neto22f144c2017-06-12 14:26:21 -04005678void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005679 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005680}
5681
5682void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5683 WriteOneWord(Inst->getResultID());
5684}
5685
5686void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5687 // High 16 bits : Word Count
5688 // Low 16 bits : Opcode
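  // For example, an instruction with opcode OpTypeFloat (22) and a word
  // count of 3 is encoded as the single word 0x00030016.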
5689 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005690 const uint32_t count = Inst->getWordCount();
5691 if (count > 65535) {
5692 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5693 llvm_unreachable("Word count too high");
5694 }
David Neto22f144c2017-06-12 14:26:21 -04005695 Word |= Inst->getWordCount() << 16;
5696 WriteOneWord(Word);
5697}
5698
David Netoef5ba2b2019-12-20 08:35:54 -05005699void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005700 SPIRVOperandType OpTy = Op->getType();
5701 switch (OpTy) {
5702 default: {
5703 llvm_unreachable("Unsupported SPIRV Operand Type???");
5704 break;
5705 }
5706 case SPIRVOperandType::NUMBERID: {
5707 WriteOneWord(Op->getNumID());
5708 break;
5709 }
5710 case SPIRVOperandType::LITERAL_STRING: {
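    // Pack four bytes of the string into each 32-bit word (the first
    // character ends up in the lowest byte on a little-endian host). A
    // final word is always written so the string is null terminated; for
    // example, "main" becomes 0x6E69616D followed by 0x00000000.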
5711 std::string Str = Op->getLiteralStr();
5712 const char *Data = Str.c_str();
5713 size_t WordSize = Str.size() / 4;
5714 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5715 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5716 }
5717
5718 uint32_t Remainder = Str.size() % 4;
5719 uint32_t LastWord = 0;
5720 if (Remainder) {
5721 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5722 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5723 }
5724 }
5725
5726 WriteOneWord(LastWord);
5727 break;
5728 }
5729 case SPIRVOperandType::LITERAL_INTEGER:
5730 case SPIRVOperandType::LITERAL_FLOAT: {
5731 auto LiteralNum = Op->getLiteralNum();
5732 // TODO: Handle LiteralNum carefully.
5733 for (auto Word : LiteralNum) {
5734 WriteOneWord(Word);
5735 }
5736 break;
5737 }
5738 }
5739}
5740
5741void SPIRVProducerPass::WriteSPIRVBinary() {
5742 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5743
5744 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005745 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005746 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
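    // The cases below fall into three emission patterns: instructions with
    // no result id, instructions whose result id directly follows the
    // opcode word (e.g. type declarations), and instructions whose result
    // id follows the result-type operand.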
5747
5748 switch (Opcode) {
5749 default: {
David Neto5c22a252018-03-15 16:07:41 -04005750 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005751 llvm_unreachable("Unsupported SPIRV instruction");
5752 break;
5753 }
5754 case spv::OpCapability:
5755 case spv::OpExtension:
5756 case spv::OpMemoryModel:
5757 case spv::OpEntryPoint:
5758 case spv::OpExecutionMode:
5759 case spv::OpSource:
5760 case spv::OpDecorate:
5761 case spv::OpMemberDecorate:
5762 case spv::OpBranch:
5763 case spv::OpBranchConditional:
5764 case spv::OpSelectionMerge:
5765 case spv::OpLoopMerge:
5766 case spv::OpStore:
5767 case spv::OpImageWrite:
5768 case spv::OpReturnValue:
5769 case spv::OpControlBarrier:
5770 case spv::OpMemoryBarrier:
5771 case spv::OpReturn:
5772 case spv::OpFunctionEnd:
5773 case spv::OpCopyMemory: {
5774 WriteWordCountAndOpcode(Inst);
5775 for (uint32_t i = 0; i < Ops.size(); i++) {
5776 WriteOperand(Ops[i]);
5777 }
5778 break;
5779 }
5780 case spv::OpTypeBool:
5781 case spv::OpTypeVoid:
5782 case spv::OpTypeSampler:
5783 case spv::OpLabel:
5784 case spv::OpExtInstImport:
5785 case spv::OpTypePointer:
5786 case spv::OpTypeRuntimeArray:
5787 case spv::OpTypeStruct:
5788 case spv::OpTypeImage:
5789 case spv::OpTypeSampledImage:
5790 case spv::OpTypeInt:
5791 case spv::OpTypeFloat:
5792 case spv::OpTypeArray:
5793 case spv::OpTypeVector:
5794 case spv::OpTypeFunction: {
5795 WriteWordCountAndOpcode(Inst);
5796 WriteResultID(Inst);
5797 for (uint32_t i = 0; i < Ops.size(); i++) {
5798 WriteOperand(Ops[i]);
5799 }
5800 break;
5801 }
5802 case spv::OpFunction:
5803 case spv::OpFunctionParameter:
5804 case spv::OpAccessChain:
5805 case spv::OpPtrAccessChain:
5806 case spv::OpInBoundsAccessChain:
5807 case spv::OpUConvert:
5808 case spv::OpSConvert:
5809 case spv::OpConvertFToU:
5810 case spv::OpConvertFToS:
5811 case spv::OpConvertUToF:
5812 case spv::OpConvertSToF:
5813 case spv::OpFConvert:
5814 case spv::OpConvertPtrToU:
5815 case spv::OpConvertUToPtr:
5816 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005817 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005818 case spv::OpIAdd:
5819 case spv::OpFAdd:
5820 case spv::OpISub:
5821 case spv::OpFSub:
5822 case spv::OpIMul:
5823 case spv::OpFMul:
5824 case spv::OpUDiv:
5825 case spv::OpSDiv:
5826 case spv::OpFDiv:
5827 case spv::OpUMod:
5828 case spv::OpSRem:
5829 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005830 case spv::OpUMulExtended:
5831 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005832 case spv::OpBitwiseOr:
5833 case spv::OpBitwiseXor:
5834 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005835 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005836 case spv::OpShiftLeftLogical:
5837 case spv::OpShiftRightLogical:
5838 case spv::OpShiftRightArithmetic:
5839 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005840 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005841 case spv::OpCompositeExtract:
5842 case spv::OpVectorExtractDynamic:
5843 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005844 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005845 case spv::OpVectorInsertDynamic:
5846 case spv::OpVectorShuffle:
5847 case spv::OpIEqual:
5848 case spv::OpINotEqual:
5849 case spv::OpUGreaterThan:
5850 case spv::OpUGreaterThanEqual:
5851 case spv::OpULessThan:
5852 case spv::OpULessThanEqual:
5853 case spv::OpSGreaterThan:
5854 case spv::OpSGreaterThanEqual:
5855 case spv::OpSLessThan:
5856 case spv::OpSLessThanEqual:
5857 case spv::OpFOrdEqual:
5858 case spv::OpFOrdGreaterThan:
5859 case spv::OpFOrdGreaterThanEqual:
5860 case spv::OpFOrdLessThan:
5861 case spv::OpFOrdLessThanEqual:
5862 case spv::OpFOrdNotEqual:
5863 case spv::OpFUnordEqual:
5864 case spv::OpFUnordGreaterThan:
5865 case spv::OpFUnordGreaterThanEqual:
5866 case spv::OpFUnordLessThan:
5867 case spv::OpFUnordLessThanEqual:
5868 case spv::OpFUnordNotEqual:
5869 case spv::OpExtInst:
5870 case spv::OpIsInf:
5871 case spv::OpIsNan:
5872 case spv::OpAny:
5873 case spv::OpAll:
5874 case spv::OpUndef:
5875 case spv::OpConstantNull:
5876 case spv::OpLogicalOr:
5877 case spv::OpLogicalAnd:
5878 case spv::OpLogicalNot:
5879 case spv::OpLogicalNotEqual:
5880 case spv::OpConstantComposite:
5881 case spv::OpSpecConstantComposite:
5882 case spv::OpConstantTrue:
5883 case spv::OpConstantFalse:
5884 case spv::OpConstant:
5885 case spv::OpSpecConstant:
5886 case spv::OpVariable:
5887 case spv::OpFunctionCall:
5888 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005889 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04005890 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005891 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005892 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005893 case spv::OpSelect:
5894 case spv::OpPhi:
5895 case spv::OpLoad:
5896 case spv::OpAtomicIAdd:
5897 case spv::OpAtomicISub:
5898 case spv::OpAtomicExchange:
5899 case spv::OpAtomicIIncrement:
5900 case spv::OpAtomicIDecrement:
5901 case spv::OpAtomicCompareExchange:
5902 case spv::OpAtomicUMin:
5903 case spv::OpAtomicSMin:
5904 case spv::OpAtomicUMax:
5905 case spv::OpAtomicSMax:
5906 case spv::OpAtomicAnd:
5907 case spv::OpAtomicOr:
5908 case spv::OpAtomicXor:
5909 case spv::OpDot: {
5910 WriteWordCountAndOpcode(Inst);
5911 WriteOperand(Ops[0]);
5912 WriteResultID(Inst);
5913 for (uint32_t i = 1; i < Ops.size(); i++) {
5914 WriteOperand(Ops[i]);
5915 }
5916 break;
5917 }
5918 }
5919 }
5920}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005921
alan-bakerb6b09dc2018-11-08 16:59:28 -05005922bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005923 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005924 case Type::HalfTyID:
5925 case Type::FloatTyID:
5926 case Type::DoubleTyID:
5927 case Type::IntegerTyID:
5928 case Type::VectorTyID:
5929 return true;
5930 case Type::PointerTyID: {
5931 const PointerType *pointer_type = cast<PointerType>(type);
5932 if (pointer_type->getPointerAddressSpace() !=
5933 AddressSpace::UniformConstant) {
5934 auto pointee_type = pointer_type->getPointerElementType();
5935 if (pointee_type->isStructTy() &&
5936 cast<StructType>(pointee_type)->isOpaque()) {
5937 // Images and samplers are not nullable.
5938 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005939 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005940 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005941 return true;
5942 }
5943 case Type::ArrayTyID:
5944 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
5945 case Type::StructTyID: {
5946 const StructType *struct_type = cast<StructType>(type);
5947 // Images and samplers are not nullable.
5948 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005949 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005950 for (const auto element : struct_type->elements()) {
5951 if (!IsTypeNullable(element))
5952 return false;
5953 }
5954 return true;
5955 }
5956 default:
5957 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005958 }
5959}
Alan Bakerfcda9482018-10-02 17:09:59 -04005960
5961void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
5962 if (auto *offsets_md =
5963 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
5964 // Metadata is stored as key-value pair operands. The first element of each
5965 // operand is the type and the second is a vector of offsets.
5966 for (const auto *operand : offsets_md->operands()) {
5967 const auto *pair = cast<MDTuple>(operand);
5968 auto *type =
5969 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5970 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5971 std::vector<uint32_t> offsets;
5972 for (const Metadata *offset_md : offset_vector->operands()) {
5973 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005974 offsets.push_back(static_cast<uint32_t>(
5975 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005976 }
5977 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5978 }
5979 }
5980
5981 if (auto *sizes_md =
5982 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
5983 // Metadata is stored as key-value pair operands. The first element of each
5984 // operand is the type and the second is a triple of sizes: type size in
5985 // bits, store size and alloc size.
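    // A rough sketch of one operand's shape (the exact encoding is an
    // assumption; values here are hypothetical):
    //   !{ <3 x float> undef, !{ i64 96, i64 12, i64 16 } }
    // i.e. type size in bits, store size, and alloc size.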
5986 for (const auto *operand : sizes_md->operands()) {
5987 const auto *pair = cast<MDTuple>(operand);
5988 auto *type =
5989 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5990 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5991 uint64_t type_size_in_bits =
5992 cast<ConstantInt>(
5993 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5994 ->getZExtValue();
5995 uint64_t type_store_size =
5996 cast<ConstantInt>(
5997 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5998 ->getZExtValue();
5999 uint64_t type_alloc_size =
6000 cast<ConstantInt>(
6001 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6002 ->getZExtValue();
6003 RemappedUBOTypeSizes.insert(std::make_pair(
6004 type, std::make_tuple(type_size_in_bits, type_store_size,
6005 type_alloc_size)));
6006 }
6007 }
6008}
6009
6010uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
6011 const DataLayout &DL) {
6012 auto iter = RemappedUBOTypeSizes.find(type);
6013 if (iter != RemappedUBOTypeSizes.end()) {
6014 return std::get<0>(iter->second);
6015 }
6016
6017 return DL.getTypeSizeInBits(type);
6018}
6019
6020uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
6021 auto iter = RemappedUBOTypeSizes.find(type);
6022 if (iter != RemappedUBOTypeSizes.end()) {
6023 return std::get<1>(iter->second);
6024 }
6025
6026 return DL.getTypeStoreSize(type);
6027}
6028
6029uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
6030 auto iter = RemappedUBOTypeSizes.find(type);
6031 if (iter != RemappedUBOTypeSizes.end()) {
6032 return std::get<2>(iter->second);
6033 }
6034
6035 return DL.getTypeAllocSize(type);
6036}
alan-baker5b86ed72019-02-15 08:26:50 -05006037
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006038void SPIRVProducerPass::setVariablePointersCapabilities(
6039 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05006040 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
6041 setVariablePointersStorageBuffer(true);
6042 } else {
6043 setVariablePointers(true);
6044 }
6045}
6046
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04006047Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05006048 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
6049 return GetBasePointer(gep->getPointerOperand());
6050 }
6051
6052 // Conservatively return |v|.
6053 return v;
6054}
6055
6056bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
6057 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
6058 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
6059 if (lhs_call->getCalledFunction()->getName().startswith(
6060 clspv::ResourceAccessorFunction()) &&
6061 rhs_call->getCalledFunction()->getName().startswith(
6062 clspv::ResourceAccessorFunction())) {
6063 // For resource accessors, match descriptor set and binding.
6064 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
6065 lhs_call->getOperand(1) == rhs_call->getOperand(1))
6066 return true;
6067 } else if (lhs_call->getCalledFunction()->getName().startswith(
6068 clspv::WorkgroupAccessorFunction()) &&
6069 rhs_call->getCalledFunction()->getName().startswith(
6070 clspv::WorkgroupAccessorFunction())) {
6071 // For workgroup resources, match spec id.
6072 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
6073 return true;
6074 }
6075 }
6076 }
6077
6078 return false;
6079}
6080
6081bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
6082 assert(inst->getType()->isPointerTy());
6083 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
6084 spv::StorageClassStorageBuffer);
6085 const bool hack_undef = clspv::Option::HackUndef();
6086 if (auto *select = dyn_cast<SelectInst>(inst)) {
6087 auto *true_base = GetBasePointer(select->getTrueValue());
6088 auto *false_base = GetBasePointer(select->getFalseValue());
6089
6090 if (true_base == false_base)
6091 return true;
6092
6093 // If either the true or false operand is null, then we satisfy the same
6094 // object constraint.
6095 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
6096 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
6097 return true;
6098 }
6099
6100 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
6101 if (false_cst->isNullValue() ||
6102 (hack_undef && isa<UndefValue>(false_base)))
6103 return true;
6104 }
6105
6106 if (sameResource(true_base, false_base))
6107 return true;
6108 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
6109 Value *value = nullptr;
6110 bool ok = true;
6111 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
6112 auto *base = GetBasePointer(phi->getIncomingValue(i));
6113 // Null values satisfy the constraint of selecting from the
6114 // same object.
6115 if (!value) {
6116 if (auto *cst = dyn_cast<Constant>(base)) {
6117 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
6118 value = base;
6119 } else {
6120 value = base;
6121 }
6122 } else if (base != value) {
6123 if (auto *base_cst = dyn_cast<Constant>(base)) {
6124 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
6125 continue;
6126 }
6127
6128 if (sameResource(value, base))
6129 continue;
6130
6131 // Values don't represent the same base.
6132 ok = false;
6133 }
6134 }
6135
6136 return ok;
6137 }
6138
6139 // Conservatively return false.
6140 return false;
6141}
alan-bakere9308012019-03-15 10:25:13 -04006142
6143bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
6144 if (!Arg.getType()->isPointerTy() ||
6145 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
6146 // Only SSBOs need to be annotated as coherent.
6147 return false;
6148 }
6149
6150 DenseSet<Value *> visited;
6151 std::vector<Value *> stack;
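  // Seed the worklist with the value passed for this parameter at every
  // call site, then walk backwards through resource accessor calls,
  // function arguments, and pointer operands looking for a binding marked
  // coherent.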
6152 for (auto *U : Arg.getParent()->users()) {
6153 if (auto *call = dyn_cast<CallInst>(U)) {
6154 stack.push_back(call->getOperand(Arg.getArgNo()));
6155 }
6156 }
6157
6158 while (!stack.empty()) {
6159 Value *v = stack.back();
6160 stack.pop_back();
6161
6162 if (!visited.insert(v).second)
6163 continue;
6164
6165 auto *resource_call = dyn_cast<CallInst>(v);
6166 if (resource_call &&
6167 resource_call->getCalledFunction()->getName().startswith(
6168 clspv::ResourceAccessorFunction())) {
6169 // If this is a resource accessor function, check if the coherent operand
6170 // is set.
6171 const auto coherent =
6172 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
6173 ->getZExtValue());
6174 if (coherent == 1)
6175 return true;
6176 } else if (auto *arg = dyn_cast<Argument>(v)) {
6177 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04006178 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04006179 if (auto *call = dyn_cast<CallInst>(U)) {
6180 stack.push_back(call->getOperand(arg->getArgNo()));
6181 }
6182 }
6183 } else if (auto *user = dyn_cast<User>(v)) {
6184 // If this is a user, traverse all operands that could lead to resource
6185 // variables.
6186 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
6187 Value *operand = user->getOperand(i);
6188 if (operand->getType()->isPointerTy() &&
6189 operand->getType()->getPointerAddressSpace() ==
6190 clspv::AddressSpace::Global) {
6191 stack.push_back(operand);
6192 }
6193 }
6194 }
6195 }
6196
6197 // No coherent resource variables encountered.
6198 return false;
6199}
alan-baker06cad652019-12-03 17:56:47 -05006200
6201void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
6202 // First, track loop merges and continues.
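  // Illustrative result for a simple for-loop (block names hypothetical):
  //   MergeBlocks[for.cond]    = for.end  (the loop's exit block)
  //   ContinueBlocks[for.cond] = for.inc  (the back-edge block)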
6203 DenseSet<BasicBlock *> LoopMergesAndContinues;
6204 for (auto &F : module) {
6205 if (F.isDeclaration())
6206 continue;
6207
6208 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
6209 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
6210 std::deque<BasicBlock *> order;
6211 DenseSet<BasicBlock *> visited;
6212 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
6213
6214 for (auto BB : order) {
6215 auto terminator = BB->getTerminator();
6216 auto branch = dyn_cast<BranchInst>(terminator);
6217 if (LI.isLoopHeader(BB)) {
6218 auto L = LI.getLoopFor(BB);
6219 BasicBlock *ContinueBB = nullptr;
6220 BasicBlock *MergeBB = nullptr;
6221
6222 MergeBB = L->getExitBlock();
6223 if (!MergeBB) {
6224 // The StructurizeCFG pass converts the CFG into a triangle shape so
6225 // that each region has a single entry/exit. As a result, a loop
6226 // should not have multiple exits.
6227 llvm_unreachable("Loop has multiple exits???");
6228 }
6229
6230 if (L->isLoopLatch(BB)) {
6231 ContinueBB = BB;
6232 } else {
6233 // From SPIR-V spec 2.11, the Continue Target must dominate the back-edge
6234 // block.
6235 BasicBlock *Header = L->getHeader();
6236 BasicBlock *Latch = L->getLoopLatch();
6237 for (auto *loop_block : L->blocks()) {
6238 if (loop_block == Header) {
6239 continue;
6240 }
6241
6242 // Check whether the block dominates the block with the back-edge.
6243 // The loop latch is the single block with a back-edge. If possible,
6244 // StructurizeCFG made the loop conform to this requirement;
6245 // otherwise |Latch| is a nullptr.
6246 if (DT.dominates(loop_block, Latch)) {
6247 ContinueBB = loop_block;
6248 }
6249 }
6250
6251 if (!ContinueBB) {
6252 llvm_unreachable("Wrong continue block from loop");
6253 }
6254 }
6255
6256 // Record the continue and merge blocks.
6257 MergeBlocks[BB] = MergeBB;
6258 ContinueBlocks[BB] = ContinueBB;
6259 LoopMergesAndContinues.insert(MergeBB);
6260 LoopMergesAndContinues.insert(ContinueBB);
6261 } else if (branch && branch->isConditional()) {
6262 auto L = LI.getLoopFor(BB);
6263 bool HasBackedge = false;
6264 while (L && !HasBackedge) {
6265 if (L->isLoopLatch(BB)) {
6266 HasBackedge = true;
6267 }
6268 L = L->getParentLoop();
6269 }
6270
6271 if (!HasBackedge) {
6272 // Only need a merge if the branch doesn't include a loop break or
6273 // continue.
6274 auto true_bb = branch->getSuccessor(0);
6275 auto false_bb = branch->getSuccessor(1);
6276 if (!LoopMergesAndContinues.count(true_bb) &&
6277 !LoopMergesAndContinues.count(false_bb)) {
6278 // The StructurizeCFG pass already manipulated the CFG. Just use the
6279 // false block of the branch instruction as the merge block.
6280 MergeBlocks[BB] = false_bb;
6281 }
6282 }
6283 }
6284 }
6285 }
6286}